Detailed changes
@@ -35,6 +35,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: compare_perf::run_perf::install_hyperfine
run: cargo install hyperfine
shell: bash -euxo pipefail {0}
@@ -57,16 +57,19 @@ jobs:
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@v1
+ with:
+ cache: rust
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- - name: steps::cache_rust_dependencies_namespace
- uses: namespacelabs/nscloud-cache-action@v1
- with:
- cache: rust
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
@@ -202,6 +205,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -242,6 +248,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -93,6 +93,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -140,6 +143,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -8,22 +8,16 @@ env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_EVAL_TELEMETRY: '1'
+ MODEL_NAME: ${{ inputs.model_name }}
on:
- pull_request:
- types:
- - synchronize
- - reopened
- - labeled
- branches:
- - '**'
- schedule:
- - cron: 0 0 * * *
- workflow_dispatch: {}
+ workflow_dispatch:
+ inputs:
+ model_name:
+ description: Name of the language model to run the agent evals against (passed as --model)
+ required: true
+ type: string
jobs:
agent_evals:
- if: |
- github.repository_owner == 'zed-industries' &&
- (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
@@ -40,6 +34,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
@@ -49,14 +46,14 @@ jobs:
run: cargo build --package=eval
shell: bash -euxo pipefail {0}
- name: run_agent_evals::agent_evals::run_eval
- run: cargo run --package=eval -- --repetitions=8 --concurrency=1
+ run: cargo run --package=eval -- --repetitions=8 --concurrency=1 --model "${MODEL_NAME}"
shell: bash -euxo pipefail {0}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
- timeout-minutes: 60
+ timeout-minutes: 600
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true
@@ -34,6 +34,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -74,6 +77,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -143,16 +143,19 @@ jobs:
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@v1
+ with:
+ cache: rust
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- - name: steps::cache_rust_dependencies_namespace
- uses: namespacelabs/nscloud-cache-action@v1
- with:
- cache: rust
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
@@ -232,6 +235,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
@@ -263,16 +269,19 @@ jobs:
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@v1
+ with:
+ cache: rust
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- - name: steps::cache_rust_dependencies_namespace
- uses: namespacelabs/nscloud-cache-action@v1
- with:
- cache: rust
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: cargo build -p collab
run: cargo build -p collab
shell: bash -euxo pipefail {0}
@@ -348,6 +357,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: run_tests::check_docs::install_mdbook
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08
with:
@@ -33,6 +33,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
@@ -96,6 +96,7 @@ dependencies = [
"auto_update",
"editor",
"extension_host",
+ "fs",
"futures 0.3.31",
"gpui",
"language",
@@ -1330,10 +1331,14 @@ version = "0.1.0"
dependencies = [
"anyhow",
"client",
+ "clock",
+ "ctor",
"db",
+ "futures 0.3.31",
"gpui",
"http_client",
"log",
+ "parking_lot",
"paths",
"release_channel",
"serde",
@@ -1344,6 +1349,7 @@ dependencies = [
"util",
"which 6.0.3",
"workspace",
+ "zlog",
]
[[package]]
@@ -7799,6 +7805,7 @@ dependencies = [
"parking_lot",
"serde",
"serde_json",
+ "serde_urlencoded",
"sha2",
"tempfile",
"url",
@@ -1,7 +1,7 @@
# Zed
[](https://zed.dev)
-[](https://github.com/zed-industries/zed/actions/workflows/ci.yml)
+[](https://github.com/zed-industries/zed/actions/workflows/run_tests.yml)
Welcome to Zed, a high-performance, multiplayer code editor from the creators of [Atom](https://github.com/atom/atom) and [Tree-sitter](https://github.com/tree-sitter/tree-sitter).
@@ -605,6 +605,10 @@
// to both the horizontal and vertical delta values while scrolling. Fast scrolling
// happens when a user holds the alt or option key while scrolling.
"fast_scroll_sensitivity": 4.0,
+ "sticky_scroll": {
+ // Whether to stick scopes to the top of the editor.
+ "enabled": false
+ },
"relative_line_numbers": "disabled",
// If 'search_wrap' is disabled, search result do not wrap around the end of the file.
"search_wrap": true,
@@ -612,9 +616,13 @@
"search": {
// Whether to show the project search button in the status bar.
"button": true,
+ // Whether to only match on whole words.
"whole_word": false,
+ // Whether to match case sensitively.
"case_sensitive": false,
+ // Whether to include gitignored files in search results.
"include_ignored": false,
+ // Whether to interpret the search query as a regular expression.
"regex": false,
// Whether to center the cursor on each search match when navigating.
"center_on_match": false
@@ -17,6 +17,7 @@ anyhow.workspace = true
auto_update.workspace = true
editor.workspace = true
extension_host.workspace = true
+fs.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true
@@ -51,6 +51,7 @@ pub struct ActivityIndicator {
project: Entity<Project>,
auto_updater: Option<Entity<AutoUpdater>>,
context_menu_handle: PopoverMenuHandle<ContextMenu>,
+ fs_jobs: Vec<fs::JobInfo>,
}
#[derive(Debug)]
@@ -99,6 +100,27 @@ impl ActivityIndicator {
})
.detach();
+ let fs = project.read(cx).fs().clone();
+ let mut job_events = fs.subscribe_to_jobs();
+ cx.spawn(async move |this, cx| {
+ while let Some(job_event) = job_events.next().await {
+ this.update(cx, |this: &mut ActivityIndicator, cx| {
+ match job_event {
+ fs::JobEvent::Started { info } => {
+ this.fs_jobs.retain(|j| j.id != info.id);
+ this.fs_jobs.push(info);
+ }
+ fs::JobEvent::Completed { id } => {
+ this.fs_jobs.retain(|j| j.id != id);
+ }
+ }
+ cx.notify();
+ })?;
+ }
+ anyhow::Ok(())
+ })
+ .detach();
+
cx.subscribe(
&project.read(cx).lsp_store(),
|activity_indicator, _, event, cx| {
@@ -201,7 +223,8 @@ impl ActivityIndicator {
statuses: Vec::new(),
project: project.clone(),
auto_updater,
- context_menu_handle: Default::default(),
+ context_menu_handle: PopoverMenuHandle::default(),
+ fs_jobs: Vec::new(),
}
});
@@ -432,6 +455,23 @@ impl ActivityIndicator {
});
}
+ // Surface any filesystem job that has been running longer than the delay threshold.
+ for fs_job in &self.fs_jobs {
+ if Instant::now().duration_since(fs_job.start) >= GIT_OPERATION_DELAY {
+ return Some(Content {
+ icon: Some(
+ Icon::new(IconName::ArrowCircle)
+ .size(IconSize::Small)
+ .with_rotate_animation(2)
+ .into_any_element(),
+ ),
+ message: fs_job.message.clone().into(),
+ on_click: None,
+ tooltip_message: None,
+ });
+ }
+ }
+
// Show any language server installation info.
let mut downloading = SmallVec::<[_; 3]>::new();
let mut checking_for_update = SmallVec::<[_; 3]>::new();
@@ -933,7 +933,7 @@ async fn test_profiles(cx: &mut TestAppContext) {
// Test that test-1 profile (default) has echo and delay tools
thread
.update(cx, |thread, cx| {
- thread.set_profile(AgentProfileId("test-1".into()));
+ thread.set_profile(AgentProfileId("test-1".into()), cx);
thread.send(UserMessageId::new(), ["test"], cx)
})
.unwrap();
@@ -953,7 +953,7 @@ async fn test_profiles(cx: &mut TestAppContext) {
// Switch to test-2 profile, and verify that it has only the infinite tool.
thread
.update(cx, |thread, cx| {
- thread.set_profile(AgentProfileId("test-2".into()));
+ thread.set_profile(AgentProfileId("test-2".into()), cx);
thread.send(UserMessageId::new(), ["test2"], cx)
})
.unwrap();
@@ -1002,8 +1002,8 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
)
.await;
cx.run_until_parked();
- thread.update(cx, |thread, _| {
- thread.set_profile(AgentProfileId("test".into()))
+ thread.update(cx, |thread, cx| {
+ thread.set_profile(AgentProfileId("test".into()), cx)
});
let mut mcp_tool_calls = setup_context_server(
@@ -1169,8 +1169,8 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) {
.await;
cx.run_until_parked();
- thread.update(cx, |thread, _| {
- thread.set_profile(AgentProfileId("test".into()));
+ thread.update(cx, |thread, cx| {
+ thread.set_profile(AgentProfileId("test".into()), cx);
thread.add_tool(EchoTool);
thread.add_tool(DelayTool);
thread.add_tool(WordListTool);
@@ -30,16 +30,17 @@ use gpui::{
};
use language_model::{
LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelExt,
- LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest,
- LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
- LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse,
- LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID,
+ LanguageModelId, LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry,
+ LanguageModelRequest, LanguageModelRequestMessage, LanguageModelRequestTool,
+ LanguageModelToolResult, LanguageModelToolResultContent, LanguageModelToolSchemaFormat,
+ LanguageModelToolUse, LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage,
+ ZED_CLOUD_PROVIDER_ID,
};
use project::Project;
use prompt_store::ProjectContext;
use schemars::{JsonSchema, Schema};
use serde::{Deserialize, Serialize};
-use settings::{Settings, update_settings_file};
+use settings::{LanguageModelSelection, Settings, update_settings_file};
use smol::stream::StreamExt;
use std::{
collections::BTreeMap,
@@ -798,7 +799,8 @@ impl Thread {
let profile_id = db_thread
.profile
.unwrap_or_else(|| AgentSettings::get_global(cx).default_profile.clone());
- let model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
+
+ let mut model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
db_thread
.model
.and_then(|model| {
@@ -811,6 +813,16 @@ impl Thread {
.or_else(|| registry.default_model())
.map(|model| model.model)
});
+
+ if model.is_none() {
+ model = Self::resolve_profile_model(&profile_id, cx);
+ }
+ if model.is_none() {
+ model = LanguageModelRegistry::global(cx).update(cx, |registry, _cx| {
+ registry.default_model().map(|model| model.model)
+ });
+ }
+
let (prompt_capabilities_tx, prompt_capabilities_rx) =
watch::channel(Self::prompt_capabilities(model.as_deref()));
@@ -1007,8 +1019,17 @@ impl Thread {
&self.profile_id
}
- pub fn set_profile(&mut self, profile_id: AgentProfileId) {
+ pub fn set_profile(&mut self, profile_id: AgentProfileId, cx: &mut Context<Self>) {
+ if self.profile_id == profile_id {
+ return;
+ }
+
self.profile_id = profile_id;
+
+ // Swap to the profile's preferred model when available.
+ if let Some(model) = Self::resolve_profile_model(&self.profile_id, cx) {
+ self.set_model(model, cx);
+ }
}
pub fn cancel(&mut self, cx: &mut Context<Self>) {
@@ -1065,6 +1086,35 @@ impl Thread {
})
}
+ /// Look up the active profile and resolve its preferred model if one is configured.
+ fn resolve_profile_model(
+ profile_id: &AgentProfileId,
+ cx: &mut Context<Self>,
+ ) -> Option<Arc<dyn LanguageModel>> {
+ let selection = AgentSettings::get_global(cx)
+ .profiles
+ .get(profile_id)?
+ .default_model
+ .clone()?;
+ Self::resolve_model_from_selection(&selection, cx)
+ }
+
+ /// Translate a stored model selection into the configured model from the registry.
+ fn resolve_model_from_selection(
+ selection: &LanguageModelSelection,
+ cx: &mut Context<Self>,
+ ) -> Option<Arc<dyn LanguageModel>> {
+ let selected = SelectedModel {
+ provider: LanguageModelProviderId::from(selection.provider.0.clone()),
+ model: LanguageModelId::from(selection.model.clone()),
+ };
+ LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
+ registry
+ .select_model(&selected, cx)
+ .map(|configured| configured.model)
+ })
+ }
+
pub fn resume(
&mut self,
cx: &mut Context<Self>,
@@ -6,8 +6,8 @@ use convert_case::{Case, Casing as _};
use fs::Fs;
use gpui::{App, SharedString};
use settings::{
- AgentProfileContent, ContextServerPresetContent, Settings as _, SettingsContent,
- update_settings_file,
+ AgentProfileContent, ContextServerPresetContent, LanguageModelSelection, Settings as _,
+ SettingsContent, update_settings_file,
};
use util::ResultExt as _;
@@ -53,19 +53,30 @@ impl AgentProfile {
let base_profile =
base_profile_id.and_then(|id| AgentSettings::get_global(cx).profiles.get(&id).cloned());
+ // Copy toggles from the base profile so the new profile starts with familiar defaults.
+ let tools = base_profile
+ .as_ref()
+ .map(|profile| profile.tools.clone())
+ .unwrap_or_default();
+ let enable_all_context_servers = base_profile
+ .as_ref()
+ .map(|profile| profile.enable_all_context_servers)
+ .unwrap_or_default();
+ let context_servers = base_profile
+ .as_ref()
+ .map(|profile| profile.context_servers.clone())
+ .unwrap_or_default();
+ // Preserve the base profile's model preference when cloning into a new profile.
+ let default_model = base_profile
+ .as_ref()
+ .and_then(|profile| profile.default_model.clone());
+
let profile_settings = AgentProfileSettings {
name: name.into(),
- tools: base_profile
- .as_ref()
- .map(|profile| profile.tools.clone())
- .unwrap_or_default(),
- enable_all_context_servers: base_profile
- .as_ref()
- .map(|profile| profile.enable_all_context_servers)
- .unwrap_or_default(),
- context_servers: base_profile
- .map(|profile| profile.context_servers)
- .unwrap_or_default(),
+ tools,
+ enable_all_context_servers,
+ context_servers,
+ default_model,
};
update_settings_file(fs, cx, {
@@ -96,6 +107,8 @@ pub struct AgentProfileSettings {
pub tools: IndexMap<Arc<str>, bool>,
pub enable_all_context_servers: bool,
pub context_servers: IndexMap<Arc<str>, ContextServerPreset>,
+ /// Default language model to apply when this profile becomes active.
+ pub default_model: Option<LanguageModelSelection>,
}
impl AgentProfileSettings {
@@ -144,6 +157,7 @@ impl AgentProfileSettings {
)
})
.collect(),
+ default_model: self.default_model.clone(),
},
);
@@ -153,15 +167,23 @@ impl AgentProfileSettings {
impl From<AgentProfileContent> for AgentProfileSettings {
fn from(content: AgentProfileContent) -> Self {
+ let AgentProfileContent {
+ name,
+ tools,
+ enable_all_context_servers,
+ context_servers,
+ default_model,
+ } = content;
+
Self {
- name: content.name.into(),
- tools: content.tools,
- enable_all_context_servers: content.enable_all_context_servers.unwrap_or_default(),
- context_servers: content
- .context_servers
+ name: name.into(),
+ tools,
+ enable_all_context_servers: enable_all_context_servers.unwrap_or_default(),
+ context_servers: context_servers
.into_iter()
.map(|(server_id, preset)| (server_id, preset.into()))
.collect(),
+ default_model,
}
}
}
@@ -1195,6 +1195,17 @@ impl MessageEditor {
self.editor.read(cx).text(cx)
}
+ pub fn set_placeholder_text(
+ &mut self,
+ placeholder: &str,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.editor.update(cx, |editor, cx| {
+ editor.set_placeholder_text(placeholder, window, cx);
+ });
+ }
+
#[cfg(test)]
pub fn set_text(&mut self, text: &str, window: &mut Window, cx: &mut Context<Self>) {
self.editor.update(cx, |editor, cx| {
@@ -457,25 +457,23 @@ impl Render for AcpThreadHistory {
.on_action(cx.listener(Self::select_last))
.on_action(cx.listener(Self::confirm))
.on_action(cx.listener(Self::remove_selected_thread))
- .when(!self.history_store.read(cx).is_empty(cx), |parent| {
- parent.child(
- h_flex()
- .h(px(41.)) // Match the toolbar perfectly
- .w_full()
- .py_1()
- .px_2()
- .gap_2()
- .justify_between()
- .border_b_1()
- .border_color(cx.theme().colors().border)
- .child(
- Icon::new(IconName::MagnifyingGlass)
- .color(Color::Muted)
- .size(IconSize::Small),
- )
- .child(self.search_editor.clone()),
- )
- })
+ .child(
+ h_flex()
+ .h(px(41.)) // Match the toolbar perfectly
+ .w_full()
+ .py_1()
+ .px_2()
+ .gap_2()
+ .justify_between()
+ .border_b_1()
+ .border_color(cx.theme().colors().border)
+ .child(
+ Icon::new(IconName::MagnifyingGlass)
+ .color(Color::Muted)
+ .size(IconSize::Small),
+ )
+ .child(self.search_editor.clone()),
+ )
.child({
let view = v_flex()
.id("list-container")
@@ -484,19 +482,15 @@ impl Render for AcpThreadHistory {
.flex_grow();
if self.history_store.read(cx).is_empty(cx) {
- view.justify_center()
- .child(
- h_flex().w_full().justify_center().child(
- Label::new("You don't have any past threads yet.")
- .size(LabelSize::Small),
- ),
- )
- } else if self.search_produced_no_matches() {
- view.justify_center().child(
- h_flex().w_full().justify_center().child(
- Label::new("No threads match your search.").size(LabelSize::Small),
- ),
+ view.justify_center().items_center().child(
+ Label::new("You don't have any past threads yet.")
+ .size(LabelSize::Small)
+ .color(Color::Muted),
)
+ } else if self.search_produced_no_matches() {
+ view.justify_center()
+ .items_center()
+ .child(Label::new("No threads match your search.").size(LabelSize::Small))
} else {
view.child(
uniform_list(
@@ -673,7 +667,7 @@ impl EntryTimeFormat {
timezone,
time_format::TimestampFormat::EnhancedAbsolute,
),
- EntryTimeFormat::TimeOnly => time_format::format_time(timestamp),
+ EntryTimeFormat::TimeOnly => time_format::format_time(timestamp.to_offset(timezone)),
}
}
}
@@ -125,8 +125,9 @@ impl ProfileProvider for Entity<agent::Thread> {
}
fn set_profile(&self, profile_id: AgentProfileId, cx: &mut App) {
- self.update(cx, |thread, _cx| {
- thread.set_profile(profile_id);
+ self.update(cx, |thread, cx| {
+ // Apply the profile and let the thread swap to its default model.
+ thread.set_profile(profile_id, cx);
});
}
@@ -336,19 +337,7 @@ impl AcpThreadView {
let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default()));
let available_commands = Rc::new(RefCell::new(vec![]));
- let placeholder = if agent.name() == "Zed Agent" {
- format!("Message the {} β @ to include context", agent.name())
- } else if agent.name() == "Claude Code"
- || agent.name() == "Codex"
- || !available_commands.borrow().is_empty()
- {
- format!(
- "Message {} β @ to include context, / for commands",
- agent.name()
- )
- } else {
- format!("Message {} β @ to include context", agent.name())
- };
+ let placeholder = placeholder_text(agent.name().as_ref(), false);
let message_editor = cx.new(|cx| {
let mut editor = MessageEditor::new(
@@ -1455,7 +1444,14 @@ impl AcpThreadView {
});
}
+ let has_commands = !available_commands.is_empty();
self.available_commands.replace(available_commands);
+
+ let new_placeholder = placeholder_text(self.agent.name().as_ref(), has_commands);
+
+ self.message_editor.update(cx, |editor, cx| {
+ editor.set_placeholder_text(&new_placeholder, window, cx);
+ });
}
AcpThreadEvent::ModeUpdated(_mode) => {
// The connection keeps track of the mode
@@ -5707,6 +5703,19 @@ fn loading_contents_spinner(size: IconSize) -> AnyElement {
.into_any_element()
}
+fn placeholder_text(agent_name: &str, has_commands: bool) -> String {
+ if agent_name == "Zed Agent" {
+ format!("Message the {} β @ to include context", agent_name)
+ } else if has_commands {
+ format!(
+ "Message {} β @ to include context, / for commands",
+ agent_name
+ )
+ } else {
+ format!("Message {} β @ to include context", agent_name)
+ }
+}
+
impl Focusable for AcpThreadView {
fn focus_handle(&self, cx: &App) -> FocusHandle {
match self.thread_state {
@@ -638,15 +638,13 @@ impl AgentConfiguration {
let is_running = matches!(server_status, ContextServerStatus::Running);
let item_id = SharedString::from(context_server_id.0.clone());
- let is_from_extension = server_configuration
- .as_ref()
- .map(|config| {
- matches!(
- config.as_ref(),
- ContextServerConfiguration::Extension { .. }
- )
- })
- .unwrap_or(false);
+ // Servers without a configuration can only be provided by extensions.
+ let provided_by_extension = server_configuration.is_none_or(|config| {
+ matches!(
+ config.as_ref(),
+ ContextServerConfiguration::Extension { .. }
+ )
+ });
let error = if let ContextServerStatus::Error(error) = server_status.clone() {
Some(error)
@@ -660,7 +658,7 @@ impl AgentConfiguration {
.tools_for_server(&context_server_id)
.count();
- let (source_icon, source_tooltip) = if is_from_extension {
+ let (source_icon, source_tooltip) = if provided_by_extension {
(
IconName::ZedSrcExtension,
"This MCP server was installed from an extension.",
@@ -710,7 +708,6 @@ impl AgentConfiguration {
let fs = self.fs.clone();
let context_server_id = context_server_id.clone();
let language_registry = self.language_registry.clone();
- let context_server_store = self.context_server_store.clone();
let workspace = self.workspace.clone();
let context_server_registry = self.context_server_registry.clone();
@@ -752,23 +749,10 @@ impl AgentConfiguration {
.entry("Uninstall", None, {
let fs = fs.clone();
let context_server_id = context_server_id.clone();
- let context_server_store = context_server_store.clone();
let workspace = workspace.clone();
move |_, cx| {
- let is_provided_by_extension = context_server_store
- .read(cx)
- .configuration_for_server(&context_server_id)
- .as_ref()
- .map(|config| {
- matches!(
- config.as_ref(),
- ContextServerConfiguration::Extension { .. }
- )
- })
- .unwrap_or(false);
-
let uninstall_extension_task = match (
- is_provided_by_extension,
+ provided_by_extension,
resolve_extension_for_context_server(&context_server_id, cx),
) {
(true, Some((id, manifest))) => {
@@ -7,8 +7,10 @@ use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, builtin_profil
use editor::Editor;
use fs::Fs;
use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, prelude::*};
-use language_model::LanguageModel;
-use settings::Settings as _;
+use language_model::{LanguageModel, LanguageModelRegistry};
+use settings::{
+ LanguageModelProviderSetting, LanguageModelSelection, Settings as _, update_settings_file,
+};
use ui::{
KeyBinding, ListItem, ListItemSpacing, ListSeparator, Navigable, NavigableEntry, prelude::*,
};
@@ -16,6 +18,7 @@ use workspace::{ModalView, Workspace};
use crate::agent_configuration::manage_profiles_modal::profile_modal_header::ProfileModalHeader;
use crate::agent_configuration::tool_picker::{ToolPicker, ToolPickerDelegate};
+use crate::language_model_selector::{LanguageModelSelector, language_model_selector};
use crate::{AgentPanel, ManageProfiles};
enum Mode {
@@ -32,6 +35,11 @@ enum Mode {
tool_picker: Entity<ToolPicker>,
_subscription: Subscription,
},
+ ConfigureDefaultModel {
+ profile_id: AgentProfileId,
+ model_picker: Entity<LanguageModelSelector>,
+ _subscription: Subscription,
+ },
}
impl Mode {
@@ -83,6 +91,7 @@ pub struct ChooseProfileMode {
pub struct ViewProfileMode {
profile_id: AgentProfileId,
fork_profile: NavigableEntry,
+ configure_default_model: NavigableEntry,
configure_tools: NavigableEntry,
configure_mcps: NavigableEntry,
cancel_item: NavigableEntry,
@@ -180,6 +189,7 @@ impl ManageProfilesModal {
self.mode = Mode::ViewProfile(ViewProfileMode {
profile_id,
fork_profile: NavigableEntry::focusable(cx),
+ configure_default_model: NavigableEntry::focusable(cx),
configure_tools: NavigableEntry::focusable(cx),
configure_mcps: NavigableEntry::focusable(cx),
cancel_item: NavigableEntry::focusable(cx),
@@ -187,6 +197,83 @@ impl ManageProfilesModal {
self.focus_handle(cx).focus(window);
}
+ fn configure_default_model(
+ &mut self,
+ profile_id: AgentProfileId,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let fs = self.fs.clone();
+ let profile_id_for_closure = profile_id.clone();
+
+ let model_picker = cx.new(|cx| {
+ let fs = fs.clone();
+ let profile_id = profile_id_for_closure.clone();
+
+ language_model_selector(
+ {
+ let profile_id = profile_id.clone();
+ move |cx| {
+ let settings = AgentSettings::get_global(cx);
+
+ settings
+ .profiles
+ .get(&profile_id)
+ .and_then(|profile| profile.default_model.as_ref())
+ .and_then(|selection| {
+ let registry = LanguageModelRegistry::read_global(cx);
+ let provider_id = language_model::LanguageModelProviderId(
+ gpui::SharedString::from(selection.provider.0.clone()),
+ );
+ let provider = registry.provider(&provider_id)?;
+ let model = provider
+ .provided_models(cx)
+ .iter()
+ .find(|m| m.id().0 == selection.model.as_str())?
+ .clone();
+ Some(language_model::ConfiguredModel { provider, model })
+ })
+ }
+ },
+ move |model, cx| {
+ let provider = model.provider_id().0.to_string();
+ let model_id = model.id().0.to_string();
+ let profile_id = profile_id.clone();
+
+ update_settings_file(fs.clone(), cx, move |settings, _cx| {
+ let agent_settings = settings.agent.get_or_insert_default();
+ if let Some(profiles) = agent_settings.profiles.as_mut() {
+ if let Some(profile) = profiles.get_mut(profile_id.0.as_ref()) {
+ profile.default_model = Some(LanguageModelSelection {
+ provider: LanguageModelProviderSetting(provider.clone()),
+ model: model_id.clone(),
+ });
+ }
+ }
+ });
+ },
+ false, // Do not use popover styles for the model picker
+ window,
+ cx,
+ )
+ .modal(false)
+ });
+
+ let dismiss_subscription = cx.subscribe_in(&model_picker, window, {
+ let profile_id = profile_id.clone();
+ move |this, _picker, _: &DismissEvent, window, cx| {
+ this.view_profile(profile_id.clone(), window, cx);
+ }
+ });
+
+ self.mode = Mode::ConfigureDefaultModel {
+ profile_id,
+ model_picker,
+ _subscription: dismiss_subscription,
+ };
+ self.focus_handle(cx).focus(window);
+ }
+
fn configure_mcp_tools(
&mut self,
profile_id: AgentProfileId,
@@ -277,6 +364,7 @@ impl ManageProfilesModal {
Mode::ViewProfile(_) => {}
Mode::ConfigureTools { .. } => {}
Mode::ConfigureMcps { .. } => {}
+ Mode::ConfigureDefaultModel { .. } => {}
}
}
@@ -299,6 +387,9 @@ impl ManageProfilesModal {
Mode::ConfigureMcps { profile_id, .. } => {
self.view_profile(profile_id.clone(), window, cx)
}
+ Mode::ConfigureDefaultModel { profile_id, .. } => {
+ self.view_profile(profile_id.clone(), window, cx)
+ }
}
}
}
@@ -313,6 +404,7 @@ impl Focusable for ManageProfilesModal {
Mode::ViewProfile(_) => self.focus_handle.clone(),
Mode::ConfigureTools { tool_picker, .. } => tool_picker.focus_handle(cx),
Mode::ConfigureMcps { tool_picker, .. } => tool_picker.focus_handle(cx),
+ Mode::ConfigureDefaultModel { model_picker, .. } => model_picker.focus_handle(cx),
}
}
}
@@ -544,6 +636,47 @@ impl ManageProfilesModal {
}),
),
)
+ .child(
+ div()
+ .id("configure-default-model")
+ .track_focus(&mode.configure_default_model.focus_handle)
+ .on_action({
+ let profile_id = mode.profile_id.clone();
+ cx.listener(move |this, _: &menu::Confirm, window, cx| {
+ this.configure_default_model(
+ profile_id.clone(),
+ window,
+ cx,
+ );
+ })
+ })
+ .child(
+ ListItem::new("model-item")
+ .toggle_state(
+ mode.configure_default_model
+ .focus_handle
+ .contains_focused(window, cx),
+ )
+ .inset(true)
+ .spacing(ListItemSpacing::Sparse)
+ .start_slot(
+ Icon::new(IconName::ZedAssistant)
+ .size(IconSize::Small)
+ .color(Color::Muted),
+ )
+ .child(Label::new("Configure Default Model"))
+ .on_click({
+ let profile_id = mode.profile_id.clone();
+ cx.listener(move |this, _, window, cx| {
+ this.configure_default_model(
+ profile_id.clone(),
+ window,
+ cx,
+ );
+ })
+ }),
+ ),
+ )
.child(
div()
.id("configure-builtin-tools")
@@ -668,6 +801,7 @@ impl ManageProfilesModal {
.into_any_element(),
)
.entry(mode.fork_profile)
+ .entry(mode.configure_default_model)
.entry(mode.configure_tools)
.entry(mode.configure_mcps)
.entry(mode.cancel_item)
@@ -753,6 +887,29 @@ impl Render for ManageProfilesModal {
.child(go_back_item)
.into_any_element()
}
+ Mode::ConfigureDefaultModel {
+ profile_id,
+ model_picker,
+ ..
+ } => {
+ let profile_name = settings
+ .profiles
+ .get(profile_id)
+ .map(|profile| profile.name.clone())
+ .unwrap_or_else(|| "Unknown".into());
+
+ v_flex()
+ .pb_1()
+ .child(ProfileModalHeader::new(
+ format!("{profile_name} β Configure Default Model"),
+ Some(IconName::Ai),
+ ))
+ .child(ListSeparator)
+ .child(v_flex().w(rems(34.)).child(model_picker.clone()))
+ .child(ListSeparator)
+ .child(go_back_item)
+ .into_any_element()
+ }
Mode::ConfigureMcps {
profile_id,
tool_picker,
@@ -314,6 +314,7 @@ impl PickerDelegate for ToolPickerDelegate {
)
})
.collect(),
+ default_model: default_profile.default_model.clone(),
});
if let Some(server_id) = server_id {
@@ -47,6 +47,7 @@ impl AgentModelSelector {
}
}
},
+ true, // Use popover styles for picker
window,
cx,
)
@@ -19,14 +19,26 @@ pub type LanguageModelSelector = Picker<LanguageModelPickerDelegate>;
pub fn language_model_selector(
get_active_model: impl Fn(&App) -> Option<ConfiguredModel> + 'static,
on_model_changed: impl Fn(Arc<dyn LanguageModel>, &mut App) + 'static,
+ popover_styles: bool,
window: &mut Window,
cx: &mut Context<LanguageModelSelector>,
) -> LanguageModelSelector {
- let delegate = LanguageModelPickerDelegate::new(get_active_model, on_model_changed, window, cx);
- Picker::list(delegate, window, cx)
- .show_scrollbar(true)
- .width(rems(20.))
- .max_height(Some(rems(20.).into()))
+ let delegate = LanguageModelPickerDelegate::new(
+ get_active_model,
+ on_model_changed,
+ popover_styles,
+ window,
+ cx,
+ );
+
+ if popover_styles {
+ Picker::list(delegate, window, cx)
+ .show_scrollbar(true)
+ .width(rems(20.))
+ .max_height(Some(rems(20.).into()))
+ } else {
+ Picker::list(delegate, window, cx).show_scrollbar(true)
+ }
}
fn all_models(cx: &App) -> GroupedModels {
@@ -75,12 +87,14 @@ pub struct LanguageModelPickerDelegate {
selected_index: usize,
_authenticate_all_providers_task: Task<()>,
_subscriptions: Vec<Subscription>,
+ popover_styles: bool,
}
impl LanguageModelPickerDelegate {
fn new(
get_active_model: impl Fn(&App) -> Option<ConfiguredModel> + 'static,
on_model_changed: impl Fn(Arc<dyn LanguageModel>, &mut App) + 'static,
+ popover_styles: bool,
window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Self {
@@ -113,6 +127,7 @@ impl LanguageModelPickerDelegate {
}
},
)],
+ popover_styles,
}
}
@@ -530,6 +545,10 @@ impl PickerDelegate for LanguageModelPickerDelegate {
_window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Option<gpui::AnyElement> {
+ if !self.popover_styles {
+ return None;
+ }
+
Some(
h_flex()
.w_full()
@@ -15,8 +15,8 @@ use std::{
sync::{Arc, atomic::AtomicBool},
};
use ui::{
- DocumentationAside, DocumentationEdge, DocumentationSide, HighlightedLabel, LabelSize,
- ListItem, ListItemSpacing, PopoverMenuHandle, TintColor, Tooltip, prelude::*,
+ DocumentationAside, DocumentationEdge, DocumentationSide, HighlightedLabel, KeyBinding,
+ LabelSize, ListItem, ListItemSpacing, PopoverMenuHandle, TintColor, Tooltip, prelude::*,
};
/// Trait for types that can provide and manage agent profiles
@@ -81,6 +81,7 @@ impl ProfileSelector {
self.provider.clone(),
self.profiles.clone(),
cx.background_executor().clone(),
+ self.focus_handle.clone(),
cx,
);
@@ -207,6 +208,7 @@ pub(crate) struct ProfilePickerDelegate {
selected_index: usize,
query: String,
cancel: Option<Arc<AtomicBool>>,
+ focus_handle: FocusHandle,
}
impl ProfilePickerDelegate {
@@ -215,6 +217,7 @@ impl ProfilePickerDelegate {
provider: Arc<dyn ProfileProvider>,
profiles: AvailableProfiles,
background: BackgroundExecutor,
+ focus_handle: FocusHandle,
cx: &mut Context<ProfileSelector>,
) -> Self {
let candidates = Self::candidates_from(profiles);
@@ -231,6 +234,7 @@ impl ProfilePickerDelegate {
selected_index: 0,
query: String::new(),
cancel: None,
+ focus_handle,
};
this.selected_index = this
@@ -594,20 +598,26 @@ impl PickerDelegate for ProfilePickerDelegate {
_: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Option<gpui::AnyElement> {
+ let focus_handle = self.focus_handle.clone();
+
Some(
h_flex()
.w_full()
.border_t_1()
.border_color(cx.theme().colors().border_variant)
- .p_1()
- .gap_4()
- .justify_between()
+ .p_1p5()
.child(
Button::new("configure", "Configure")
- .icon(IconName::Settings)
- .icon_size(IconSize::Small)
- .icon_color(Color::Muted)
- .icon_position(IconPosition::Start)
+ .full_width()
+ .style(ButtonStyle::Outlined)
+ .key_binding(
+ KeyBinding::for_action_in(
+ &ManageProfiles::default(),
+ &focus_handle,
+ cx,
+ )
+ .map(|kb| kb.size(rems_from_px(12.))),
+ )
.on_click(|_, window, cx| {
window.dispatch_action(ManageProfiles::default().boxed_clone(), cx);
}),
@@ -659,20 +669,25 @@ mod tests {
is_builtin: true,
}];
- let delegate = ProfilePickerDelegate {
- fs: FakeFs::new(cx.executor()),
- provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
- background: cx.executor(),
- candidates,
- string_candidates: Arc::new(Vec::new()),
- filtered_entries: Vec::new(),
- selected_index: 0,
- query: String::new(),
- cancel: None,
- };
-
- let matches = Vec::new(); // No matches
- let _entries = delegate.entries_from_matches(matches);
+ cx.update(|cx| {
+ let focus_handle = cx.focus_handle();
+
+ let delegate = ProfilePickerDelegate {
+ fs: FakeFs::new(cx.background_executor().clone()),
+ provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
+ background: cx.background_executor().clone(),
+ candidates,
+ string_candidates: Arc::new(Vec::new()),
+ filtered_entries: Vec::new(),
+ selected_index: 0,
+ query: String::new(),
+ cancel: None,
+ focus_handle,
+ };
+
+ let matches = Vec::new(); // No matches
+ let _entries = delegate.entries_from_matches(matches);
+ });
}
#[gpui::test]
@@ -690,30 +705,35 @@ mod tests {
},
];
- let delegate = ProfilePickerDelegate {
- fs: FakeFs::new(cx.executor()),
- provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
- background: cx.executor(),
- candidates,
- string_candidates: Arc::new(Vec::new()),
- filtered_entries: vec![
- ProfilePickerEntry::Profile(ProfileMatchEntry {
- candidate_index: 0,
- positions: Vec::new(),
- }),
- ProfilePickerEntry::Profile(ProfileMatchEntry {
- candidate_index: 1,
- positions: Vec::new(),
- }),
- ],
- selected_index: 0,
- query: String::new(),
- cancel: None,
- };
-
- // Active profile should be found at index 0
- let active_index = delegate.index_of_profile(&AgentProfileId("write".into()));
- assert_eq!(active_index, Some(0));
+ cx.update(|cx| {
+ let focus_handle = cx.focus_handle();
+
+ let delegate = ProfilePickerDelegate {
+ fs: FakeFs::new(cx.background_executor().clone()),
+ provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
+ background: cx.background_executor().clone(),
+ candidates,
+ string_candidates: Arc::new(Vec::new()),
+ filtered_entries: vec![
+ ProfilePickerEntry::Profile(ProfileMatchEntry {
+ candidate_index: 0,
+ positions: Vec::new(),
+ }),
+ ProfilePickerEntry::Profile(ProfileMatchEntry {
+ candidate_index: 1,
+ positions: Vec::new(),
+ }),
+ ],
+ selected_index: 0,
+ query: String::new(),
+ cancel: None,
+ focus_handle,
+ };
+
+ // Active profile should be found at index 0
+ let active_index = delegate.index_of_profile(&AgentProfileId("write".into()));
+ assert_eq!(active_index, Some(0));
+ });
}
struct TestProfileProvider {
@@ -314,6 +314,7 @@ impl TextThreadEditor {
)
});
},
+ true, // Use popover styles for picker
window,
cx,
)
@@ -477,7 +478,7 @@ impl TextThreadEditor {
editor.insert(&format!("/{name}"), window, cx);
if command.accepts_arguments() {
editor.insert(" ", window, cx);
- editor.show_completions(&ShowCompletions::default(), window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
}
});
});
@@ -33,4 +33,9 @@ workspace.workspace = true
which.workspace = true
[dev-dependencies]
+ctor.workspace = true
+clock = { workspace = true, "features" = ["test-support"] }
+futures.workspace = true
gpui = { workspace = true, "features" = ["test-support"] }
+parking_lot.workspace = true
+zlog.workspace = true
@@ -1,12 +1,11 @@
use anyhow::{Context as _, Result};
-use client::{Client, TelemetrySettings};
-use db::RELEASE_CHANNEL;
+use client::Client;
use db::kvp::KEY_VALUE_STORE;
use gpui::{
App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, Global, SemanticVersion,
Task, Window, actions,
};
-use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
+use http_client::{HttpClient, HttpClientWithUrl};
use paths::remote_servers_dir;
use release_channel::{AppCommitSha, ReleaseChannel};
use serde::{Deserialize, Serialize};
@@ -41,22 +40,23 @@ actions!(
]
);
-#[derive(Serialize)]
-struct UpdateRequestBody {
- installation_id: Option<Arc<str>>,
- release_channel: Option<&'static str>,
- telemetry: bool,
- is_staff: Option<bool>,
- destination: &'static str,
-}
-
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum VersionCheckType {
Sha(AppCommitSha),
Semantic(SemanticVersion),
}
-#[derive(Clone)]
+#[derive(Serialize, Debug)]
+pub struct AssetQuery<'a> {
+ asset: &'a str,
+ os: &'a str,
+ arch: &'a str,
+ metrics_id: Option<&'a str>,
+ system_id: Option<&'a str>,
+ is_staff: Option<bool>,
+}
+
+#[derive(Clone, Debug)]
pub enum AutoUpdateStatus {
Idle,
Checking,
@@ -66,6 +66,31 @@ pub enum AutoUpdateStatus {
Errored { error: Arc<anyhow::Error> },
}
+impl PartialEq for AutoUpdateStatus {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (AutoUpdateStatus::Idle, AutoUpdateStatus::Idle) => true,
+ (AutoUpdateStatus::Checking, AutoUpdateStatus::Checking) => true,
+ (
+ AutoUpdateStatus::Downloading { version: v1 },
+ AutoUpdateStatus::Downloading { version: v2 },
+ ) => v1 == v2,
+ (
+ AutoUpdateStatus::Installing { version: v1 },
+ AutoUpdateStatus::Installing { version: v2 },
+ ) => v1 == v2,
+ (
+ AutoUpdateStatus::Updated { version: v1 },
+ AutoUpdateStatus::Updated { version: v2 },
+ ) => v1 == v2,
+ (AutoUpdateStatus::Errored { error: e1 }, AutoUpdateStatus::Errored { error: e2 }) => {
+ e1.to_string() == e2.to_string()
+ }
+ _ => false,
+ }
+ }
+}
+
impl AutoUpdateStatus {
pub fn is_updated(&self) -> bool {
matches!(self, Self::Updated { .. })
@@ -75,13 +100,13 @@ impl AutoUpdateStatus {
pub struct AutoUpdater {
status: AutoUpdateStatus,
current_version: SemanticVersion,
- http_client: Arc<HttpClientWithUrl>,
+ client: Arc<Client>,
pending_poll: Option<Task<Option<()>>>,
quit_subscription: Option<gpui::Subscription>,
}
-#[derive(Deserialize, Clone, Debug)]
-pub struct JsonRelease {
+#[derive(Deserialize, Serialize, Clone, Debug)]
+pub struct ReleaseAsset {
pub version: String,
pub url: String,
}
@@ -137,7 +162,7 @@ struct GlobalAutoUpdate(Option<Entity<AutoUpdater>>);
impl Global for GlobalAutoUpdate {}
-pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
+pub fn init(client: Arc<Client>, cx: &mut App) {
cx.observe_new(|workspace: &mut Workspace, _window, _cx| {
workspace.register_action(|_, action, window, cx| check(action, window, cx));
@@ -149,7 +174,7 @@ pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
let version = release_channel::AppVersion::global(cx);
let auto_updater = cx.new(|cx| {
- let updater = AutoUpdater::new(version, http_client, cx);
+ let updater = AutoUpdater::new(version, client, cx);
let poll_for_updates = ReleaseChannel::try_global(cx)
.map(|channel| channel.poll_for_updates())
@@ -233,7 +258,7 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut App) -> Option<()> {
let current_version = auto_updater.current_version;
let release_channel = release_channel.dev_name();
let path = format!("/releases/{release_channel}/{current_version}");
- let url = &auto_updater.http_client.build_url(&path);
+ let url = &auto_updater.client.http_client().build_url(&path);
cx.open_url(url);
}
ReleaseChannel::Nightly => {
@@ -296,11 +321,7 @@ impl AutoUpdater {
cx.default_global::<GlobalAutoUpdate>().0.clone()
}
- fn new(
- current_version: SemanticVersion,
- http_client: Arc<HttpClientWithUrl>,
- cx: &mut Context<Self>,
- ) -> Self {
+ fn new(current_version: SemanticVersion, client: Arc<Client>, cx: &mut Context<Self>) -> Self {
// On windows, executable files cannot be overwritten while they are
// running, so we must wait to overwrite the application until quitting
// or restarting. When quitting the app, we spawn the auto update helper
@@ -321,7 +342,7 @@ impl AutoUpdater {
Self {
status: AutoUpdateStatus::Idle,
current_version,
- http_client,
+ client,
pending_poll: None,
quit_subscription,
}
@@ -354,7 +375,7 @@ impl AutoUpdater {
cx.notify();
self.pending_poll = Some(cx.spawn(async move |this, cx| {
- let result = Self::update(this.upgrade()?, cx.clone()).await;
+ let result = Self::update(this.upgrade()?, cx).await;
this.update(cx, |this, cx| {
this.pending_poll = None;
if let Err(error) = result {
@@ -400,10 +421,10 @@ impl AutoUpdater {
// you can override this function. You should also update get_remote_server_release_url to return
// Ok(None).
pub async fn download_remote_server_release(
- os: &str,
- arch: &str,
release_channel: ReleaseChannel,
version: Option<SemanticVersion>,
+ os: &str,
+ arch: &str,
set_status: impl Fn(&str, &mut AsyncApp) + Send + 'static,
cx: &mut AsyncApp,
) -> Result<PathBuf> {
@@ -415,13 +436,13 @@ impl AutoUpdater {
})??;
set_status("Fetching remote server release", cx);
- let release = Self::get_release(
+ let release = Self::get_release_asset(
&this,
+ release_channel,
+ version,
"zed-remote-server",
os,
arch,
- version,
- Some(release_channel),
cx,
)
.await?;
@@ -432,7 +453,7 @@ impl AutoUpdater {
let version_path = platform_dir.join(format!("{}.gz", release.version));
smol::fs::create_dir_all(&platform_dir).await.ok();
- let client = this.read_with(cx, |this, _| this.http_client.clone())?;
+ let client = this.read_with(cx, |this, _| this.client.http_client())?;
if smol::fs::metadata(&version_path).await.is_err() {
log::info!(
@@ -440,19 +461,19 @@ impl AutoUpdater {
release.version
);
set_status("Downloading remote server", cx);
- download_remote_server_binary(&version_path, release, client, cx).await?;
+ download_remote_server_binary(&version_path, release, client).await?;
}
Ok(version_path)
}
pub async fn get_remote_server_release_url(
+ channel: ReleaseChannel,
+ version: Option<SemanticVersion>,
os: &str,
arch: &str,
- release_channel: ReleaseChannel,
- version: Option<SemanticVersion>,
cx: &mut AsyncApp,
- ) -> Result<Option<(String, String)>> {
+ ) -> Result<Option<String>> {
let this = cx.update(|cx| {
cx.default_global::<GlobalAutoUpdate>()
.0
@@ -460,108 +481,99 @@ impl AutoUpdater {
.context("auto-update not initialized")
})??;
- let release = Self::get_release(
- &this,
- "zed-remote-server",
- os,
- arch,
- version,
- Some(release_channel),
- cx,
- )
- .await?;
-
- let update_request_body = build_remote_server_update_request_body(cx)?;
- let body = serde_json::to_string(&update_request_body)?;
+ let release =
+ Self::get_release_asset(&this, channel, version, "zed-remote-server", os, arch, cx)
+ .await?;
- Ok(Some((release.url, body)))
+ Ok(Some(release.url))
}
- async fn get_release(
+ async fn get_release_asset(
this: &Entity<Self>,
+ release_channel: ReleaseChannel,
+ version: Option<SemanticVersion>,
asset: &str,
os: &str,
arch: &str,
- version: Option<SemanticVersion>,
- release_channel: Option<ReleaseChannel>,
cx: &mut AsyncApp,
- ) -> Result<JsonRelease> {
- let client = this.read_with(cx, |this, _| this.http_client.clone())?;
-
- if let Some(version) = version {
- let channel = release_channel.map(|c| c.dev_name()).unwrap_or("stable");
-
- let url = format!("/api/releases/{channel}/{version}/{asset}-{os}-{arch}.gz?update=1",);
-
- Ok(JsonRelease {
- version: version.to_string(),
- url: client.build_url(&url),
- })
+ ) -> Result<ReleaseAsset> {
+ let client = this.read_with(cx, |this, _| this.client.clone())?;
+
+ let (system_id, metrics_id, is_staff) = if client.telemetry().metrics_enabled() {
+ (
+ client.telemetry().system_id(),
+ client.telemetry().metrics_id(),
+ client.telemetry().is_staff(),
+ )
} else {
- let mut url_string = client.build_url(&format!(
- "/api/releases/latest?asset={}&os={}&arch={}",
- asset, os, arch
- ));
- if let Some(param) = release_channel.and_then(|c| c.release_query_param()) {
- url_string += "&";
- url_string += param;
- }
+ (None, None, None)
+ };
- let mut response = client.get(&url_string, Default::default(), true).await?;
- let mut body = Vec::new();
- response.body_mut().read_to_end(&mut body).await?;
+ let version = if let Some(version) = version {
+ version.to_string()
+ } else {
+ "latest".to_string()
+ };
+ let http_client = client.http_client();
+
+ let path = format!("/releases/{}/{}/asset", release_channel.dev_name(), version,);
+ let url = http_client.build_zed_cloud_url_with_query(
+ &path,
+ AssetQuery {
+ os,
+ arch,
+ asset,
+ metrics_id: metrics_id.as_deref(),
+ system_id: system_id.as_deref(),
+ is_staff: is_staff,
+ },
+ )?;
- anyhow::ensure!(
- response.status().is_success(),
- "failed to fetch release: {:?}",
- String::from_utf8_lossy(&body),
- );
+ let mut response = http_client
+ .get(url.as_str(), Default::default(), true)
+ .await?;
+ let mut body = Vec::new();
+ response.body_mut().read_to_end(&mut body).await?;
- serde_json::from_slice(body.as_slice()).with_context(|| {
- format!(
- "error deserializing release {:?}",
- String::from_utf8_lossy(&body),
- )
- })
- }
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "failed to fetch release: {:?}",
+ String::from_utf8_lossy(&body),
+ );
- async fn get_latest_release(
- this: &Entity<Self>,
- asset: &str,
- os: &str,
- arch: &str,
- release_channel: Option<ReleaseChannel>,
- cx: &mut AsyncApp,
- ) -> Result<JsonRelease> {
- Self::get_release(this, asset, os, arch, None, release_channel, cx).await
+ serde_json::from_slice(body.as_slice()).with_context(|| {
+ format!(
+ "error deserializing release {:?}",
+ String::from_utf8_lossy(&body),
+ )
+ })
}
- async fn update(this: Entity<Self>, mut cx: AsyncApp) -> Result<()> {
+ async fn update(this: Entity<Self>, cx: &mut AsyncApp) -> Result<()> {
let (client, installed_version, previous_status, release_channel) =
- this.read_with(&cx, |this, cx| {
+ this.read_with(cx, |this, cx| {
(
- this.http_client.clone(),
+ this.client.http_client(),
this.current_version,
this.status.clone(),
- ReleaseChannel::try_global(cx),
+ ReleaseChannel::try_global(cx).unwrap_or(ReleaseChannel::Stable),
)
})?;
Self::check_dependencies()?;
- this.update(&mut cx, |this, cx| {
+ this.update(cx, |this, cx| {
this.status = AutoUpdateStatus::Checking;
log::info!("Auto Update: checking for updates");
cx.notify();
})?;
let fetched_release_data =
- Self::get_latest_release(&this, "zed", OS, ARCH, release_channel, &mut cx).await?;
+ Self::get_release_asset(&this, release_channel, None, "zed", OS, ARCH, cx).await?;
let fetched_version = fetched_release_data.clone().version;
let app_commit_sha = cx.update(|cx| AppCommitSha::try_global(cx).map(|sha| sha.full()));
let newer_version = Self::check_if_fetched_version_is_newer(
- *RELEASE_CHANNEL,
+ release_channel,
app_commit_sha,
installed_version,
fetched_version,
@@ -569,7 +581,7 @@ impl AutoUpdater {
)?;
let Some(newer_version) = newer_version else {
- return this.update(&mut cx, |this, cx| {
+ return this.update(cx, |this, cx| {
let status = match previous_status {
AutoUpdateStatus::Updated { .. } => previous_status,
_ => AutoUpdateStatus::Idle,
@@ -579,7 +591,7 @@ impl AutoUpdater {
});
};
- this.update(&mut cx, |this, cx| {
+ this.update(cx, |this, cx| {
this.status = AutoUpdateStatus::Downloading {
version: newer_version.clone(),
};
@@ -588,21 +600,21 @@ impl AutoUpdater {
let installer_dir = InstallerDir::new().await?;
let target_path = Self::target_path(&installer_dir).await?;
- download_release(&target_path, fetched_release_data, client, &cx).await?;
+ download_release(&target_path, fetched_release_data, client).await?;
- this.update(&mut cx, |this, cx| {
+ this.update(cx, |this, cx| {
this.status = AutoUpdateStatus::Installing {
version: newer_version.clone(),
};
cx.notify();
})?;
- let new_binary_path = Self::install_release(installer_dir, target_path, &cx).await?;
+ let new_binary_path = Self::install_release(installer_dir, target_path, cx).await?;
if let Some(new_binary_path) = new_binary_path {
cx.update(|cx| cx.set_restart_path(new_binary_path))?;
}
- this.update(&mut cx, |this, cx| {
+ this.update(cx, |this, cx| {
this.set_should_show_update_notification(true, cx)
.detach_and_log_err(cx);
this.status = AutoUpdateStatus::Updated {
@@ -681,6 +693,12 @@ impl AutoUpdater {
target_path: PathBuf,
cx: &AsyncApp,
) -> Result<Option<PathBuf>> {
+ #[cfg(test)]
+ if let Some(test_install) =
+ cx.try_read_global::<tests::InstallOverride, _>(|g, _| g.0.clone())
+ {
+ return test_install(target_path, cx);
+ }
match OS {
"macos" => install_release_macos(&installer_dir, target_path, cx).await,
"linux" => install_release_linux(&installer_dir, target_path, cx).await,
@@ -731,16 +749,13 @@ impl AutoUpdater {
async fn download_remote_server_binary(
target_path: &PathBuf,
- release: JsonRelease,
+ release: ReleaseAsset,
client: Arc<HttpClientWithUrl>,
- cx: &AsyncApp,
) -> Result<()> {
let temp = tempfile::Builder::new().tempfile_in(remote_servers_dir())?;
let mut temp_file = File::create(&temp).await?;
- let update_request_body = build_remote_server_update_request_body(cx)?;
- let request_body = AsyncBody::from(serde_json::to_string(&update_request_body)?);
- let mut response = client.get(&release.url, request_body, true).await?;
+ let mut response = client.get(&release.url, Default::default(), true).await?;
anyhow::ensure!(
response.status().is_success(),
"failed to download remote server release: {:?}",
@@ -752,65 +767,19 @@ async fn download_remote_server_binary(
Ok(())
}
-fn build_remote_server_update_request_body(cx: &AsyncApp) -> Result<UpdateRequestBody> {
- let (installation_id, release_channel, telemetry_enabled, is_staff) = cx.update(|cx| {
- let telemetry = Client::global(cx).telemetry().clone();
- let is_staff = telemetry.is_staff();
- let installation_id = telemetry.installation_id();
- let release_channel =
- ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
- let telemetry_enabled = TelemetrySettings::get_global(cx).metrics;
-
- (
- installation_id,
- release_channel,
- telemetry_enabled,
- is_staff,
- )
- })?;
-
- Ok(UpdateRequestBody {
- installation_id,
- release_channel,
- telemetry: telemetry_enabled,
- is_staff,
- destination: "remote",
- })
-}
-
async fn download_release(
target_path: &Path,
- release: JsonRelease,
+ release: ReleaseAsset,
client: Arc<HttpClientWithUrl>,
- cx: &AsyncApp,
) -> Result<()> {
let mut target_file = File::create(&target_path).await?;
- let (installation_id, release_channel, telemetry_enabled, is_staff) = cx.update(|cx| {
- let telemetry = Client::global(cx).telemetry().clone();
- let is_staff = telemetry.is_staff();
- let installation_id = telemetry.installation_id();
- let release_channel =
- ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
- let telemetry_enabled = TelemetrySettings::get_global(cx).metrics;
-
- (
- installation_id,
- release_channel,
- telemetry_enabled,
- is_staff,
- )
- })?;
-
- let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody {
- installation_id,
- release_channel,
- telemetry: telemetry_enabled,
- is_staff,
- destination: "local",
- })?);
-
- let mut response = client.get(&release.url, request_body, true).await?;
+ let mut response = client.get(&release.url, Default::default(), true).await?;
+ anyhow::ensure!(
+ response.status().is_success(),
+ "failed to download update: {:?}",
+ response.status()
+ );
smol::io::copy(response.body_mut(), &mut target_file).await?;
log::info!("downloaded update. path:{:?}", target_path);
@@ -936,26 +905,15 @@ async fn install_release_macos(
#[cfg(target_os = "windows")]
async fn cleanup_windows() -> Result<()> {
- use util::ResultExt;
-
let parent = std::env::current_exe()?
.parent()
.context("No parent dir for Zed.exe")?
.to_owned();
// keep in sync with crates/auto_update_helper/src/updater.rs
- smol::fs::remove_dir(parent.join("updates"))
- .await
- .context("failed to remove updates dir")
- .log_err();
- smol::fs::remove_dir(parent.join("install"))
- .await
- .context("failed to remove install dir")
- .log_err();
- smol::fs::remove_dir(parent.join("old"))
- .await
- .context("failed to remove old version dir")
- .log_err();
+ _ = smol::fs::remove_dir(parent.join("updates")).await;
+ _ = smol::fs::remove_dir(parent.join("install")).await;
+ _ = smol::fs::remove_dir(parent.join("old")).await;
Ok(())
}
@@ -1010,11 +968,33 @@ pub async fn finalize_auto_update_on_quit() {
#[cfg(test)]
mod tests {
+ use client::Client;
+ use clock::FakeSystemClock;
+ use futures::channel::oneshot;
use gpui::TestAppContext;
+ use http_client::{FakeHttpClient, Response};
use settings::default_settings;
+ use std::{
+ rc::Rc,
+ sync::{
+ Arc,
+ atomic::{self, AtomicBool},
+ },
+ };
+ use tempfile::tempdir;
+
+ #[ctor::ctor]
+ fn init_logger() {
+ zlog::init_test();
+ }
use super::*;
+ pub(super) struct InstallOverride(
+ pub Rc<dyn Fn(PathBuf, &AsyncApp) -> Result<Option<PathBuf>>>,
+ );
+ impl Global for InstallOverride {}
+
#[gpui::test]
fn test_auto_update_defaults_to_true(cx: &mut TestAppContext) {
cx.update(|cx| {
@@ -1030,6 +1010,115 @@ mod tests {
});
}
+ #[gpui::test]
+ async fn test_auto_update_downloads(cx: &mut TestAppContext) {
+ cx.background_executor.allow_parking();
+ zlog::init_test();
+ let release_available = Arc::new(AtomicBool::new(false));
+
+ let (dmg_tx, dmg_rx) = oneshot::channel::<String>();
+
+ cx.update(|cx| {
+ settings::init(cx);
+
+ let current_version = SemanticVersion::new(0, 100, 0);
+ release_channel::init_test(current_version, ReleaseChannel::Stable, cx);
+
+ let clock = Arc::new(FakeSystemClock::new());
+ let release_available = Arc::clone(&release_available);
+ let dmg_rx = Arc::new(parking_lot::Mutex::new(Some(dmg_rx)));
+ let fake_client_http = FakeHttpClient::create(move |req| {
+ let release_available = release_available.load(atomic::Ordering::Relaxed);
+ let dmg_rx = dmg_rx.clone();
+ async move {
+ if req.uri().path() == "/releases/stable/latest/asset" {
+ if release_available {
+ return Ok(Response::builder().status(200).body(
+ r#"{"version":"0.100.1","url":"https://test.example/new-download"}"#.into()
+ ).unwrap());
+ } else {
+ return Ok(Response::builder().status(200).body(
+ r#"{"version":"0.100.0","url":"https://test.example/old-download"}"#.into()
+ ).unwrap());
+ }
+ } else if req.uri().path() == "/new-download" {
+ return Ok(Response::builder().status(200).body({
+ let dmg_rx = dmg_rx.lock().take().unwrap();
+ dmg_rx.await.unwrap().into()
+ }).unwrap());
+ }
+ Ok(Response::builder().status(404).body("".into()).unwrap())
+ }
+ });
+ let client = Client::new(clock, fake_client_http, cx);
+ crate::init(client, cx);
+ });
+
+ let auto_updater = cx.update(|cx| AutoUpdater::get(cx).expect("auto updater should exist"));
+
+ cx.background_executor.run_until_parked();
+
+ auto_updater.read_with(cx, |updater, _| {
+ assert_eq!(updater.status(), AutoUpdateStatus::Idle);
+ assert_eq!(updater.current_version(), SemanticVersion::new(0, 100, 0));
+ });
+
+ release_available.store(true, atomic::Ordering::SeqCst);
+ cx.background_executor.advance_clock(POLL_INTERVAL);
+ cx.background_executor.run_until_parked();
+
+ loop {
+ cx.background_executor.timer(Duration::from_millis(0)).await;
+ cx.run_until_parked();
+ let status = auto_updater.read_with(cx, |updater, _| updater.status());
+ if !matches!(status, AutoUpdateStatus::Idle) {
+ break;
+ }
+ }
+ let status = auto_updater.read_with(cx, |updater, _| updater.status());
+ assert_eq!(
+ status,
+ AutoUpdateStatus::Downloading {
+ version: VersionCheckType::Semantic(SemanticVersion::new(0, 100, 1))
+ }
+ );
+
+ dmg_tx.send("<fake-zed-update>".to_owned()).unwrap();
+
+ let tmp_dir = Arc::new(tempdir().unwrap());
+
+ cx.update(|cx| {
+ let tmp_dir = tmp_dir.clone();
+ cx.set_global(InstallOverride(Rc::new(move |target_path, _cx| {
+ let tmp_dir = tmp_dir.clone();
+ let dest_path = tmp_dir.path().join("zed");
+ std::fs::copy(&target_path, &dest_path)?;
+ Ok(Some(dest_path))
+ })));
+ });
+
+ loop {
+ cx.background_executor.timer(Duration::from_millis(0)).await;
+ cx.run_until_parked();
+ let status = auto_updater.read_with(cx, |updater, _| updater.status());
+ if !matches!(status, AutoUpdateStatus::Downloading { .. }) {
+ break;
+ }
+ }
+ let status = auto_updater.read_with(cx, |updater, _| updater.status());
+ assert_eq!(
+ status,
+ AutoUpdateStatus::Updated {
+ version: VersionCheckType::Semantic(SemanticVersion::new(0, 100, 1))
+ }
+ );
+ let will_restart = cx.expect_restart();
+ cx.update(|cx| cx.restart());
+ let path = will_restart.await.unwrap().unwrap();
+ assert_eq!(path, tmp_dir.path().join("zed"));
+ assert_eq!(std::fs::read_to_string(path).unwrap(), "<fake-zed-update>");
+ }
+
#[test]
fn test_stable_does_not_update_when_fetched_version_is_not_higher() {
let release_channel = ReleaseChannel::Stable;
@@ -1,6 +1,6 @@
use std::{
- cell::LazyCell,
path::Path,
+ sync::LazyLock,
time::{Duration, Instant},
};
@@ -13,8 +13,8 @@ use windows::Win32::{
use crate::windows_impl::WM_JOB_UPDATED;
pub(crate) struct Job {
- pub apply: Box<dyn Fn(&Path) -> Result<()>>,
- pub rollback: Box<dyn Fn(&Path) -> Result<()>>,
+ pub apply: Box<dyn Fn(&Path) -> Result<()> + Send + Sync>,
+ pub rollback: Box<dyn Fn(&Path) -> Result<()> + Send + Sync>,
}
impl Job {
@@ -154,10 +154,8 @@ impl Job {
}
}
-// app is single threaded
#[cfg(not(test))]
-#[allow(clippy::declare_interior_mutable_const)]
-pub(crate) const JOBS: LazyCell<[Job; 22]> = LazyCell::new(|| {
+pub(crate) static JOBS: LazyLock<[Job; 22]> = LazyLock::new(|| {
fn p(value: &str) -> &Path {
Path::new(value)
}
@@ -206,10 +204,8 @@ pub(crate) const JOBS: LazyCell<[Job; 22]> = LazyCell::new(|| {
]
});
-// app is single threaded
#[cfg(test)]
-#[allow(clippy::declare_interior_mutable_const)]
-pub(crate) const JOBS: LazyCell<[Job; 9]> = LazyCell::new(|| {
+pub(crate) static JOBS: LazyLock<[Job; 9]> = LazyLock::new(|| {
fn p(value: &str) -> &Path {
Path::new(value)
}
@@ -1487,7 +1487,7 @@ impl Client {
let url = self
.http
- .build_zed_cloud_url("/internal/users/impersonate", &[])?;
+ .build_zed_cloud_url("/internal/users/impersonate")?;
let request = Request::post(url.as_str())
.header("Content-Type", "application/json")
.header("Authorization", format!("Bearer {api_token}"))
@@ -435,7 +435,7 @@ impl Telemetry {
Some(project_types)
}
- fn report_event(self: &Arc<Self>, event: Event) {
+ fn report_event(self: &Arc<Self>, mut event: Event) {
let mut state = self.state.lock();
// RUST_LOG=telemetry=trace to debug telemetry events
log::trace!(target: "telemetry", "{:?}", event);
@@ -444,6 +444,12 @@ impl Telemetry {
return;
}
+ match &mut event {
+ Event::Flexible(event) => event
+ .event_properties
+ .insert("event_source".into(), "zed".into()),
+ };
+
if state.flush_events_task.is_none() {
let this = self.clone();
state.flush_events_task = Some(self.executor.spawn(async move {
@@ -62,7 +62,7 @@ impl CloudApiClient {
let request = self.build_request(
Request::builder().method(Method::GET).uri(
self.http_client
- .build_zed_cloud_url("/client/users/me", &[])?
+ .build_zed_cloud_url("/client/users/me")?
.as_ref(),
),
AsyncBody::default(),
@@ -89,7 +89,7 @@ impl CloudApiClient {
pub fn connect(&self, cx: &App) -> Result<Task<Result<Connection>>> {
let mut connect_url = self
.http_client
- .build_zed_cloud_url("/client/users/connect", &[])?;
+ .build_zed_cloud_url("/client/users/connect")?;
connect_url
.set_scheme(match connect_url.scheme() {
"https" => "wss",
@@ -123,7 +123,7 @@ impl CloudApiClient {
.method(Method::POST)
.uri(
self.http_client
- .build_zed_cloud_url("/client/llm_tokens", &[])?
+ .build_zed_cloud_url("/client/llm_tokens")?
.as_ref(),
)
.when_some(system_id, |builder, system_id| {
@@ -154,7 +154,7 @@ impl CloudApiClient {
let request = build_request(
Request::builder().method(Method::GET).uri(
self.http_client
- .build_zed_cloud_url("/client/users/me", &[])?
+ .build_zed_cloud_url("/client/users/me")?
.as_ref(),
),
AsyncBody::default(),
@@ -73,6 +73,7 @@ pub enum PromptFormat {
MarkedExcerpt,
LabeledSections,
NumLinesUniDiff,
+ OldTextNewText,
/// Prompt format intended for use via zeta_cli
OnlySnippets,
}
@@ -100,6 +101,7 @@ impl std::fmt::Display for PromptFormat {
PromptFormat::LabeledSections => write!(f, "Labeled Sections"),
PromptFormat::OnlySnippets => write!(f, "Only Snippets"),
PromptFormat::NumLinesUniDiff => write!(f, "Numbered Lines / Unified Diff"),
+ PromptFormat::OldTextNewText => write!(f, "Old Text / New Text"),
}
}
}
@@ -56,50 +56,98 @@ const LABELED_SECTIONS_INSTRUCTIONS: &str = indoc! {r#"
const NUMBERED_LINES_INSTRUCTIONS: &str = indoc! {r#"
# Instructions
- You are a code completion assistant helping a programmer finish their work. Your task is to:
+ You are an edit prediction agent in a code editor.
+ Your job is to predict the next edit that the user will make,
+ based on their last few edits and their current cursor location.
- 1. Analyze the edit history to understand what the programmer is trying to achieve
- 2. Identify any incomplete refactoring or changes that need to be finished
- 3. Make the remaining edits that a human programmer would logically make next
- 4. Apply systematic changes consistently across the entire codebase - if you see a pattern starting, complete it everywhere.
+ ## Output Format
- Focus on:
- - Understanding the intent behind the changes (e.g., improving error handling, refactoring APIs, fixing bugs)
- - Completing any partially-applied changes across the codebase
- - Ensuring consistency with the programming style and patterns already established
- - Making edits that maintain or improve code quality
- - If the programmer started refactoring one instance of a pattern, find and update ALL similar instances
- - Don't write a lot of code if you're not sure what to do
-
- Rules:
- - Do not just mechanically apply patterns - reason about what changes make sense given the context and the programmer's apparent goals.
- - Do not just fix syntax errors - look for the broader refactoring pattern and apply it systematically throughout the code.
- - Write the edits in the unified diff format as shown in the example.
-
- # Example output:
+ You must briefly explain your understanding of the user's goal, in one
+ or two sentences, and then specify their next edit in the form of a
+ unified diff, like this:
```
--- a/src/myapp/cli.py
+++ b/src/myapp/cli.py
- @@ -1,3 +1,3 @@
- -
- -
- -import sys
- +import json
+ @@ ... @@
+ import os
+ import time
+ import sys
+ +from constants import LOG_LEVEL_WARNING
+ @@ ... @@
+ config.headless()
+ config.set_interactive(False)
+ -config.set_log_level(LOG_L)
+ +config.set_log_level(LOG_LEVEL_WARNING)
+ config.set_use_color(True)
```
- # Edit History:
+ ## Edit History
"#};
const UNIFIED_DIFF_REMINDER: &str = indoc! {"
---
- Please analyze the edit history and the files, then provide the unified diff for your predicted edits.
+ Analyze the edit history and the files, then provide the unified diff for your predicted edits.
Do not include the cursor marker in your output.
- If you're editing multiple files, be sure to reflect filename in the hunk's header.
+ Your diff should include edited file paths in its file headers (lines beginning with `---` and `+++`).
+ Do not include line numbers in the hunk headers, use `@@ ... @@`.
+ Removed lines begin with `-`.
+ Added lines begin with `+`.
+ Context lines begin with an extra space.
+ Context and removed lines are used to match the target edit location, so make sure to include enough of them
+ to uniquely identify it amongst all excerpts of code provided.
"};
+const XML_TAGS_INSTRUCTIONS: &str = indoc! {r#"
+ # Instructions
+
+ You are an edit prediction agent in a code editor.
+ Your job is to predict the next edit that the user will make,
+ based on their last few edits and their current cursor location.
+
+ # Output Format
+
+ You must briefly explain your understanding of the user's goal, in one
+ or two sentences, and then specify their next edit, using the following
+ XML format:
+
+ <edits path="my-project/src/myapp/cli.py">
+ <old_text>
+ OLD TEXT 1 HERE
+ </old_text>
+ <new_text>
+ NEW TEXT 1 HERE
+ </new_text>
+
+ <old_text>
+ OLD TEXT 2 HERE
+ </old_text>
+ <new_text>
+ NEW TEXT 2 HERE
+ </new_text>
+ </edits>
+
+ - Specify the file to edit using the `path` attribute.
+ - Use `<old_text>` and `<new_text>` tags to replace content
+ - `<old_text>` must exactly match existing file content, including indentation
+ - `<old_text>` cannot be empty
+ - Do not escape quotes, newlines, or other characters within tags
+ - Always close all tags properly
+ - Don't include the <|user_cursor|> marker in your output.
+
+ # Edit History:
+
+"#};
+
+const OLD_TEXT_NEW_TEXT_REMINDER: &str = indoc! {r#"
+ ---
+
+ Remember that the edits in the edit history have already been applied.
+ The files are currently as shown in the Code Excerpts section.
+"#};
+
pub fn build_prompt(
request: &predict_edits_v3::PredictEditsRequest,
) -> Result<(String, SectionLabels)> {
@@ -121,8 +169,9 @@ pub fn build_prompt(
EDITABLE_REGION_END_MARKER_WITH_NEWLINE,
),
],
- PromptFormat::LabeledSections => vec![(request.cursor_point, CURSOR_MARKER)],
- PromptFormat::NumLinesUniDiff => {
+ PromptFormat::LabeledSections
+ | PromptFormat::NumLinesUniDiff
+ | PromptFormat::OldTextNewText => {
vec![(request.cursor_point, CURSOR_MARKER)]
}
PromptFormat::OnlySnippets => vec![],
@@ -132,46 +181,32 @@ pub fn build_prompt(
PromptFormat::MarkedExcerpt => MARKED_EXCERPT_INSTRUCTIONS.to_string(),
PromptFormat::LabeledSections => LABELED_SECTIONS_INSTRUCTIONS.to_string(),
PromptFormat::NumLinesUniDiff => NUMBERED_LINES_INSTRUCTIONS.to_string(),
- // only intended for use via zeta_cli
+ PromptFormat::OldTextNewText => XML_TAGS_INSTRUCTIONS.to_string(),
PromptFormat::OnlySnippets => String::new(),
};
if request.events.is_empty() {
prompt.push_str("(No edit history)\n\n");
} else {
- prompt.push_str(
- "The following are the latest edits made by the user, from earlier to later.\n\n",
- );
+ prompt.push_str("Here are the latest edits made by the user, from earlier to later.\n\n");
push_events(&mut prompt, &request.events);
}
+ prompt.push_str(indoc! {"
+ # Code Excerpts
+
+ The cursor marker <|user_cursor|> indicates the current user cursor position.
+ The files are in their current state; edits from the edit history have been applied.
+ "});
+
if request.prompt_format == PromptFormat::NumLinesUniDiff {
- if request.referenced_declarations.is_empty() {
- prompt.push_str(indoc! {"
- # File under the cursor:
-
- The cursor marker <|user_cursor|> indicates the current user cursor position.
- The file is in current state, edits from edit history have been applied.
- We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.
-
- "});
- } else {
- // Note: This hasn't been trained on yet
- prompt.push_str(indoc! {"
- # Code Excerpts:
-
- The cursor marker <|user_cursor|> indicates the current user cursor position.
- Other excerpts of code from the project have been included as context based on their similarity to the code under the cursor.
- Context excerpts are not guaranteed to be relevant, so use your own judgement.
- Files are in their current state, edits from edit history have been applied.
- We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.
-
- "});
- }
- } else {
- prompt.push_str("\n## Code\n\n");
+ prompt.push_str(indoc! {"
+ We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.
+ "});
}
+ prompt.push('\n');
+
let mut section_labels = Default::default();
if !request.referenced_declarations.is_empty() || !request.signatures.is_empty() {
@@ -198,8 +233,14 @@ pub fn build_prompt(
}
}
- if request.prompt_format == PromptFormat::NumLinesUniDiff {
- prompt.push_str(UNIFIED_DIFF_REMINDER);
+ match request.prompt_format {
+ PromptFormat::NumLinesUniDiff => {
+ prompt.push_str(UNIFIED_DIFF_REMINDER);
+ }
+ PromptFormat::OldTextNewText => {
+ prompt.push_str(OLD_TEXT_NEW_TEXT_REMINDER);
+ }
+ _ => {}
}
Ok((prompt, section_labels))
@@ -624,6 +665,7 @@ impl<'a> SyntaxBasedPrompt<'a> {
match self.request.prompt_format {
PromptFormat::MarkedExcerpt
| PromptFormat::OnlySnippets
+ | PromptFormat::OldTextNewText
| PromptFormat::NumLinesUniDiff => {
if range.start.0 > 0 && !skipped_last_snippet {
output.push_str("…\n");
@@ -291,29 +291,6 @@ CREATE TABLE IF NOT EXISTS "channel_chat_participants" (
CREATE INDEX "index_channel_chat_participants_on_channel_id" ON "channel_chat_participants" ("channel_id");
-CREATE TABLE IF NOT EXISTS "channel_messages" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "sender_id" INTEGER NOT NULL REFERENCES users (id),
- "body" TEXT NOT NULL,
- "sent_at" TIMESTAMP,
- "edited_at" TIMESTAMP,
- "nonce" BLOB NOT NULL,
- "reply_to_message_id" INTEGER DEFAULT NULL
-);
-
-CREATE INDEX "index_channel_messages_on_channel_id" ON "channel_messages" ("channel_id");
-
-CREATE UNIQUE INDEX "index_channel_messages_on_sender_id_nonce" ON "channel_messages" ("sender_id", "nonce");
-
-CREATE TABLE "channel_message_mentions" (
- "message_id" INTEGER NOT NULL REFERENCES channel_messages (id) ON DELETE CASCADE,
- "start_offset" INTEGER NOT NULL,
- "end_offset" INTEGER NOT NULL,
- "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- PRIMARY KEY (message_id, start_offset)
-);
-
CREATE TABLE "channel_members" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
@@ -408,15 +385,6 @@ CREATE TABLE "observed_buffer_edits" (
CREATE UNIQUE INDEX "index_observed_buffers_user_and_buffer_id" ON "observed_buffer_edits" ("user_id", "buffer_id");
-CREATE TABLE IF NOT EXISTS "observed_channel_messages" (
- "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "channel_message_id" INTEGER NOT NULL,
- PRIMARY KEY (user_id, channel_id)
-);
-
-CREATE UNIQUE INDEX "index_observed_channel_messages_user_and_channel_id" ON "observed_channel_messages" ("user_id", "channel_id");
-
CREATE TABLE "notification_kinds" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"name" VARCHAR NOT NULL
@@ -0,0 +1,3 @@
+drop table observed_channel_messages;
+drop table channel_message_mentions;
+drop table channel_messages;
@@ -66,40 +66,6 @@ impl Database {
.await
}
- /// Returns all users flagged as staff.
- pub async fn get_staff_users(&self) -> Result<Vec<user::Model>> {
- self.transaction(|tx| async {
- let tx = tx;
- Ok(user::Entity::find()
- .filter(user::Column::Admin.eq(true))
- .all(&*tx)
- .await?)
- })
- .await
- }
-
- /// Returns a user by email address. There are no access checks here, so this should only be used internally.
- pub async fn get_user_by_email(&self, email: &str) -> Result<Option<User>> {
- self.transaction(|tx| async move {
- Ok(user::Entity::find()
- .filter(user::Column::EmailAddress.eq(email))
- .one(&*tx)
- .await?)
- })
- .await
- }
-
- /// Returns a user by GitHub user ID. There are no access checks here, so this should only be used internally.
- pub async fn get_user_by_github_user_id(&self, github_user_id: i32) -> Result<Option<User>> {
- self.transaction(|tx| async move {
- Ok(user::Entity::find()
- .filter(user::Column::GithubUserId.eq(github_user_id))
- .one(&*tx)
- .await?)
- })
- .await
- }
-
/// Returns a user by GitHub login. There are no access checks here, so this should only be used internally.
pub async fn get_user_by_github_login(&self, github_login: &str) -> Result<Option<User>> {
self.transaction(|tx| async move {
@@ -270,39 +236,6 @@ impl Database {
.await
}
- /// Sets "accepted_tos_at" on the user to the given timestamp.
- pub async fn set_user_accepted_tos_at(
- &self,
- id: UserId,
- accepted_tos_at: Option<DateTime>,
- ) -> Result<()> {
- self.transaction(|tx| async move {
- user::Entity::update_many()
- .filter(user::Column::Id.eq(id))
- .set(user::ActiveModel {
- accepted_tos_at: ActiveValue::set(accepted_tos_at),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
- Ok(())
- })
- .await
- }
-
- /// hard delete the user.
- pub async fn destroy_user(&self, id: UserId) -> Result<()> {
- self.transaction(|tx| async move {
- access_token::Entity::delete_many()
- .filter(access_token::Column::UserId.eq(id))
- .exec(&*tx)
- .await?;
- user::Entity::delete_by_id(id).exec(&*tx).await?;
- Ok(())
- })
- .await
- }
-
/// Find users where github_login ILIKE name_query.
pub async fn fuzzy_search_users(&self, name_query: &str, limit: u32) -> Result<Vec<User>> {
self.transaction(|tx| async {
@@ -341,14 +274,4 @@ impl Database {
result.push('%');
result
}
-
- pub async fn get_users_missing_github_user_created_at(&self) -> Result<Vec<user::Model>> {
- self.transaction(|tx| async move {
- Ok(user::Entity::find()
- .filter(user::Column::GithubUserCreatedAt.is_null())
- .all(&*tx)
- .await?)
- })
- .await
- }
}
@@ -6,8 +6,6 @@ pub mod channel;
pub mod channel_buffer_collaborator;
pub mod channel_chat_participant;
pub mod channel_member;
-pub mod channel_message;
-pub mod channel_message_mention;
pub mod contact;
pub mod contributor;
pub mod embedding;
@@ -18,7 +16,6 @@ pub mod language_server;
pub mod notification;
pub mod notification_kind;
pub mod observed_buffer_edits;
-pub mod observed_channel_messages;
pub mod project;
pub mod project_collaborator;
pub mod project_repository;
@@ -1,47 +0,0 @@
-use crate::db::{ChannelId, MessageId, UserId};
-use sea_orm::entity::prelude::*;
-use time::PrimitiveDateTime;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "channel_messages")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: MessageId,
- pub channel_id: ChannelId,
- pub sender_id: UserId,
- pub body: String,
- pub sent_at: PrimitiveDateTime,
- pub edited_at: Option<PrimitiveDateTime>,
- pub nonce: Uuid,
- pub reply_to_message_id: Option<MessageId>,
-}
-
-impl ActiveModelBehavior for ActiveModel {}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::channel::Entity",
- from = "Column::ChannelId",
- to = "super::channel::Column::Id"
- )]
- Channel,
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::SenderId",
- to = "super::user::Column::Id"
- )]
- Sender,
-}
-
-impl Related<super::channel::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Channel.def()
- }
-}
-
-impl Related<super::user::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Sender.def()
- }
-}
@@ -1,43 +0,0 @@
-use crate::db::{MessageId, UserId};
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "channel_message_mentions")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub message_id: MessageId,
- #[sea_orm(primary_key)]
- pub start_offset: i32,
- pub end_offset: i32,
- pub user_id: UserId,
-}
-
-impl ActiveModelBehavior for ActiveModel {}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::channel_message::Entity",
- from = "Column::MessageId",
- to = "super::channel_message::Column::Id"
- )]
- Message,
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::UserId",
- to = "super::user::Column::Id"
- )]
- MentionedUser,
-}
-
-impl Related<super::channel::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Message.def()
- }
-}
-
-impl Related<super::user::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::MentionedUser.def()
- }
-}
@@ -1,41 +0,0 @@
-use crate::db::{ChannelId, MessageId, UserId};
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "observed_channel_messages")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub user_id: UserId,
- pub channel_id: ChannelId,
- pub channel_message_id: MessageId,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::channel::Entity",
- from = "Column::ChannelId",
- to = "super::channel::Column::Id"
- )]
- Channel,
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::UserId",
- to = "super::user::Column::Id"
- )]
- User,
-}
-
-impl Related<super::channel::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Channel.def()
- }
-}
-
-impl Related<super::user::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::User.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -6,7 +6,6 @@ mod db_tests;
#[cfg(target_os = "macos")]
mod embedding_tests;
mod extension_tests;
-mod user_tests;
use crate::migrations::run_database_migrations;
@@ -1,7 +1,7 @@
use super::*;
use crate::test_both_dbs;
use chrono::Utc;
-use pretty_assertions::{assert_eq, assert_ne};
+use pretty_assertions::assert_eq;
use std::sync::Arc;
test_both_dbs!(
@@ -457,53 +457,6 @@ async fn test_add_contacts(db: &Arc<Database>) {
);
}
-test_both_dbs!(
- test_metrics_id,
- test_metrics_id_postgres,
- test_metrics_id_sqlite
-);
-
-async fn test_metrics_id(db: &Arc<Database>) {
- let NewUserResult {
- user_id: user1,
- metrics_id: metrics_id1,
- ..
- } = db
- .create_user(
- "person1@example.com",
- None,
- false,
- NewUserParams {
- github_login: "person1".into(),
- github_user_id: 101,
- },
- )
- .await
- .unwrap();
- let NewUserResult {
- user_id: user2,
- metrics_id: metrics_id2,
- ..
- } = db
- .create_user(
- "person2@example.com",
- None,
- false,
- NewUserParams {
- github_login: "person2".into(),
- github_user_id: 102,
- },
- )
- .await
- .unwrap();
-
- assert_eq!(db.get_user_metrics_id(user1).await.unwrap(), metrics_id1);
- assert_eq!(db.get_user_metrics_id(user2).await.unwrap(), metrics_id2);
- assert_eq!(metrics_id1.len(), 36);
- assert_eq!(metrics_id2.len(), 36);
- assert_ne!(metrics_id1, metrics_id2);
-}
-
test_both_dbs!(
test_project_count,
test_project_count_postgres,
@@ -1,96 +0,0 @@
-use chrono::Utc;
-
-use crate::{
- db::{Database, NewUserParams},
- test_both_dbs,
-};
-use std::sync::Arc;
-
-test_both_dbs!(
- test_accepted_tos,
- test_accepted_tos_postgres,
- test_accepted_tos_sqlite
-);
-
-async fn test_accepted_tos(db: &Arc<Database>) {
- let user_id = db
- .create_user(
- "user1@example.com",
- None,
- false,
- NewUserParams {
- github_login: "user1".to_string(),
- github_user_id: 1,
- },
- )
- .await
- .unwrap()
- .user_id;
-
- let user = db.get_user_by_id(user_id).await.unwrap().unwrap();
- assert!(user.accepted_tos_at.is_none());
-
- let accepted_tos_at = Utc::now().naive_utc();
- db.set_user_accepted_tos_at(user_id, Some(accepted_tos_at))
- .await
- .unwrap();
-
- let user = db.get_user_by_id(user_id).await.unwrap().unwrap();
- assert!(user.accepted_tos_at.is_some());
- assert_eq!(user.accepted_tos_at, Some(accepted_tos_at));
-
- db.set_user_accepted_tos_at(user_id, None).await.unwrap();
-
- let user = db.get_user_by_id(user_id).await.unwrap().unwrap();
- assert!(user.accepted_tos_at.is_none());
-}
-
-test_both_dbs!(
- test_destroy_user_cascade_deletes_access_tokens,
- test_destroy_user_cascade_deletes_access_tokens_postgres,
- test_destroy_user_cascade_deletes_access_tokens_sqlite
-);
-
-async fn test_destroy_user_cascade_deletes_access_tokens(db: &Arc<Database>) {
- let user_id = db
- .create_user(
- "user1@example.com",
- Some("user1"),
- false,
- NewUserParams {
- github_login: "user1".to_string(),
- github_user_id: 12345,
- },
- )
- .await
- .unwrap()
- .user_id;
-
- let user = db.get_user_by_id(user_id).await.unwrap();
- assert!(user.is_some());
-
- let token_1_id = db
- .create_access_token(user_id, None, "token-1", 10)
- .await
- .unwrap();
-
- let token_2_id = db
- .create_access_token(user_id, None, "token-2", 10)
- .await
- .unwrap();
-
- let token_1 = db.get_access_token(token_1_id).await;
- let token_2 = db.get_access_token(token_2_id).await;
- assert!(token_1.is_ok());
- assert!(token_2.is_ok());
-
- db.destroy_user(user_id).await.unwrap();
-
- let user = db.get_user_by_id(user_id).await.unwrap();
- assert!(user.is_none());
-
- let token_1 = db.get_access_token(token_1_id).await;
- let token_2 = db.get_access_token(token_2_id).await;
- assert!(token_1.is_err());
- assert!(token_2.is_err());
-}
@@ -564,6 +564,20 @@ impl ProjectDiagnosticsEditor {
blocks.extend(more);
}
+ let cmp_excerpts = |buffer_snapshot: &BufferSnapshot,
+ a: &ExcerptRange<text::Anchor>,
+ b: &ExcerptRange<text::Anchor>| {
+ let context_start = || a.context.start.cmp(&b.context.start, buffer_snapshot);
+ let context_end = || a.context.end.cmp(&b.context.end, buffer_snapshot);
+ let primary_start = || a.primary.start.cmp(&b.primary.start, buffer_snapshot);
+ let primary_end = || a.primary.end.cmp(&b.primary.end, buffer_snapshot);
+ context_start()
+ .then_with(context_end)
+ .then_with(primary_start)
+ .then_with(primary_end)
+ .then(cmp::Ordering::Greater)
+ };
+
let mut excerpt_ranges: Vec<ExcerptRange<_>> = this.update(cx, |this, cx| {
this.multibuffer.update(cx, |multi_buffer, cx| {
let is_dirty = multi_buffer
@@ -575,10 +589,12 @@ impl ProjectDiagnosticsEditor {
.excerpts_for_buffer(buffer_id, cx)
.into_iter()
.map(|(_, range)| range)
+ .sorted_by(|a, b| cmp_excerpts(&buffer_snapshot, a, b))
.collect(),
}
})
})?;
+
let mut result_blocks = vec![None; excerpt_ranges.len()];
let context_lines = cx.update(|_, cx| multibuffer_context_lines(cx))?;
for b in blocks {
@@ -592,40 +608,14 @@ impl ProjectDiagnosticsEditor {
buffer_snapshot = cx.update(|_, cx| buffer.read(cx).snapshot())?;
let initial_range = buffer_snapshot.anchor_after(b.initial_range.start)
..buffer_snapshot.anchor_before(b.initial_range.end);
-
- let bin_search = |probe: &ExcerptRange<text::Anchor>| {
- let context_start = || {
- probe
- .context
- .start
- .cmp(&excerpt_range.start, &buffer_snapshot)
- };
- let context_end =
- || probe.context.end.cmp(&excerpt_range.end, &buffer_snapshot);
- let primary_start = || {
- probe
- .primary
- .start
- .cmp(&initial_range.start, &buffer_snapshot)
- };
- let primary_end =
- || probe.primary.end.cmp(&initial_range.end, &buffer_snapshot);
- context_start()
- .then_with(context_end)
- .then_with(primary_start)
- .then_with(primary_end)
- .then(cmp::Ordering::Greater)
+ let excerpt_range = ExcerptRange {
+ context: excerpt_range,
+ primary: initial_range,
};
let i = excerpt_ranges
- .binary_search_by(bin_search)
+ .binary_search_by(|probe| cmp_excerpts(&buffer_snapshot, probe, &excerpt_range))
.unwrap_or_else(|i| i);
- excerpt_ranges.insert(
- i,
- ExcerptRange {
- context: excerpt_range,
- primary: initial_range,
- },
- );
+ excerpt_ranges.insert(i, excerpt_range);
result_blocks.insert(i, Some(b));
}
@@ -379,11 +379,12 @@ impl Render for EditPredictionButton {
})
});
- let this = cx.entity();
+ let this = cx.weak_entity();
let mut popover_menu = PopoverMenu::new("zeta")
.menu(move |window, cx| {
- Some(this.update(cx, |this, cx| this.build_zeta_context_menu(window, cx)))
+ this.update(cx, |this, cx| this.build_zeta_context_menu(window, cx))
+ .ok()
})
.anchor(Corner::BottomRight)
.with_handle(self.popover_menu_handle.clone());
@@ -213,15 +213,6 @@ pub struct ExpandExcerptsDown {
pub(super) lines: u32,
}
-/// Shows code completion suggestions at the cursor position.
-#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
-#[action(namespace = editor)]
-#[serde(deny_unknown_fields)]
-pub struct ShowCompletions {
- #[serde(default)]
- pub(super) trigger: Option<String>,
-}
-
/// Handles text input in the editor.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
@@ -736,6 +727,8 @@ actions!(
SelectToStartOfParagraph,
/// Extends selection up.
SelectUp,
+ /// Shows code completion suggestions at the cursor position.
+ ShowCompletions,
/// Shows the system character palette.
ShowCharacterPalette,
/// Shows edit prediction at cursor.
@@ -252,8 +252,17 @@ enum MarkdownCacheKey {
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CompletionsMenuSource {
+ /// Show all completions (words, snippets, LSP)
Normal,
+ /// Show only snippets (not words or LSP)
+ ///
+ /// Used after typing a non-word character
+ SnippetsOnly,
+ /// Tab stops within a snippet that have a predefined finite set of choices
SnippetChoices,
+ /// Show only words (not snippets or LSP)
+ ///
+ /// Used when word completions are explicitly triggered
Words { ignore_threshold: bool },
}
@@ -19,7 +19,7 @@ use std::{
cell::RefCell,
cmp::{self, Ordering},
fmt::Debug,
- ops::{Deref, DerefMut, Range, RangeBounds, RangeInclusive},
+ ops::{Deref, DerefMut, Not, Range, RangeBounds, RangeInclusive},
sync::{
Arc,
atomic::{AtomicUsize, Ordering::SeqCst},
@@ -1879,18 +1879,14 @@ impl Iterator for BlockRows<'_> {
}
let transform = self.transforms.item()?;
- if let Some(block) = transform.block.as_ref() {
- if block.is_replacement() && self.transforms.start().0 == self.output_row {
- if matches!(block, Block::FoldedBuffer { .. }) {
- Some(RowInfo::default())
- } else {
- Some(self.input_rows.next().unwrap())
- }
- } else {
- Some(RowInfo::default())
- }
+ if transform.block.as_ref().is_none_or(|block| {
+ block.is_replacement()
+ && self.transforms.start().0 == self.output_row
+ && matches!(block, Block::FoldedBuffer { .. }).not()
+ }) {
+ self.input_rows.next()
} else {
- Some(self.input_rows.next().unwrap())
+ Some(RowInfo::default())
}
}
}
@@ -965,7 +965,7 @@ impl<'a> Iterator for WrapChunks<'a> {
}
if self.input_chunk.text.is_empty() {
- self.input_chunk = self.input_chunks.next().unwrap();
+ self.input_chunk = self.input_chunks.next()?;
}
let mut input_len = 0;
@@ -74,7 +74,7 @@ use ::git::{
blame::{BlameEntry, ParsedCommitMessage},
status::FileStatus,
};
-use aho_corasick::AhoCorasick;
+use aho_corasick::{AhoCorasick, AhoCorasickBuilder, BuildError};
use anyhow::{Context as _, Result, anyhow};
use blink_manager::BlinkManager;
use buffer_diff::DiffHunkStatus;
@@ -117,8 +117,8 @@ use language::{
AutoindentMode, BlockCommentConfig, BracketMatch, BracketPair, Buffer, BufferRow,
BufferSnapshot, Capability, CharClassifier, CharKind, CharScopeContext, CodeLabel, CursorShape,
DiagnosticEntryRef, DiffOptions, EditPredictionsMode, EditPreview, HighlightedText, IndentKind,
- IndentSize, Language, OffsetRangeExt, Point, Runnable, RunnableRange, Selection, SelectionGoal,
- TextObject, TransactionId, TreeSitterOptions, WordsQuery,
+ IndentSize, Language, OffsetRangeExt, OutlineItem, Point, Runnable, RunnableRange, Selection,
+ SelectionGoal, TextObject, TransactionId, TreeSitterOptions, WordsQuery,
language_settings::{
self, LspInsertMode, RewrapBehavior, WordsCompletionMode, all_language_settings,
language_settings,
@@ -1183,12 +1183,14 @@ pub struct Editor {
hide_mouse_mode: HideMouseMode,
pub change_list: ChangeList,
inline_value_cache: InlineValueCache,
+
selection_drag_state: SelectionDragState,
colors: Option<LspColorData>,
post_scroll_update: Task<()>,
refresh_colors_task: Task<()>,
inlay_hints: Option<LspInlayHintData>,
folding_newlines: Task<()>,
+ select_next_is_case_sensitive: Option<bool>,
pub lookup_key: Option<Box<dyn Any + Send + Sync>>,
}
@@ -1764,6 +1766,51 @@ impl Editor {
Editor::new_internal(mode, buffer, project, None, window, cx)
}
+ pub fn sticky_headers(&self, cx: &App) -> Option<Vec<OutlineItem<Anchor>>> {
+ let multi_buffer = self.buffer().read(cx);
+ let multi_buffer_snapshot = multi_buffer.snapshot(cx);
+ let multi_buffer_visible_start = self
+ .scroll_manager
+ .anchor()
+ .anchor
+ .to_point(&multi_buffer_snapshot);
+ let max_row = multi_buffer_snapshot.max_point().row;
+
+ let start_row = (multi_buffer_visible_start.row).min(max_row);
+ let end_row = (multi_buffer_visible_start.row + 10).min(max_row);
+
+ if let Some((excerpt_id, buffer_id, buffer)) = multi_buffer.read(cx).as_singleton() {
+ let outline_items = buffer
+ .outline_items_containing(
+ Point::new(start_row, 0)..Point::new(end_row, 0),
+ true,
+ self.style().map(|style| style.syntax.as_ref()),
+ )
+ .into_iter()
+ .map(|outline_item| OutlineItem {
+ depth: outline_item.depth,
+ range: Anchor::range_in_buffer(*excerpt_id, buffer_id, outline_item.range),
+ source_range_for_text: Anchor::range_in_buffer(
+ *excerpt_id,
+ buffer_id,
+ outline_item.source_range_for_text,
+ ),
+ text: outline_item.text,
+ highlight_ranges: outline_item.highlight_ranges,
+ name_ranges: outline_item.name_ranges,
+ body_range: outline_item
+ .body_range
+ .map(|range| Anchor::range_in_buffer(*excerpt_id, buffer_id, range)),
+ annotation_range: outline_item
+ .annotation_range
+ .map(|range| Anchor::range_in_buffer(*excerpt_id, buffer_id, range)),
+ });
+ return Some(outline_items.collect());
+ }
+
+ None
+ }
+
fn new_internal(
mode: EditorMode,
multi_buffer: Entity<MultiBuffer>,
@@ -2287,6 +2334,7 @@ impl Editor {
selection_drag_state: SelectionDragState::None,
folding_newlines: Task::ready(()),
lookup_key: None,
+ select_next_is_case_sensitive: None,
};
if is_minimap {
@@ -3216,7 +3264,7 @@ impl Editor {
};
if continue_showing {
- self.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ self.open_or_update_completions_menu(None, None, false, window, cx);
} else {
self.hide_context_menu(window, cx);
}
@@ -3401,6 +3449,21 @@ impl Editor {
Subscription::join(other_subscription, this_subscription)
}
+ fn unfold_buffers_with_selections(&mut self, cx: &mut Context<Self>) {
+ if self.buffer().read(cx).is_singleton() {
+ return;
+ }
+ let snapshot = self.buffer.read(cx).snapshot(cx);
+ let buffer_ids: HashSet<BufferId> = self
+ .selections
+ .disjoint_anchor_ranges()
+ .flat_map(|range| snapshot.buffer_ids_for_range(range))
+ .collect();
+ for buffer_id in buffer_ids {
+ self.unfold_buffer(buffer_id, cx);
+ }
+ }
+
/// Changes selections using the provided mutation function. Changes to `self.selections` occur
/// immediately, but when run within `transact` or `with_selection_effects_deferred` other
/// effects of selection change occur at the end of the transaction.
@@ -4142,6 +4205,8 @@ impl Editor {
self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
+ self.unfold_buffers_with_selections(cx);
+
let selections = self.selections.all_adjusted(&self.display_snapshot(cx));
let mut bracket_inserted = false;
let mut edits = Vec::new();
@@ -5051,57 +5116,18 @@ impl Editor {
ignore_threshold: false,
}),
None,
- window,
- cx,
- );
- }
- Some(CompletionsMenuSource::Normal)
- | Some(CompletionsMenuSource::SnippetChoices)
- | None
- if self.is_completion_trigger(
- text,
trigger_in_words,
- completions_source.is_some(),
- cx,
- ) =>
- {
- self.show_completions(
- &ShowCompletions {
- trigger: Some(text.to_owned()).filter(|x| !x.is_empty()),
- },
window,
cx,
- )
- }
- _ => {
- self.hide_context_menu(window, cx);
+ );
}
- }
- }
-
- fn is_completion_trigger(
- &self,
- text: &str,
- trigger_in_words: bool,
- menu_is_open: bool,
- cx: &mut Context<Self>,
- ) -> bool {
- let position = self.selections.newest_anchor().head();
- let Some(buffer) = self.buffer.read(cx).buffer_for_anchor(position, cx) else {
- return false;
- };
-
- if let Some(completion_provider) = &self.completion_provider {
- completion_provider.is_completion_trigger(
- &buffer,
- position.text_anchor,
- text,
- trigger_in_words,
- menu_is_open,
+ _ => self.open_or_update_completions_menu(
+ None,
+ Some(text.to_owned()).filter(|x| !x.is_empty()),
+ true,
+ window,
cx,
- )
- } else {
- false
+ ),
}
}
@@ -5379,6 +5405,7 @@ impl Editor {
ignore_threshold: true,
}),
None,
+ false,
window,
cx,
);
@@ -5386,17 +5413,18 @@ impl Editor {
pub fn show_completions(
&mut self,
- options: &ShowCompletions,
+ _: &ShowCompletions,
window: &mut Window,
cx: &mut Context<Self>,
) {
- self.open_or_update_completions_menu(None, options.trigger.as_deref(), window, cx);
+ self.open_or_update_completions_menu(None, None, false, window, cx);
}
fn open_or_update_completions_menu(
&mut self,
requested_source: Option<CompletionsMenuSource>,
- trigger: Option<&str>,
+ trigger: Option<String>,
+ trigger_in_words: bool,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -5404,6 +5432,15 @@ impl Editor {
return;
}
+ let completions_source = self
+ .context_menu
+ .borrow()
+ .as_ref()
+ .and_then(|menu| match menu {
+ CodeContextMenu::Completions(completions_menu) => Some(completions_menu.source),
+ CodeContextMenu::CodeActions(_) => None,
+ });
+
let multibuffer_snapshot = self.buffer.read(cx).read(cx);
// Typically `start` == `end`, but with snippet tabstop choices the default choice is
@@ -5451,7 +5488,8 @@ impl Editor {
ignore_word_threshold = ignore_threshold;
None
}
- Some(CompletionsMenuSource::SnippetChoices) => {
+ Some(CompletionsMenuSource::SnippetChoices)
+ | Some(CompletionsMenuSource::SnippetsOnly) => {
log::error!("bug: SnippetChoices requested_source is not handled");
None
}
@@ -5465,13 +5503,19 @@ impl Editor {
.as_ref()
.is_none_or(|provider| provider.filter_completions());
+ let was_snippets_only = matches!(
+ completions_source,
+ Some(CompletionsMenuSource::SnippetsOnly)
+ );
+
if let Some(CodeContextMenu::Completions(menu)) = self.context_menu.borrow_mut().as_mut() {
if filter_completions {
menu.filter(query.clone(), provider.clone(), window, cx);
}
// When `is_incomplete` is false, no need to re-query completions when the current query
// is a suffix of the initial query.
- if !menu.is_incomplete {
+ let was_complete = !menu.is_incomplete;
+ if was_complete && !was_snippets_only {
// If the new query is a suffix of the old query (typing more characters) and
// the previous result was complete, the existing completions can be filtered.
//
@@ -5495,23 +5539,6 @@ impl Editor {
}
};
- let trigger_kind = match trigger {
- Some(trigger) if buffer.read(cx).completion_triggers().contains(trigger) => {
- CompletionTriggerKind::TRIGGER_CHARACTER
- }
- _ => CompletionTriggerKind::INVOKED,
- };
- let completion_context = CompletionContext {
- trigger_character: trigger.and_then(|trigger| {
- if trigger_kind == CompletionTriggerKind::TRIGGER_CHARACTER {
- Some(String::from(trigger))
- } else {
- None
- }
- }),
- trigger_kind,
- };
-
let Anchor {
excerpt_id: buffer_excerpt_id,
text_anchor: buffer_position,
@@ -5564,54 +5591,88 @@ impl Editor {
.as_ref()
.is_none_or(|query| !query.chars().any(|c| c.is_digit(10)));
- let omit_word_completions = !self.word_completions_enabled
- || (!ignore_word_threshold
- && match &query {
- Some(query) => query.chars().count() < completion_settings.words_min_length,
- None => completion_settings.words_min_length != 0,
- });
-
- let (mut words, provider_responses) = match &provider {
- Some(provider) => {
- let provider_responses = provider.completions(
- buffer_excerpt_id,
+ let load_provider_completions = provider.as_ref().is_some_and(|provider| {
+ trigger.as_ref().is_none_or(|trigger| {
+ provider.is_completion_trigger(
&buffer,
- buffer_position,
- completion_context,
- window,
+ position.text_anchor,
+ trigger,
+ trigger_in_words,
+ completions_source.is_some(),
cx,
- );
+ )
+ })
+ });
- let words = match (omit_word_completions, completion_settings.words) {
- (true, _) | (_, WordsCompletionMode::Disabled) => {
- Task::ready(BTreeMap::default())
- }
- (false, WordsCompletionMode::Enabled | WordsCompletionMode::Fallback) => cx
- .background_spawn(async move {
- buffer_snapshot.words_in_range(WordsQuery {
- fuzzy_contents: None,
- range: word_search_range,
- skip_digits,
- })
- }),
- };
+ let provider_responses = if let Some(provider) = &provider
+ && load_provider_completions
+ {
+ let trigger_character =
+ trigger.filter(|trigger| buffer.read(cx).completion_triggers().contains(trigger));
+ let completion_context = CompletionContext {
+ trigger_kind: match &trigger_character {
+ Some(_) => CompletionTriggerKind::TRIGGER_CHARACTER,
+ None => CompletionTriggerKind::INVOKED,
+ },
+ trigger_character,
+ };
- (words, provider_responses)
- }
- None => {
- let words = if omit_word_completions {
- Task::ready(BTreeMap::default())
- } else {
- cx.background_spawn(async move {
- buffer_snapshot.words_in_range(WordsQuery {
- fuzzy_contents: None,
- range: word_search_range,
- skip_digits,
- })
- })
- };
- (words, Task::ready(Ok(Vec::new())))
- }
+ provider.completions(
+ buffer_excerpt_id,
+ &buffer,
+ buffer_position,
+ completion_context,
+ window,
+ cx,
+ )
+ } else {
+ Task::ready(Ok(Vec::new()))
+ };
+
+ let load_word_completions = if !self.word_completions_enabled {
+ false
+ } else if requested_source
+ == Some(CompletionsMenuSource::Words {
+ ignore_threshold: true,
+ })
+ {
+ true
+ } else {
+ load_provider_completions
+ && completion_settings.words != WordsCompletionMode::Disabled
+ && (ignore_word_threshold || {
+ let words_min_length = completion_settings.words_min_length;
+ // check whether word has at least `words_min_length` characters
+ let query_chars = query.iter().flat_map(|q| q.chars());
+ query_chars.take(words_min_length).count() == words_min_length
+ })
+ };
+
+ let mut words = if load_word_completions {
+ cx.background_spawn(async move {
+ buffer_snapshot.words_in_range(WordsQuery {
+ fuzzy_contents: None,
+ range: word_search_range,
+ skip_digits,
+ })
+ })
+ } else {
+ Task::ready(BTreeMap::default())
+ };
+
+ let snippets = if let Some(provider) = &provider
+ && provider.show_snippets()
+ && let Some(project) = self.project()
+ {
+ project.update(cx, |project, cx| {
+ snippet_completions(project, &buffer, buffer_position, cx)
+ })
+ } else {
+ Task::ready(Ok(CompletionResponse {
+ completions: Vec::new(),
+ display_options: Default::default(),
+ is_incomplete: false,
+ }))
};
let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order;
@@ -5669,6 +5730,13 @@ impl Editor {
confirm: None,
}));
+ completions.extend(
+ snippets
+ .await
+ .into_iter()
+ .flat_map(|response| response.completions),
+ );
+
let menu = if completions.is_empty() {
None
} else {
@@ -5680,7 +5748,11 @@ impl Editor {
.map(|workspace| workspace.read(cx).app_state().languages.clone());
let menu = CompletionsMenu::new(
id,
- requested_source.unwrap_or(CompletionsMenuSource::Normal),
+ requested_source.unwrap_or(if load_provider_completions {
+ CompletionsMenuSource::Normal
+ } else {
+ CompletionsMenuSource::SnippetsOnly
+ }),
sort_completions,
show_completion_documentation,
position,
@@ -6010,7 +6082,7 @@ impl Editor {
.as_ref()
.is_some_and(|confirm| confirm(intent, window, cx));
if show_new_completions_on_confirm {
- self.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ self.open_or_update_completions_menu(None, None, false, window, cx);
}
let provider = self.completion_provider.as_ref()?;
@@ -12806,6 +12878,10 @@ impl Editor {
});
}
+ // π€ | .. | show_in_menu |
+ // | .. | true true
+ // | had_edit_prediction | false true
+
let trigger_in_words =
this.show_edit_predictions_in_menu() || !had_active_edit_prediction;
@@ -14588,7 +14664,7 @@ impl Editor {
.collect::<String>();
let is_empty = query.is_empty();
let select_state = SelectNextState {
- query: AhoCorasick::new(&[query])?,
+ query: self.build_query(&[query], cx)?,
wordwise: true,
done: is_empty,
};
@@ -14598,7 +14674,7 @@ impl Editor {
}
} else if let Some(selected_text) = selected_text {
self.select_next_state = Some(SelectNextState {
- query: AhoCorasick::new(&[selected_text])?,
+ query: self.build_query(&[selected_text], cx)?,
wordwise: false,
done: false,
});
@@ -14806,7 +14882,7 @@ impl Editor {
.collect::<String>();
let is_empty = query.is_empty();
let select_state = SelectNextState {
- query: AhoCorasick::new(&[query.chars().rev().collect::<String>()])?,
+ query: self.build_query(&[query.chars().rev().collect::<String>()], cx)?,
wordwise: true,
done: is_empty,
};
@@ -14816,7 +14892,8 @@ impl Editor {
}
} else if let Some(selected_text) = selected_text {
self.select_prev_state = Some(SelectNextState {
- query: AhoCorasick::new(&[selected_text.chars().rev().collect::<String>()])?,
+ query: self
+ .build_query(&[selected_text.chars().rev().collect::<String>()], cx)?,
wordwise: false,
done: false,
});
@@ -14826,6 +14903,25 @@ impl Editor {
Ok(())
}
+ /// Builds an `AhoCorasick` automaton from the provided patterns, while
+ /// setting the case sensitivity based on the global
+ /// `SelectNextCaseSensitive` setting, if set, otherwise based on the
+ /// editor's settings.
+ fn build_query<I, P>(&self, patterns: I, cx: &Context<Self>) -> Result<AhoCorasick, BuildError>
+ where
+ I: IntoIterator<Item = P>,
+ P: AsRef<[u8]>,
+ {
+ let case_sensitive = self.select_next_is_case_sensitive.map_or_else(
+ || EditorSettings::get_global(cx).search.case_sensitive,
+ |value| value,
+ );
+
+ let mut builder = AhoCorasickBuilder::new();
+ builder.ascii_case_insensitive(!case_sensitive);
+ builder.build(patterns)
+ }
+
pub fn find_next_match(
&mut self,
_: &FindNextMatch,
@@ -18800,10 +18896,17 @@ impl Editor {
if self.buffer().read(cx).is_singleton() || self.is_buffer_folded(buffer_id, cx) {
return;
}
+
let folded_excerpts = self.buffer().read(cx).excerpts_for_buffer(buffer_id, cx);
self.display_map.update(cx, |display_map, cx| {
display_map.fold_buffers([buffer_id], cx)
});
+
+ let snapshot = self.display_snapshot(cx);
+ self.selections.change_with(&snapshot, |selections| {
+ selections.remove_selections_from_buffer(buffer_id);
+ });
+
cx.emit(EditorEvent::BufferFoldToggled {
ids: folded_excerpts.iter().map(|&(id, _)| id).collect(),
folded: true,
@@ -23013,6 +23116,10 @@ pub trait CompletionProvider {
fn filter_completions(&self) -> bool {
true
}
+
+ fn show_snippets(&self) -> bool {
+ false
+ }
}
pub trait CodeActionProvider {
@@ -23273,16 +23380,8 @@ impl CompletionProvider for Entity<Project> {
cx: &mut Context<Editor>,
) -> Task<Result<Vec<CompletionResponse>>> {
self.update(cx, |project, cx| {
- let snippets = snippet_completions(project, buffer, buffer_position, cx);
- let project_completions = project.completions(buffer, buffer_position, options, cx);
- cx.background_spawn(async move {
- let mut responses = project_completions.await?;
- let snippets = snippets.await?;
- if !snippets.completions.is_empty() {
- responses.push(snippets);
- }
- Ok(responses)
- })
+ let task = project.completions(buffer, buffer_position, options, cx);
+ cx.background_spawn(task)
})
}
@@ -23354,6 +23453,10 @@ impl CompletionProvider for Entity<Project> {
buffer.completion_triggers().contains(text)
}
+
+ fn show_snippets(&self) -> bool {
+ true
+ }
}
impl SemanticsProvider for Entity<Project> {
@@ -33,6 +33,7 @@ pub struct EditorSettings {
pub horizontal_scroll_margin: f32,
pub scroll_sensitivity: f32,
pub fast_scroll_sensitivity: f32,
+ pub sticky_scroll: StickyScroll,
pub relative_line_numbers: RelativeLineNumbers,
pub seed_search_query_from_cursor: SeedQuerySetting,
pub use_smartcase_search: bool,
@@ -65,6 +66,11 @@ pub struct Jupyter {
pub enabled: bool,
}
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct StickyScroll {
+ pub enabled: bool,
+}
+
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Toolbar {
pub breadcrumbs: bool,
@@ -156,10 +162,15 @@ pub struct DragAndDropSelection {
pub struct SearchSettings {
/// Whether to show the project search button in the status bar.
pub button: bool,
+ /// Whether to only match on whole words.
pub whole_word: bool,
+ /// Whether to match case sensitively.
pub case_sensitive: bool,
+ /// Whether to include gitignored files in search results.
pub include_ignored: bool,
+ /// Whether to interpret the search query as a regular expression.
pub regex: bool,
+ /// Whether to center the cursor on each search match when navigating.
pub center_on_match: bool,
}
@@ -185,6 +196,7 @@ impl Settings for EditorSettings {
let toolbar = editor.toolbar.unwrap();
let search = editor.search.unwrap();
let drag_and_drop_selection = editor.drag_and_drop_selection.unwrap();
+ let sticky_scroll = editor.sticky_scroll.unwrap();
Self {
cursor_blink: editor.cursor_blink.unwrap(),
cursor_shape: editor.cursor_shape.map(Into::into),
@@ -235,6 +247,9 @@ impl Settings for EditorSettings {
horizontal_scroll_margin: editor.horizontal_scroll_margin.unwrap(),
scroll_sensitivity: editor.scroll_sensitivity.unwrap(),
fast_scroll_sensitivity: editor.fast_scroll_sensitivity.unwrap(),
+ sticky_scroll: StickyScroll {
+ enabled: sticky_scroll.enabled.unwrap(),
+ },
relative_line_numbers: editor.relative_line_numbers.unwrap(),
seed_search_query_from_cursor: editor.seed_search_query_from_cursor.unwrap(),
use_smartcase_search: editor.use_smartcase_search.unwrap(),
@@ -3,6 +3,7 @@ use crate::{
JoinLines,
code_context_menus::CodeContextMenu,
edit_prediction_tests::FakeEditPredictionProvider,
+ element::StickyHeader,
linked_editing_ranges::LinkedEditingRanges,
scroll::scroll_amount::ScrollAmount,
test::{
@@ -43,8 +44,8 @@ use project::{
};
use serde_json::{self, json};
use settings::{
- AllLanguageSettingsContent, IndentGuideBackgroundColoring, IndentGuideColoring,
- ProjectSettingsContent,
+ AllLanguageSettingsContent, EditorSettingsContent, IndentGuideBackgroundColoring,
+ IndentGuideColoring, ProjectSettingsContent, SearchSettingsContent,
};
use std::{cell::RefCell, future::Future, rc::Rc, sync::atomic::AtomicBool, time::Instant};
use std::{
@@ -8313,8 +8314,15 @@ async fn test_add_selection_above_below_multi_cursor_existing_state(cx: &mut Tes
#[gpui::test]
async fn test_select_next(cx: &mut TestAppContext) {
init_test(cx, |_| {});
-
let mut cx = EditorTestContext::new(cx).await;
+
+ // Enable case sensitive search.
+ update_test_editor_settings(&mut cx, |settings| {
+ let mut search_settings = SearchSettingsContent::default();
+ search_settings.case_sensitive = Some(true);
+ settings.search = Some(search_settings);
+ });
+
cx.set_state("abc\nΛabc abc\ndefabc\nabc");
cx.update_editor(|e, window, cx| e.select_next(&SelectNext::default(), window, cx))
@@ -8345,14 +8353,41 @@ async fn test_select_next(cx: &mut TestAppContext) {
cx.update_editor(|e, window, cx| e.select_next(&SelectNext::default(), window, cx))
.unwrap();
cx.assert_editor_state("abc\nΒ«ΛabcΒ» Β«ΛabcΒ»\ndefabc\nabc");
+
+ // Test case sensitivity
+ cx.set_state("Β«ΛfooΒ»\nFOO\nFoo\nfoo");
+ cx.update_editor(|e, window, cx| {
+ e.select_next(&SelectNext::default(), window, cx).unwrap();
+ });
+ cx.assert_editor_state("Β«ΛfooΒ»\nFOO\nFoo\nΒ«ΛfooΒ»");
+
+ // Disable case sensitive search.
+ update_test_editor_settings(&mut cx, |settings| {
+ let mut search_settings = SearchSettingsContent::default();
+ search_settings.case_sensitive = Some(false);
+ settings.search = Some(search_settings);
+ });
+
+ cx.set_state("Β«ΛfooΒ»\nFOO\nFoo");
+ cx.update_editor(|e, window, cx| {
+ e.select_next(&SelectNext::default(), window, cx).unwrap();
+ e.select_next(&SelectNext::default(), window, cx).unwrap();
+ });
+ cx.assert_editor_state("Β«ΛfooΒ»\nΒ«ΛFOOΒ»\nΒ«ΛFooΒ»");
}
#[gpui::test]
async fn test_select_all_matches(cx: &mut TestAppContext) {
init_test(cx, |_| {});
-
let mut cx = EditorTestContext::new(cx).await;
+ // Enable case sensitive search.
+ update_test_editor_settings(&mut cx, |settings| {
+ let mut search_settings = SearchSettingsContent::default();
+ search_settings.case_sensitive = Some(true);
+ settings.search = Some(search_settings);
+ });
+
// Test caret-only selections
cx.set_state("abc\nΛabc abc\ndefabc\nabc");
cx.update_editor(|e, window, cx| e.select_all_matches(&SelectAllMatches, window, cx))
@@ -8397,6 +8432,26 @@ async fn test_select_all_matches(cx: &mut TestAppContext) {
e.set_clip_at_line_ends(false, cx);
});
cx.assert_editor_state("Β«abcΛΒ»");
+
+ // Test case sensitivity
+ cx.set_state("fΛoo\nFOO\nFoo");
+ cx.update_editor(|e, window, cx| {
+ e.select_all_matches(&SelectAllMatches, window, cx).unwrap();
+ });
+ cx.assert_editor_state("Β«fooΛΒ»\nFOO\nFoo");
+
+ // Disable case sensitive search.
+ update_test_editor_settings(&mut cx, |settings| {
+ let mut search_settings = SearchSettingsContent::default();
+ search_settings.case_sensitive = Some(false);
+ settings.search = Some(search_settings);
+ });
+
+ cx.set_state("fΛoo\nFOO\nFoo");
+ cx.update_editor(|e, window, cx| {
+ e.select_all_matches(&SelectAllMatches, window, cx).unwrap();
+ });
+ cx.assert_editor_state("Β«fooΛΒ»\nΒ«FOOΛΒ»\nΒ«FooΛΒ»");
}
#[gpui::test]
@@ -8768,8 +8823,15 @@ let foo = Β«2ΛΒ»;"#,
#[gpui::test]
async fn test_select_previous_with_single_selection(cx: &mut TestAppContext) {
init_test(cx, |_| {});
-
let mut cx = EditorTestContext::new(cx).await;
+
+ // Enable case sensitive search.
+ update_test_editor_settings(&mut cx, |settings| {
+ let mut search_settings = SearchSettingsContent::default();
+ search_settings.case_sensitive = Some(true);
+ settings.search = Some(search_settings);
+ });
+
cx.set_state("abc\nΒ«ΛabcΒ» abc\ndefabc\nabc");
cx.update_editor(|e, window, cx| e.select_previous(&SelectPrevious::default(), window, cx))
@@ -8794,6 +8856,32 @@ async fn test_select_previous_with_single_selection(cx: &mut TestAppContext) {
cx.update_editor(|e, window, cx| e.select_previous(&SelectPrevious::default(), window, cx))
.unwrap();
cx.assert_editor_state("Β«ΛabcΒ»\nΒ«ΛabcΒ» Β«ΛabcΒ»\ndefΒ«ΛabcΒ»\nΒ«ΛabcΒ»");
+
+ // Test case sensitivity
+ cx.set_state("foo\nFOO\nFoo\nΒ«ΛfooΒ»");
+ cx.update_editor(|e, window, cx| {
+ e.select_previous(&SelectPrevious::default(), window, cx)
+ .unwrap();
+ e.select_previous(&SelectPrevious::default(), window, cx)
+ .unwrap();
+ });
+ cx.assert_editor_state("Β«ΛfooΒ»\nFOO\nFoo\nΒ«ΛfooΒ»");
+
+ // Disable case sensitive search.
+ update_test_editor_settings(&mut cx, |settings| {
+ let mut search_settings = SearchSettingsContent::default();
+ search_settings.case_sensitive = Some(false);
+ settings.search = Some(search_settings);
+ });
+
+ cx.set_state("foo\nFOO\nΒ«ΛFooΒ»");
+ cx.update_editor(|e, window, cx| {
+ e.select_previous(&SelectPrevious::default(), window, cx)
+ .unwrap();
+ e.select_previous(&SelectPrevious::default(), window, cx)
+ .unwrap();
+ });
+ cx.assert_editor_state("Β«ΛfooΒ»\nΒ«ΛFOOΒ»\nΒ«ΛFooΒ»");
}
#[gpui::test]
@@ -13826,7 +13914,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
cx.set_state(&run.initial_state);
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
let counter = Arc::new(AtomicUsize::new(0));
@@ -13886,7 +13974,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext)
cx.set_state(initial_state);
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
let counter = Arc::new(AtomicUsize::new(0));
@@ -13922,7 +14010,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext)
cx.set_state(initial_state);
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
handle_completion_request_with_insert_and_replace(
&mut cx,
@@ -14009,7 +14097,7 @@ async fn test_completion_replacing_surrounding_text_with_multicursors(cx: &mut T
"};
cx.set_state(initial_state);
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
handle_completion_request_with_insert_and_replace(
&mut cx,
@@ -14063,7 +14151,7 @@ async fn test_completion_replacing_surrounding_text_with_multicursors(cx: &mut T
"};
cx.set_state(initial_state);
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
handle_completion_request_with_insert_and_replace(
&mut cx,
@@ -14112,7 +14200,7 @@ async fn test_completion_replacing_surrounding_text_with_multicursors(cx: &mut T
"};
cx.set_state(initial_state);
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
handle_completion_request_with_insert_and_replace(
&mut cx,
@@ -14263,7 +14351,7 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte
});
editor.update_in(cx, |editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
fake_server
@@ -14502,7 +14590,7 @@ async fn test_completion(cx: &mut TestAppContext) {
cx.assert_editor_state("editor.cloΛ");
assert!(cx.editor(|e, _, _| e.context_menu.borrow_mut().is_none()));
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
handle_completion_request(
"editor.<clo|>",
@@ -14901,7 +14989,7 @@ async fn test_word_completions_usually_skip_digits(cx: &mut TestAppContext) {
4.5f32
"});
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions::default(), window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
cx.executor().run_until_parked();
cx.condition(|editor, _| editor.context_menu_visible())
@@ -14927,7 +15015,7 @@ async fn test_word_completions_usually_skip_digits(cx: &mut TestAppContext) {
33.35f32
"});
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions::default(), window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
cx.executor().run_until_parked();
cx.condition(|editor, _| editor.context_menu_visible())
@@ -15055,6 +15143,35 @@ async fn test_word_completions_disabled(cx: &mut TestAppContext) {
});
}
+#[gpui::test]
+async fn test_word_completions_disabled_with_no_provider(cx: &mut TestAppContext) {
+ init_test(cx, |language_settings| {
+ language_settings.defaults.completions = Some(CompletionSettingsContent {
+ words: Some(WordsCompletionMode::Disabled),
+ words_min_length: Some(0),
+ lsp_insert_mode: Some(LspInsertMode::Insert),
+ ..Default::default()
+ });
+ });
+
+ let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await;
+ cx.update_editor(|editor, _, _| {
+ editor.set_completion_provider(None);
+ });
+ cx.set_state(indoc! {"Λ
+ wow
+ wowen
+ wowser
+ "});
+ cx.simulate_keystroke("w");
+ cx.executor().run_until_parked();
+ cx.update_editor(|editor, _, _| {
+ if editor.context_menu.borrow_mut().is_some() {
+ panic!("expected completion menu to be hidden, as disabled in settings");
+ }
+ });
+}
+
fn gen_text_edit(params: &CompletionParams, text: &str) -> Option<lsp::CompletionTextEdit> {
let position = || lsp::Position {
line: params.text_document_position.position.line,
@@ -15351,13 +15468,7 @@ async fn test_as_is_completions(cx: &mut TestAppContext) {
cx.set_state("fn a() {}\n nΛ");
cx.executor().run_until_parked();
cx.update_editor(|editor, window, cx| {
- editor.show_completions(
- &ShowCompletions {
- trigger: Some("\n".into()),
- },
- window,
- cx,
- );
+ editor.trigger_completion_on_input("n", true, window, cx)
});
cx.executor().run_until_parked();
@@ -15455,7 +15566,7 @@ int fn_branch(bool do_branch1, bool do_branch2);
})))
});
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
cx.executor().run_until_parked();
cx.update_editor(|editor, window, cx| {
@@ -15504,7 +15615,7 @@ int fn_branch(bool do_branch1, bool do_branch2);
})))
});
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
cx.executor().run_until_parked();
cx.update_editor(|editor, window, cx| {
@@ -17994,7 +18105,7 @@ async fn test_context_menus_hide_hover_popover(cx: &mut gpui::TestAppContext) {
}
});
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
completion_requests.next().await;
cx.condition(|editor, _| editor.context_menu_visible())
@@ -22267,7 +22378,7 @@ async fn test_folding_buffers(cx: &mut TestAppContext) {
assert_eq!(
multi_buffer_editor.update(cx, |editor, cx| editor.display_text(cx)),
- "\n\nB\n\n\n\n\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n",
+ "\n\naaaa\nBbbbb\ncccc\n\n\nffff\ngggg\n\n\njjjj\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n",
"After unfolding the first buffer, its and 2nd buffer's text should be displayed"
);
@@ -22276,7 +22387,7 @@ async fn test_folding_buffers(cx: &mut TestAppContext) {
});
assert_eq!(
multi_buffer_editor.update(cx, |editor, cx| editor.display_text(cx)),
- "\n\nB\n\n\n\n\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n\nvvvv\nwwww\nxxxx\n\n\n1111\n2222\n\n\n5555",
+ "\n\naaaa\nBbbbb\ncccc\n\n\nffff\ngggg\n\n\njjjj\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n\nvvvv\nwwww\nxxxx\n\n\n1111\n2222\n\n\n5555",
"After unfolding the all buffers, all original text should be displayed"
);
}
@@ -24390,7 +24501,7 @@ async fn test_html_linked_edits_on_completion(cx: &mut TestAppContext) {
])))
});
editor.update_in(cx, |editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
cx.run_until_parked();
completion_handle.next().await.unwrap();
@@ -25693,6 +25804,17 @@ pub(crate) fn update_test_project_settings(
});
}
+pub(crate) fn update_test_editor_settings(
+ cx: &mut TestAppContext,
+ f: impl Fn(&mut EditorSettingsContent),
+) {
+ cx.update(|cx| {
+ SettingsStore::update_global(cx, |store, cx| {
+ store.update_user_settings(cx, |settings| f(&mut settings.editor));
+ })
+ })
+}
+
pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsContent)) {
cx.update(|cx| {
assets::Assets.load_test_fonts(cx);
@@ -27003,6 +27125,215 @@ async fn test_end_of_editor_context(cx: &mut TestAppContext) {
});
}
+#[gpui::test]
+async fn test_sticky_scroll(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+ let mut cx = EditorTestContext::new(cx).await;
+
+ let buffer = indoc! {"
+ Λfn foo() {
+ let abc = 123;
+ }
+ struct Bar;
+ impl Bar {
+ fn new() -> Self {
+ Self
+ }
+ }
+ fn baz() {
+ }
+ "};
+ cx.set_state(&buffer);
+
+ cx.update_editor(|e, _, cx| {
+ e.buffer()
+ .read(cx)
+ .as_singleton()
+ .unwrap()
+ .update(cx, |buffer, cx| {
+ buffer.set_language(Some(rust_lang()), cx);
+ })
+ });
+
+ let mut sticky_headers = |offset: ScrollOffset| {
+ cx.update_editor(|e, window, cx| {
+ e.scroll(gpui::Point { x: 0., y: offset }, None, window, cx);
+ EditorElement::sticky_headers(&e, &e.snapshot(window, cx), cx)
+ .into_iter()
+ .map(
+ |StickyHeader {
+ start_point,
+ offset,
+ ..
+ }| { (start_point, offset) },
+ )
+ .collect::<Vec<_>>()
+ })
+ };
+
+ let fn_foo = Point { row: 0, column: 0 };
+ let impl_bar = Point { row: 4, column: 0 };
+ let fn_new = Point { row: 5, column: 4 };
+
+ assert_eq!(sticky_headers(0.0), vec![]);
+ assert_eq!(sticky_headers(0.5), vec![(fn_foo, 0.0)]);
+ assert_eq!(sticky_headers(1.0), vec![(fn_foo, 0.0)]);
+ assert_eq!(sticky_headers(1.5), vec![(fn_foo, -0.5)]);
+ assert_eq!(sticky_headers(2.0), vec![]);
+ assert_eq!(sticky_headers(2.5), vec![]);
+ assert_eq!(sticky_headers(3.0), vec![]);
+ assert_eq!(sticky_headers(3.5), vec![]);
+ assert_eq!(sticky_headers(4.0), vec![]);
+ assert_eq!(sticky_headers(4.5), vec![(impl_bar, 0.0), (fn_new, 1.0)]);
+ assert_eq!(sticky_headers(5.0), vec![(impl_bar, 0.0), (fn_new, 1.0)]);
+ assert_eq!(sticky_headers(5.5), vec![(impl_bar, 0.0), (fn_new, 0.5)]);
+ assert_eq!(sticky_headers(6.0), vec![(impl_bar, 0.0)]);
+ assert_eq!(sticky_headers(6.5), vec![(impl_bar, 0.0)]);
+ assert_eq!(sticky_headers(7.0), vec![(impl_bar, 0.0)]);
+ assert_eq!(sticky_headers(7.5), vec![(impl_bar, -0.5)]);
+ assert_eq!(sticky_headers(8.0), vec![]);
+ assert_eq!(sticky_headers(8.5), vec![]);
+ assert_eq!(sticky_headers(9.0), vec![]);
+ assert_eq!(sticky_headers(9.5), vec![]);
+ assert_eq!(sticky_headers(10.0), vec![]);
+}
+
+#[gpui::test]
+async fn test_scroll_by_clicking_sticky_header(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+ cx.update(|cx| {
+ SettingsStore::update_global(cx, |store, cx| {
+ store.update_user_settings(cx, |settings| {
+ settings.editor.sticky_scroll = Some(settings::StickyScrollContent {
+ enabled: Some(true),
+ })
+ });
+ });
+ });
+ let mut cx = EditorTestContext::new(cx).await;
+
+ let line_height = cx.editor(|editor, window, _cx| {
+ editor
+ .style()
+ .unwrap()
+ .text
+ .line_height_in_pixels(window.rem_size())
+ });
+
+ let buffer = indoc! {"
+ Λfn foo() {
+ let abc = 123;
+ }
+ struct Bar;
+ impl Bar {
+ fn new() -> Self {
+ Self
+ }
+ }
+ fn baz() {
+ }
+ "};
+ cx.set_state(&buffer);
+
+ cx.update_editor(|e, _, cx| {
+ e.buffer()
+ .read(cx)
+ .as_singleton()
+ .unwrap()
+ .update(cx, |buffer, cx| {
+ buffer.set_language(Some(rust_lang()), cx);
+ })
+ });
+
+ let fn_foo = || empty_range(0, 0);
+ let impl_bar = || empty_range(4, 0);
+ let fn_new = || empty_range(5, 4);
+
+ let mut scroll_and_click = |scroll_offset: ScrollOffset, click_offset: ScrollOffset| {
+ cx.update_editor(|e, window, cx| {
+ e.scroll(
+ gpui::Point {
+ x: 0.,
+ y: scroll_offset,
+ },
+ None,
+ window,
+ cx,
+ );
+ });
+ cx.simulate_click(
+ gpui::Point {
+ x: px(0.),
+ y: click_offset as f32 * line_height,
+ },
+ Modifiers::none(),
+ );
+ cx.update_editor(|e, _, cx| (e.scroll_position(cx), display_ranges(e, cx)))
+ };
+
+ assert_eq!(
+ scroll_and_click(
+ 4.5, // impl Bar is halfway off the screen
+ 0.0 // click top of screen
+ ),
+ // scrolled to impl Bar
+ (gpui::Point { x: 0., y: 4. }, vec![impl_bar()])
+ );
+
+ assert_eq!(
+ scroll_and_click(
+ 4.5, // impl Bar is halfway off the screen
+ 0.25 // click middle of impl Bar
+ ),
+ // scrolled to impl Bar
+ (gpui::Point { x: 0., y: 4. }, vec![impl_bar()])
+ );
+
+ assert_eq!(
+ scroll_and_click(
+ 4.5, // impl Bar is halfway off the screen
+ 1.5 // click below impl Bar (e.g. fn new())
+ ),
+ // scrolled to fn new() - this is below the impl Bar header which has persisted
+ (gpui::Point { x: 0., y: 4. }, vec![fn_new()])
+ );
+
+ assert_eq!(
+ scroll_and_click(
+ 5.5, // fn new is halfway underneath impl Bar
+ 0.75 // click on the overlap of impl Bar and fn new()
+ ),
+ (gpui::Point { x: 0., y: 4. }, vec![impl_bar()])
+ );
+
+ assert_eq!(
+ scroll_and_click(
+ 5.5, // fn new is halfway underneath impl Bar
+ 1.25 // click on the visible part of fn new()
+ ),
+ (gpui::Point { x: 0., y: 4. }, vec![fn_new()])
+ );
+
+ assert_eq!(
+ scroll_and_click(
+ 1.5, // fn foo is halfway off the screen
+ 0.0 // click top of screen
+ ),
+ (gpui::Point { x: 0., y: 0. }, vec![fn_foo()])
+ );
+
+ assert_eq!(
+ scroll_and_click(
+ 1.5, // fn foo is halfway off the screen
+ 0.75 // click visible part of let abc...
+ )
+ .0,
+ // no change in scroll
+ // we don't assert on the visible_range because if we clicked the gutter, our line is fully selected
+ (gpui::Point { x: 0., y: 1.5 })
+ );
+}
+
#[gpui::test]
async fn test_next_prev_reference(cx: &mut TestAppContext) {
const CYCLE_POSITIONS: &[&'static str] = &[
@@ -27122,3 +27453,213 @@ async fn test_next_prev_reference(cx: &mut TestAppContext) {
_move(Direction::Prev, 2, &mut cx).await;
cx.assert_editor_state(CYCLE_POSITIONS[1]);
}
+
+#[gpui::test]
+async fn test_multibuffer_selections_with_folding(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let (editor, cx) = cx.add_window_view(|window, cx| {
+ let multi_buffer = MultiBuffer::build_multi(
+ [
+ ("1\n2\n3\n", vec![Point::row_range(0..3)]),
+ ("1\n2\n3\n", vec![Point::row_range(0..3)]),
+ ],
+ cx,
+ );
+ Editor::new(EditorMode::full(), multi_buffer, None, window, cx)
+ });
+
+ let mut cx = EditorTestContext::for_editor_in(editor.clone(), cx).await;
+ let buffer_ids = cx.multibuffer(|mb, _| mb.excerpt_buffer_ids());
+
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ Λ1
+ 2
+ 3
+ [EXCERPT]
+ 1
+ 2
+ 3
+ "});
+
+ // Scenario 1: Unfolded buffers, position cursor on "2", select all matches, then insert
+ cx.update_editor(|editor, window, cx| {
+ editor.change_selections(None.into(), window, cx, |s| {
+ s.select_ranges([2..3]);
+ });
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ 1
+ 2Λ
+ 3
+ [EXCERPT]
+ 1
+ 2
+ 3
+ "});
+
+ cx.update_editor(|editor, window, cx| {
+ editor
+ .select_all_matches(&SelectAllMatches, window, cx)
+ .unwrap();
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ 1
+ 2Λ
+ 3
+ [EXCERPT]
+ 1
+ 2Λ
+ 3
+ "});
+
+ cx.update_editor(|editor, window, cx| {
+ editor.handle_input("X", window, cx);
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ 1
+ XΛ
+ 3
+ [EXCERPT]
+ 1
+ XΛ
+ 3
+ "});
+
+ // Scenario 2: Select "2", then fold second buffer before insertion
+ cx.update_multibuffer(|mb, cx| {
+ for buffer_id in buffer_ids.iter() {
+ let buffer = mb.buffer(*buffer_id).unwrap();
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit([(0..buffer.len(), "1\n2\n3\n")], None, cx);
+ });
+ }
+ });
+
+ // Select "2" and select all matches
+ cx.update_editor(|editor, window, cx| {
+ editor.change_selections(None.into(), window, cx, |s| {
+ s.select_ranges([2..3]);
+ });
+ editor
+ .select_all_matches(&SelectAllMatches, window, cx)
+ .unwrap();
+ });
+
+ // Fold second buffer - should remove selections from folded buffer
+ cx.update_editor(|editor, _, cx| {
+ editor.fold_buffer(buffer_ids[1], cx);
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ 1
+ 2Λ
+ 3
+ [EXCERPT]
+ [FOLDED]
+ "});
+
+ // Insert text - should only affect first buffer
+ cx.update_editor(|editor, window, cx| {
+ editor.handle_input("Y", window, cx);
+ });
+ cx.update_editor(|editor, _, cx| {
+ editor.unfold_buffer(buffer_ids[1], cx);
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ 1
+ YΛ
+ 3
+ [EXCERPT]
+ 1
+ 2
+ 3
+ "});
+
+ // Scenario 3: Select "2", then fold first buffer before insertion
+ cx.update_multibuffer(|mb, cx| {
+ for buffer_id in buffer_ids.iter() {
+ let buffer = mb.buffer(*buffer_id).unwrap();
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit([(0..buffer.len(), "1\n2\n3\n")], None, cx);
+ });
+ }
+ });
+
+ // Select "2" and select all matches
+ cx.update_editor(|editor, window, cx| {
+ editor.change_selections(None.into(), window, cx, |s| {
+ s.select_ranges([2..3]);
+ });
+ editor
+ .select_all_matches(&SelectAllMatches, window, cx)
+ .unwrap();
+ });
+
+ // Fold first buffer - should remove selections from folded buffer
+ cx.update_editor(|editor, _, cx| {
+ editor.fold_buffer(buffer_ids[0], cx);
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ [FOLDED]
+ [EXCERPT]
+ 1
+ 2Λ
+ 3
+ "});
+
+ // Insert text - should only affect second buffer
+ cx.update_editor(|editor, window, cx| {
+ editor.handle_input("Z", window, cx);
+ });
+ cx.update_editor(|editor, _, cx| {
+ editor.unfold_buffer(buffer_ids[0], cx);
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ 1
+ 2
+ 3
+ [EXCERPT]
+ 1
+ ZΛ
+ 3
+ "});
+
+ // Edge case scenario: fold all buffers, then try to insert
+ cx.update_editor(|editor, _, cx| {
+ editor.fold_buffer(buffer_ids[0], cx);
+ editor.fold_buffer(buffer_ids[1], cx);
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ Λ[FOLDED]
+ [EXCERPT]
+ [FOLDED]
+ "});
+
+ // Insert should work via default selection
+ cx.update_editor(|editor, window, cx| {
+ editor.handle_input("W", window, cx);
+ });
+ cx.update_editor(|editor, _, cx| {
+ editor.unfold_buffer(buffer_ids[0], cx);
+ editor.unfold_buffer(buffer_ids[1], cx);
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ WΛ1
+ 2
+ 3
+ [EXCERPT]
+ 1
+ Z
+ 3
+ "});
+}
@@ -8,8 +8,8 @@ use crate::{
HandleInput, HoveredCursor, InlayHintRefreshReason, JumpData, LineDown, LineHighlight, LineUp,
MAX_LINE_LEN, MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts,
OpenExcerptsSplit, PageDown, PageUp, PhantomBreakpointIndicator, Point, RowExt, RowRangeExt,
- SelectPhase, SelectedTextHighlight, Selection, SelectionDragState, SizingBehavior, SoftWrap,
- StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll,
+ SelectPhase, SelectedTextHighlight, Selection, SelectionDragState, SelectionEffects,
+ SizingBehavior, SoftWrap, StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll,
code_context_menus::{CodeActionsMenu, MENU_ASIDE_MAX_WIDTH, MENU_ASIDE_MIN_WIDTH, MENU_GAP},
display_map::{
Block, BlockContext, BlockStyle, ChunkRendererId, DisplaySnapshot, EditorMargins,
@@ -29,7 +29,7 @@ use crate::{
items::BufferSearchHighlights,
mouse_context_menu::{self, MenuPosition},
scroll::{
- ActiveScrollbarState, ScrollOffset, ScrollPixelOffset, ScrollbarThumbState,
+ ActiveScrollbarState, Autoscroll, ScrollOffset, ScrollPixelOffset, ScrollbarThumbState,
scroll_amount::ScrollAmount,
},
};
@@ -3255,11 +3255,9 @@ impl EditorElement {
(newest_selection_head, relative)
});
- let relative_to = if relative.enabled() {
- Some(newest_selection_head.row())
- } else {
- None
- };
+ let relative_line_numbers_enabled = relative.enabled();
+ let relative_to = relative_line_numbers_enabled.then(|| newest_selection_head.row());
+
let relative_rows =
self.calculate_relative_line_numbers(snapshot, &rows, relative_to, relative.wrapped());
let mut line_number = String::new();
@@ -3271,17 +3269,18 @@ impl EditorElement {
} else {
row_info.buffer_row? + 1
};
- let number = relative_rows
- .get(&display_row)
- .unwrap_or(&non_relative_number);
- write!(&mut line_number, "{number}").unwrap();
- if row_info
- .diff_status
- .is_some_and(|status| status.is_deleted())
+ let relative_number = relative_rows.get(&display_row);
+ if !(relative_line_numbers_enabled && relative_number.is_some())
+ && row_info
+ .diff_status
+ .is_some_and(|status| status.is_deleted())
{
return None;
}
+ let number = relative_number.unwrap_or(&non_relative_number);
+ write!(&mut line_number, "{number}").unwrap();
+
let color = active_rows
.get(&display_row)
.map(|spec| {
@@ -4555,6 +4554,138 @@ impl EditorElement {
header
}
+ fn layout_sticky_headers(
+ &self,
+ snapshot: &EditorSnapshot,
+ editor_width: Pixels,
+ is_row_soft_wrapped: impl Copy + Fn(usize) -> bool,
+ line_height: Pixels,
+ scroll_pixel_position: gpui::Point<ScrollPixelOffset>,
+ content_origin: gpui::Point<Pixels>,
+ gutter_dimensions: &GutterDimensions,
+ gutter_hitbox: &Hitbox,
+ text_hitbox: &Hitbox,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Option<StickyHeaders> {
+ let show_line_numbers = snapshot
+ .show_line_numbers
+ .unwrap_or_else(|| EditorSettings::get_global(cx).gutter.line_numbers);
+
+ let rows = Self::sticky_headers(self.editor.read(cx), snapshot, cx);
+
+ let mut lines = Vec::<StickyHeaderLine>::new();
+
+ for StickyHeader {
+ item,
+ sticky_row,
+ start_point,
+ offset,
+ } in rows.into_iter().rev()
+ {
+ let line = layout_line(
+ sticky_row,
+ snapshot,
+ &self.style,
+ editor_width,
+ is_row_soft_wrapped,
+ window,
+ cx,
+ );
+
+ let line_number = show_line_numbers.then(|| {
+ let number = (start_point.row + 1).to_string();
+ let color = cx.theme().colors().editor_line_number;
+ self.shape_line_number(SharedString::from(number), color, window)
+ });
+
+ lines.push(StickyHeaderLine::new(
+ sticky_row,
+ line_height * offset as f32,
+ line,
+ line_number,
+ item.range.start,
+ line_height,
+ scroll_pixel_position,
+ content_origin,
+ gutter_hitbox,
+ text_hitbox,
+ window,
+ cx,
+ ));
+ }
+
+ lines.reverse();
+ if lines.is_empty() {
+ return None;
+ }
+
+ Some(StickyHeaders {
+ lines,
+ gutter_background: cx.theme().colors().editor_gutter_background,
+ content_background: self.style.background,
+ gutter_right_padding: gutter_dimensions.right_padding,
+ })
+ }
+
+ pub(crate) fn sticky_headers(
+ editor: &Editor,
+ snapshot: &EditorSnapshot,
+ cx: &App,
+ ) -> Vec<StickyHeader> {
+ let scroll_top = snapshot.scroll_position().y;
+
+ let mut end_rows = Vec::<DisplayRow>::new();
+ let mut rows = Vec::<StickyHeader>::new();
+
+ let items = editor.sticky_headers(cx).unwrap_or_default();
+
+ for item in items {
+ let start_point = item.range.start.to_point(snapshot.buffer_snapshot());
+ let end_point = item.range.end.to_point(snapshot.buffer_snapshot());
+
+ let sticky_row = snapshot
+ .display_snapshot
+ .point_to_display_point(start_point, Bias::Left)
+ .row();
+ let end_row = snapshot
+ .display_snapshot
+ .point_to_display_point(end_point, Bias::Left)
+ .row();
+ let max_sticky_row = end_row.previous_row();
+ if max_sticky_row <= sticky_row {
+ continue;
+ }
+
+ while end_rows
+ .last()
+ .is_some_and(|&last_end| last_end < sticky_row)
+ {
+ end_rows.pop();
+ }
+ let depth = end_rows.len();
+ let adjusted_scroll_top = scroll_top + depth as f64;
+
+ if sticky_row.as_f64() >= adjusted_scroll_top || end_row.as_f64() <= adjusted_scroll_top
+ {
+ continue;
+ }
+
+ let max_scroll_offset = max_sticky_row.as_f64() - scroll_top;
+ let offset = (depth as f64).min(max_scroll_offset);
+
+ end_rows.push(end_row);
+ rows.push(StickyHeader {
+ item,
+ sticky_row,
+ start_point,
+ offset,
+ });
+ }
+
+ rows
+ }
+
fn layout_cursor_popovers(
&self,
line_height: Pixels,
@@ -6407,6 +6538,89 @@ impl EditorElement {
}
}
+ fn paint_sticky_headers(
+ &mut self,
+ layout: &mut EditorLayout,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ let Some(mut sticky_headers) = layout.sticky_headers.take() else {
+ return;
+ };
+
+ if sticky_headers.lines.is_empty() {
+ layout.sticky_headers = Some(sticky_headers);
+ return;
+ }
+
+ let whitespace_setting = self
+ .editor
+ .read(cx)
+ .buffer
+ .read(cx)
+ .language_settings(cx)
+ .show_whitespaces;
+ sticky_headers.paint(layout, whitespace_setting, window, cx);
+
+ let sticky_header_hitboxes: Vec<Hitbox> = sticky_headers
+ .lines
+ .iter()
+ .map(|line| line.hitbox.clone())
+ .collect();
+ let hovered_hitbox = sticky_header_hitboxes
+ .iter()
+ .find_map(|hitbox| hitbox.is_hovered(window).then_some(hitbox.id));
+
+ window.on_mouse_event(move |_: &MouseMoveEvent, phase, window, _cx| {
+ if !phase.bubble() {
+ return;
+ }
+
+ let current_hover = sticky_header_hitboxes
+ .iter()
+ .find_map(|hitbox| hitbox.is_hovered(window).then_some(hitbox.id));
+ if hovered_hitbox != current_hover {
+ window.refresh();
+ }
+ });
+
+ for (line_index, line) in sticky_headers.lines.iter().enumerate() {
+ let editor = self.editor.clone();
+ let hitbox = line.hitbox.clone();
+ let target_anchor = line.target_anchor;
+ window.on_mouse_event(move |event: &MouseDownEvent, phase, window, cx| {
+ if !phase.bubble() {
+ return;
+ }
+
+ if event.button == MouseButton::Left && hitbox.is_hovered(window) {
+ editor.update(cx, |editor, cx| {
+ editor.change_selections(
+ SelectionEffects::scroll(Autoscroll::top_relative(line_index)),
+ window,
+ cx,
+ |selections| selections.select_ranges([target_anchor..target_anchor]),
+ );
+ cx.stop_propagation();
+ });
+ }
+ });
+ }
+
+ let text_bounds = layout.position_map.text_hitbox.bounds;
+ let border_top = text_bounds.top()
+ + sticky_headers.lines.last().unwrap().offset
+ + layout.position_map.line_height;
+ let separator_height = px(1.);
+ let border_bounds = Bounds::from_corners(
+ point(layout.gutter_hitbox.bounds.left(), border_top),
+ point(text_bounds.right(), border_top + separator_height),
+ );
+ window.paint_quad(fill(border_bounds, cx.theme().colors().border_variant));
+
+ layout.sticky_headers = Some(sticky_headers);
+ }
+
fn paint_lines_background(
&mut self,
layout: &mut EditorLayout,
@@ -8107,6 +8321,27 @@ impl LineWithInvisibles {
cx: &mut App,
) {
let line_y = f32::from(line_height) * Pixels::from(row.as_f64() - scroll_position.y);
+ self.prepaint_with_custom_offset(
+ line_height,
+ scroll_pixel_position,
+ content_origin,
+ line_y,
+ line_elements,
+ window,
+ cx,
+ );
+ }
+
+ fn prepaint_with_custom_offset(
+ &mut self,
+ line_height: Pixels,
+ scroll_pixel_position: gpui::Point<ScrollPixelOffset>,
+ content_origin: gpui::Point<Pixels>,
+ line_y: Pixels,
+ line_elements: &mut SmallVec<[AnyElement; 1]>,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
let mut fragment_origin =
content_origin + gpui::point(Pixels::from(-scroll_pixel_position.x), line_y);
for fragment in &mut self.fragments {
@@ -8141,9 +8376,31 @@ impl LineWithInvisibles {
window: &mut Window,
cx: &mut App,
) {
- let line_height = layout.position_map.line_height;
- let line_y = line_height * (row.as_f64() - layout.position_map.scroll_position.y) as f32;
+ self.draw_with_custom_offset(
+ layout,
+ row,
+ content_origin,
+ layout.position_map.line_height
+ * (row.as_f64() - layout.position_map.scroll_position.y) as f32,
+ whitespace_setting,
+ selection_ranges,
+ window,
+ cx,
+ );
+ }
+ fn draw_with_custom_offset(
+ &self,
+ layout: &EditorLayout,
+ row: DisplayRow,
+ content_origin: gpui::Point<Pixels>,
+ line_y: Pixels,
+ whitespace_setting: ShowWhitespaceSetting,
+ selection_ranges: &[Range<DisplayPoint>],
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ let line_height = layout.position_map.line_height;
let mut fragment_origin = content_origin
+ gpui::point(
Pixels::from(-layout.position_map.scroll_pixel_position.x),
@@ -8582,6 +8839,7 @@ impl Element for EditorElement {
};
let is_minimap = self.editor.read(cx).mode.is_minimap();
+ let is_singleton = self.editor.read(cx).buffer_kind(cx) == ItemBufferKind::Singleton;
if !is_minimap {
let focus_handle = self.editor.focus_handle(cx);
@@ -9228,6 +9486,26 @@ impl Element for EditorElement {
scroll_position.x * f64::from(em_advance),
scroll_position.y * f64::from(line_height),
);
+ let sticky_headers = if !is_minimap
+ && is_singleton
+ && EditorSettings::get_global(cx).sticky_scroll.enabled
+ {
+ self.layout_sticky_headers(
+ &snapshot,
+ editor_width,
+ is_row_soft_wrapped,
+ line_height,
+ scroll_pixel_position,
+ content_origin,
+ &gutter_dimensions,
+ &gutter_hitbox,
+ &text_hitbox,
+ window,
+ cx,
+ )
+ } else {
+ None
+ };
let indent_guides = self.layout_indent_guides(
content_origin,
text_hitbox.origin,
@@ -9697,6 +9975,7 @@ impl Element for EditorElement {
tab_invisible,
space_invisible,
sticky_buffer_header,
+ sticky_headers,
expand_toggles,
}
})
@@ -9767,6 +10046,7 @@ impl Element for EditorElement {
}
});
+ self.paint_sticky_headers(layout, window, cx);
self.paint_minimap(layout, window, cx);
self.paint_scrollbars(layout, window, cx);
self.paint_edit_prediction_popover(layout, window, cx);
@@ -9875,15 +10155,180 @@ pub struct EditorLayout {
tab_invisible: ShapedLine,
space_invisible: ShapedLine,
sticky_buffer_header: Option<AnyElement>,
+ sticky_headers: Option<StickyHeaders>,
document_colors: Option<(DocumentColorsRenderMode, Vec<(Range<DisplayPoint>, Hsla)>)>,
}
+struct StickyHeaders {
+ lines: Vec<StickyHeaderLine>,
+ gutter_background: Hsla,
+ content_background: Hsla,
+ gutter_right_padding: Pixels,
+}
+
+struct StickyHeaderLine {
+ row: DisplayRow,
+ offset: Pixels,
+ line: LineWithInvisibles,
+ line_number: Option<ShapedLine>,
+ elements: SmallVec<[AnyElement; 1]>,
+ available_text_width: Pixels,
+ target_anchor: Anchor,
+ hitbox: Hitbox,
+}
+
impl EditorLayout {
fn line_end_overshoot(&self) -> Pixels {
0.15 * self.position_map.line_height
}
}
+impl StickyHeaders {
+ fn paint(
+ &mut self,
+ layout: &mut EditorLayout,
+ whitespace_setting: ShowWhitespaceSetting,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ let line_height = layout.position_map.line_height;
+
+ for line in self.lines.iter_mut().rev() {
+ window.paint_layer(
+ Bounds::new(
+ layout.gutter_hitbox.origin + point(Pixels::ZERO, line.offset),
+ size(line.hitbox.size.width, line_height),
+ ),
+ |window| {
+ let gutter_bounds = Bounds::new(
+ layout.gutter_hitbox.origin + point(Pixels::ZERO, line.offset),
+ size(layout.gutter_hitbox.size.width, line_height),
+ );
+ window.paint_quad(fill(gutter_bounds, self.gutter_background));
+
+ let text_bounds = Bounds::new(
+ layout.position_map.text_hitbox.origin + point(Pixels::ZERO, line.offset),
+ size(line.available_text_width, line_height),
+ );
+ window.paint_quad(fill(text_bounds, self.content_background));
+
+ if line.hitbox.is_hovered(window) {
+ let hover_overlay = cx.theme().colors().panel_overlay_hover;
+ window.paint_quad(fill(gutter_bounds, hover_overlay));
+ window.paint_quad(fill(text_bounds, hover_overlay));
+ }
+
+ line.paint(
+ layout,
+ self.gutter_right_padding,
+ line.available_text_width,
+ layout.content_origin,
+ line_height,
+ whitespace_setting,
+ window,
+ cx,
+ );
+ },
+ );
+
+ window.set_cursor_style(CursorStyle::PointingHand, &line.hitbox);
+ }
+ }
+}
+
+impl StickyHeaderLine {
+ fn new(
+ row: DisplayRow,
+ offset: Pixels,
+ mut line: LineWithInvisibles,
+ line_number: Option<ShapedLine>,
+ target_anchor: Anchor,
+ line_height: Pixels,
+ scroll_pixel_position: gpui::Point<ScrollPixelOffset>,
+ content_origin: gpui::Point<Pixels>,
+ gutter_hitbox: &Hitbox,
+ text_hitbox: &Hitbox,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Self {
+ let mut elements = SmallVec::<[AnyElement; 1]>::new();
+ line.prepaint_with_custom_offset(
+ line_height,
+ scroll_pixel_position,
+ content_origin,
+ offset,
+ &mut elements,
+ window,
+ cx,
+ );
+
+ let hitbox_bounds = Bounds::new(
+ gutter_hitbox.origin + point(Pixels::ZERO, offset),
+ size(text_hitbox.right() - gutter_hitbox.left(), line_height),
+ );
+ let available_text_width =
+ (hitbox_bounds.size.width - gutter_hitbox.size.width).max(Pixels::ZERO);
+
+ Self {
+ row,
+ offset,
+ line,
+ line_number,
+ elements,
+ available_text_width,
+ target_anchor,
+ hitbox: window.insert_hitbox(hitbox_bounds, HitboxBehavior::BlockMouseExceptScroll),
+ }
+ }
+
+ fn paint(
+ &mut self,
+ layout: &EditorLayout,
+ gutter_right_padding: Pixels,
+ available_text_width: Pixels,
+ content_origin: gpui::Point<Pixels>,
+ line_height: Pixels,
+ whitespace_setting: ShowWhitespaceSetting,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ window.with_content_mask(
+ Some(ContentMask {
+ bounds: Bounds::new(
+ layout.position_map.text_hitbox.bounds.origin
+ + point(Pixels::ZERO, self.offset),
+ size(available_text_width, line_height),
+ ),
+ }),
+ |window| {
+ self.line.draw_with_custom_offset(
+ layout,
+ self.row,
+ content_origin,
+ self.offset,
+ whitespace_setting,
+ &[],
+ window,
+ cx,
+ );
+ for element in &mut self.elements {
+ element.paint(window, cx);
+ }
+ },
+ );
+
+ if let Some(line_number) = &self.line_number {
+ let gutter_origin = layout.gutter_hitbox.origin + point(Pixels::ZERO, self.offset);
+ let gutter_width = layout.gutter_hitbox.size.width;
+ let origin = point(
+ gutter_origin.x + gutter_width - gutter_right_padding - line_number.width,
+ gutter_origin.y,
+ );
+ line_number.paint(origin, line_height, window, cx).log_err();
+ }
+ }
+}
+
#[derive(Debug)]
struct LineNumberSegment {
shaped_line: ShapedLine,
@@ -10730,6 +11175,13 @@ impl HighlightedRange {
}
}
+pub(crate) struct StickyHeader {
+ pub item: language::OutlineItem<Anchor>,
+ pub sticky_row: DisplayRow,
+ pub start_point: Point,
+ pub offset: ScrollOffset,
+}
+
enum CursorPopoverType {
CodeContextMenu,
EditPrediction,
@@ -11002,6 +11454,46 @@ mod tests {
assert_eq!(relative_rows[&DisplayRow(0)], 5);
assert_eq!(relative_rows[&DisplayRow(1)], 4);
assert_eq!(relative_rows[&DisplayRow(2)], 3);
+
+ const DELETED_LINE: u32 = 3;
+ let layouts = cx
+ .update_window(*window, |_, window, cx| {
+ element.layout_line_numbers(
+ None,
+ GutterDimensions {
+ left_padding: Pixels::ZERO,
+ right_padding: Pixels::ZERO,
+ width: px(30.0),
+ margin: Pixels::ZERO,
+ git_blame_entries_width: None,
+ },
+ line_height,
+ gpui::Point::default(),
+ DisplayRow(0)..DisplayRow(6),
+ &(0..6)
+ .map(|row| RowInfo {
+ buffer_row: Some(row),
+ diff_status: (row == DELETED_LINE).then(|| {
+ DiffHunkStatus::deleted(
+ buffer_diff::DiffHunkSecondaryStatus::NoSecondaryHunk,
+ )
+ }),
+ ..Default::default()
+ })
+ .collect::<Vec<_>>(),
+ &BTreeMap::default(),
+ Some(DisplayPoint::new(DisplayRow(0), 0)),
+ &snapshot,
+ window,
+ cx,
+ )
+ })
+ .unwrap();
+ assert_eq!(layouts.len(), 5,);
+ assert!(
+ layouts.get(&MultiBufferRow(DELETED_LINE)).is_none(),
+ "Deleted line should not have a line number"
+ );
}
#[gpui::test]
@@ -11077,6 +11569,62 @@ mod tests {
// current line has no relative number
assert_eq!(relative_rows[&DisplayRow(4)], 1);
assert_eq!(relative_rows[&DisplayRow(5)], 2);
+
+ let layouts = cx
+ .update_window(*window, |_, window, cx| {
+ element.layout_line_numbers(
+ None,
+ GutterDimensions {
+ left_padding: Pixels::ZERO,
+ right_padding: Pixels::ZERO,
+ width: px(30.0),
+ margin: Pixels::ZERO,
+ git_blame_entries_width: None,
+ },
+ line_height,
+ gpui::Point::default(),
+ DisplayRow(0)..DisplayRow(6),
+ &(0..6)
+ .map(|row| RowInfo {
+ buffer_row: Some(row),
+ diff_status: Some(DiffHunkStatus::deleted(
+ buffer_diff::DiffHunkSecondaryStatus::NoSecondaryHunk,
+ )),
+ ..Default::default()
+ })
+ .collect::<Vec<_>>(),
+ &BTreeMap::from_iter([(DisplayRow(0), LineHighlightSpec::default())]),
+ Some(DisplayPoint::new(DisplayRow(0), 0)),
+ &snapshot,
+ window,
+ cx,
+ )
+ })
+ .unwrap();
+ assert!(
+ layouts.is_empty(),
+ "Deleted lines should have no line number"
+ );
+
+ let relative_rows = window
+ .update(cx, |editor, window, cx| {
+ let snapshot = editor.snapshot(window, cx);
+ element.calculate_relative_line_numbers(
+ &snapshot,
+ &(DisplayRow(0)..DisplayRow(6)),
+ Some(DisplayRow(3)),
+ true,
+ )
+ })
+ .unwrap();
+
+ // Deleted lines should still have relative numbers
+ assert_eq!(relative_rows[&DisplayRow(0)], 3);
+ assert_eq!(relative_rows[&DisplayRow(1)], 2);
+ assert_eq!(relative_rows[&DisplayRow(2)], 1);
+ // current line, even if deleted, has no relative number
+ assert_eq!(relative_rows[&DisplayRow(4)], 1);
+ assert_eq!(relative_rows[&DisplayRow(5)], 2);
}
#[gpui::test]
@@ -1796,6 +1796,14 @@ impl SearchableItem for Editor {
fn search_bar_visibility_changed(&mut self, _: bool, _: &mut Window, _: &mut Context<Self>) {
self.expect_bounds_change = self.last_bounds;
}
+
+ fn set_search_is_case_sensitive(
+ &mut self,
+ case_sensitive: Option<bool>,
+ _cx: &mut Context<Self>,
+ ) {
+ self.select_next_is_case_sensitive = case_sensitive;
+ }
}
pub fn active_match_index(
@@ -487,6 +487,43 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> {
self.selections_changed |= changed;
}
+ pub fn remove_selections_from_buffer(&mut self, buffer_id: language::BufferId) {
+ let mut changed = false;
+
+ let filtered_selections: Arc<[Selection<Anchor>]> = {
+ self.disjoint
+ .iter()
+ .filter(|selection| {
+ if let Some(selection_buffer_id) =
+ self.snapshot.buffer_id_for_anchor(selection.start)
+ {
+ let should_remove = selection_buffer_id == buffer_id;
+ changed |= should_remove;
+ !should_remove
+ } else {
+ true
+ }
+ })
+ .cloned()
+ .collect()
+ };
+
+ if filtered_selections.is_empty() {
+ let default_anchor = self.snapshot.anchor_before(0);
+ self.collection.disjoint = Arc::from([Selection {
+ id: post_inc(&mut self.collection.next_selection_id),
+ start: default_anchor,
+ end: default_anchor,
+ reversed: false,
+ goal: SelectionGoal::None,
+ }]);
+ } else {
+ self.collection.disjoint = filtered_selections;
+ }
+
+ self.selections_changed |= changed;
+ }
+
pub fn clear_pending(&mut self) {
if self.collection.pending.is_some() {
self.collection.pending = None;
@@ -322,7 +322,7 @@ impl ExampleInstance {
thread.add_default_tools(Rc::new(EvalThreadEnvironment {
project: project.clone(),
}), cx);
- thread.set_profile(meta.profile_id.clone());
+ thread.set_profile(meta.profile_id.clone(), cx);
thread.set_model(
LanguageModelInterceptor::new(
LanguageModelRegistry::read_global(cx).default_model().expect("Missing model").model.clone(),
@@ -31,8 +31,7 @@ use util::test::TempTree;
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
- // show info logs while we debug the extension_store tests hanging.
- zlog::init_test_with("info");
+ zlog::init_test();
}
#[gpui::test]
@@ -532,6 +531,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
+ log::info!("Initializing test");
init_test(cx);
cx.executor().allow_parking();
@@ -556,6 +556,8 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
let extensions_dir = extensions_tree.path().canonicalize().unwrap();
let project_dir = project_dir.path().canonicalize().unwrap();
+ log::info!("Setting up test");
+
let project = Project::test(fs.clone(), [project_dir.as_path()], cx).await;
let proxy = Arc::new(ExtensionHostProxy::new());
@@ -674,6 +676,8 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
)
});
+ log::info!("Flushing events");
+
// Ensure that debounces fire.
let mut events = cx.events(&extension_store);
let executor = cx.executor();
@@ -399,7 +399,12 @@ impl PickerDelegate for OpenPathDelegate {
}
})
.unwrap_or(false);
- if should_prepend_with_current_dir {
+
+ let current_dir_in_new_entries = new_entries
+ .iter()
+ .any(|entry| &entry.path.string == current_dir);
+
+ if should_prepend_with_current_dir && !current_dir_in_new_entries {
new_entries.insert(
0,
CandidateInfo {
@@ -4,6 +4,10 @@ mod mac_watcher;
#[cfg(not(target_os = "macos"))]
pub mod fs_watcher;
+use parking_lot::Mutex;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::time::Instant;
+
use anyhow::{Context as _, Result, anyhow};
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
use ashpd::desktop::trash;
@@ -12,6 +16,7 @@ use gpui::App;
use gpui::BackgroundExecutor;
use gpui::Global;
use gpui::ReadGlobal as _;
+use gpui::SharedString;
use std::borrow::Cow;
use util::command::new_smol_command;
@@ -51,8 +56,7 @@ use git::{
repository::{RepoPath, repo_path},
status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus},
};
-#[cfg(any(test, feature = "test-support"))]
-use parking_lot::Mutex;
+
#[cfg(any(test, feature = "test-support"))]
use smol::io::AsyncReadExt;
#[cfg(any(test, feature = "test-support"))]
@@ -148,6 +152,7 @@ pub trait Fs: Send + Sync {
async fn git_clone(&self, repo_url: &str, abs_work_directory: &Path) -> Result<()>;
fn is_fake(&self) -> bool;
async fn is_case_sensitive(&self) -> Result<bool>;
+ fn subscribe_to_jobs(&self) -> JobEventReceiver;
#[cfg(any(test, feature = "test-support"))]
fn as_fake(&self) -> Arc<FakeFs> {
@@ -215,6 +220,55 @@ pub struct Metadata {
#[serde(transparent)]
pub struct MTime(SystemTime);
+pub type JobId = usize;
+
+#[derive(Clone, Debug)]
+pub struct JobInfo {
+ pub start: Instant,
+ pub message: SharedString,
+ pub id: JobId,
+}
+
+#[derive(Debug, Clone)]
+pub enum JobEvent {
+ Started { info: JobInfo },
+ Completed { id: JobId },
+}
+
+pub type JobEventSender = futures::channel::mpsc::UnboundedSender<JobEvent>;
+pub type JobEventReceiver = futures::channel::mpsc::UnboundedReceiver<JobEvent>;
+
+struct JobTracker {
+ id: JobId,
+ subscribers: Arc<Mutex<Vec<JobEventSender>>>,
+}
+
+impl JobTracker {
+ fn new(info: JobInfo, subscribers: Arc<Mutex<Vec<JobEventSender>>>) -> Self {
+ let id = info.id;
+ {
+ let mut subs = subscribers.lock();
+ subs.retain(|sender| {
+ sender
+ .unbounded_send(JobEvent::Started { info: info.clone() })
+ .is_ok()
+ });
+ }
+ Self { id, subscribers }
+ }
+}
+
+impl Drop for JobTracker {
+ fn drop(&mut self) {
+ let mut subs = self.subscribers.lock();
+ subs.retain(|sender| {
+ sender
+ .unbounded_send(JobEvent::Completed { id: self.id })
+ .is_ok()
+ });
+ }
+}
+
impl MTime {
/// Conversion intended for persistence and testing.
pub fn from_seconds_and_nanos(secs: u64, nanos: u32) -> Self {
@@ -257,6 +311,8 @@ impl From<MTime> for proto::Timestamp {
pub struct RealFs {
bundled_git_binary_path: Option<PathBuf>,
executor: BackgroundExecutor,
+ next_job_id: Arc<AtomicUsize>,
+ job_event_subscribers: Arc<Mutex<Vec<JobEventSender>>>,
}
pub trait FileHandle: Send + Sync + std::fmt::Debug {
@@ -361,6 +417,8 @@ impl RealFs {
Self {
bundled_git_binary_path: git_binary_path,
executor,
+ next_job_id: Arc::new(AtomicUsize::new(0)),
+ job_event_subscribers: Arc::new(Mutex::new(Vec::new())),
}
}
}
@@ -719,9 +777,8 @@ impl Fs for RealFs {
{
Ok(metadata) => metadata,
Err(err) => {
- return match (err.kind(), err.raw_os_error()) {
- (io::ErrorKind::NotFound, _) => Ok(None),
- (io::ErrorKind::Other, Some(libc::ENOTDIR)) => Ok(None),
+ return match err.kind() {
+ io::ErrorKind::NotFound | io::ErrorKind::NotADirectory => Ok(None),
_ => Err(anyhow::Error::new(err)),
};
}
@@ -863,7 +920,6 @@ impl Fs for RealFs {
Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>,
Arc<dyn Watcher>,
) {
- use parking_lot::Mutex;
use util::{ResultExt as _, paths::SanitizedPath};
let (tx, rx) = smol::channel::unbounded();
@@ -960,6 +1016,15 @@ impl Fs for RealFs {
}
async fn git_clone(&self, repo_url: &str, abs_work_directory: &Path) -> Result<()> {
+ let job_id = self.next_job_id.fetch_add(1, Ordering::SeqCst);
+ let job_info = JobInfo {
+ id: job_id,
+ start: Instant::now(),
+ message: SharedString::from(format!("Cloning {}", repo_url)),
+ };
+
+ let _job_tracker = JobTracker::new(job_info, self.job_event_subscribers.clone());
+
let output = new_smol_command("git")
.current_dir(abs_work_directory)
.args(&["clone", repo_url])
@@ -980,6 +1045,12 @@ impl Fs for RealFs {
false
}
+ fn subscribe_to_jobs(&self) -> JobEventReceiver {
+ let (sender, receiver) = futures::channel::mpsc::unbounded();
+ self.job_event_subscribers.lock().push(sender);
+ receiver
+ }
+
/// Checks whether the file system is case sensitive by attempting to create two files
/// that have the same name except for the casing.
///
@@ -1050,6 +1121,7 @@ struct FakeFsState {
read_dir_call_count: usize,
path_write_counts: std::collections::HashMap<PathBuf, usize>,
moves: std::collections::HashMap<u64, PathBuf>,
+ job_event_subscribers: Arc<Mutex<Vec<JobEventSender>>>,
}
#[cfg(any(test, feature = "test-support"))]
@@ -1334,6 +1406,7 @@ impl FakeFs {
metadata_call_count: 0,
path_write_counts: Default::default(),
moves: Default::default(),
+ job_event_subscribers: Arc::new(Mutex::new(Vec::new())),
})),
});
@@ -2588,6 +2661,12 @@ impl Fs for FakeFs {
Ok(true)
}
+ fn subscribe_to_jobs(&self) -> JobEventReceiver {
+ let (sender, receiver) = futures::channel::mpsc::unbounded();
+ self.state.lock().job_event_subscribers.lock().push(sender);
+ receiver
+ }
+
#[cfg(any(test, feature = "test-support"))]
fn as_fake(&self) -> Arc<FakeFs> {
self.this.upgrade().unwrap()
@@ -3202,6 +3281,8 @@ mod tests {
let fs = RealFs {
bundled_git_binary_path: None,
executor,
+ next_job_id: Arc::new(AtomicUsize::new(0)),
+ job_event_subscribers: Arc::new(Mutex::new(Vec::new())),
};
let temp_dir = TempDir::new().unwrap();
let file_to_be_replaced = temp_dir.path().join("file.txt");
@@ -3220,6 +3301,8 @@ mod tests {
let fs = RealFs {
bundled_git_binary_path: None,
executor,
+ next_job_id: Arc::new(AtomicUsize::new(0)),
+ job_event_subscribers: Arc::new(Mutex::new(Vec::new())),
};
let temp_dir = TempDir::new().unwrap();
let file_to_be_replaced = temp_dir.path().join("file.txt");
@@ -169,6 +169,13 @@ impl Application {
self
}
+ /// Configures when the application should automatically quit.
+ /// By default, [`QuitMode::Default`] is used.
+ pub fn with_quit_mode(self, mode: QuitMode) -> Self {
+ self.0.borrow_mut().quit_mode = mode;
+ self
+ }
+
/// Start the application. The provided callback will be called once the
/// app is fully launched.
pub fn run<F>(self, on_finish_launching: F)
@@ -238,6 +245,18 @@ type WindowClosedHandler = Box<dyn FnMut(&mut App)>;
type ReleaseListener = Box<dyn FnOnce(&mut dyn Any, &mut App) + 'static>;
type NewEntityListener = Box<dyn FnMut(AnyEntity, &mut Option<&mut Window>, &mut App) + 'static>;
+/// Defines when the application should automatically quit.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
+pub enum QuitMode {
+ /// Use [`QuitMode::Explicit`] on macOS and [`QuitMode::LastWindowClosed`] on other platforms.
+ #[default]
+ Default,
+ /// Quit automatically when the last window is closed.
+ LastWindowClosed,
+ /// Quit only when requested via [`App::quit`].
+ Explicit,
+}
+
#[doc(hidden)]
#[derive(Clone, PartialEq, Eq)]
pub struct SystemWindowTab {
@@ -588,6 +607,7 @@ pub struct App {
pub(crate) inspector_element_registry: InspectorElementRegistry,
#[cfg(any(test, feature = "test-support", debug_assertions))]
pub(crate) name: Option<&'static str>,
+ quit_mode: QuitMode,
quitting: bool,
}
@@ -659,6 +679,7 @@ impl App {
inspector_renderer: None,
#[cfg(any(feature = "inspector", debug_assertions))]
inspector_element_registry: InspectorElementRegistry::default(),
+ quit_mode: QuitMode::default(),
quitting: false,
#[cfg(any(test, feature = "test-support", debug_assertions))]
@@ -1172,6 +1193,12 @@ impl App {
self.http_client = new_client;
}
+ /// Configures when the application should automatically quit.
+ /// By default, [`QuitMode::Default`] is used.
+ pub fn set_quit_mode(&mut self, mode: QuitMode) {
+ self.quit_mode = mode;
+ }
+
/// Returns the SVG renderer used by the application.
pub fn svg_renderer(&self) -> SvgRenderer {
self.svg_renderer.clone()
@@ -1379,6 +1406,16 @@ impl App {
callback(cx);
true
});
+
+ let quit_on_empty = match cx.quit_mode {
+ QuitMode::Explicit => false,
+ QuitMode::LastWindowClosed => true,
+ QuitMode::Default => !cfg!(target_os = "macos"),
+ };
+
+ if quit_on_empty && cx.windows.is_empty() {
+ cx.quit();
+ }
} else {
cx.windows.get_mut(id)?.replace(window);
}
@@ -10,7 +10,9 @@ use crate::{
use anyhow::{anyhow, bail};
use futures::{Stream, StreamExt, channel::oneshot};
use rand::{SeedableRng, rngs::StdRng};
-use std::{cell::RefCell, future::Future, ops::Deref, rc::Rc, sync::Arc, time::Duration};
+use std::{
+ cell::RefCell, future::Future, ops::Deref, path::PathBuf, rc::Rc, sync::Arc, time::Duration,
+};
/// A TestAppContext is provided to tests created with `#[gpui::test]`, it provides
/// an implementation of `Context` with additional methods that are useful in tests.
@@ -331,6 +333,13 @@ impl TestAppContext {
self.test_window(window_handle).simulate_resize(size);
}
+ /// Returns a receiver that resolves with the requested binary path when the application is asked to restart.
+ pub fn expect_restart(&self) -> oneshot::Receiver<Option<PathBuf>> {
+ let (tx, rx) = futures::channel::oneshot::channel();
+ self.test_platform.expect_restart.borrow_mut().replace(tx);
+ rx
+ }
+
/// Causes the given sources to be returned if the application queries for screen
/// capture sources.
pub fn set_screen_capture_sources(&self, sources: Vec<TestScreenCaptureSource>) {
@@ -387,9 +387,6 @@ impl WaylandClientStatePtr {
{
state.keyboard_focused_window = Some(window);
}
- if state.windows.is_empty() {
- state.common.signal.stop();
- }
}
}
@@ -246,10 +246,6 @@ impl X11ClientStatePtr {
state.keyboard_focused_window = None;
}
state.cursor_styles.remove(&x_window);
-
- if state.windows.is_empty() {
- state.common.signal.stop();
- }
}
pub fn update_ime_position(&self, bounds: Bounds<Pixels>) {
@@ -36,6 +36,7 @@ pub(crate) struct TestPlatform {
screen_capture_sources: RefCell<Vec<TestScreenCaptureSource>>,
pub opened_url: RefCell<Option<String>>,
pub text_system: Arc<dyn PlatformTextSystem>,
+ pub expect_restart: RefCell<Option<oneshot::Sender<Option<PathBuf>>>>,
#[cfg(target_os = "windows")]
bitmap_factory: std::mem::ManuallyDrop<IWICImagingFactory>,
weak: Weak<Self>,
@@ -112,6 +113,7 @@ impl TestPlatform {
active_cursor: Default::default(),
active_display: Rc::new(TestDisplay::new()),
active_window: Default::default(),
+ expect_restart: Default::default(),
current_clipboard_item: Mutex::new(None),
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
current_primary_item: Mutex::new(None),
@@ -250,8 +252,10 @@ impl Platform for TestPlatform {
fn quit(&self) {}
- fn restart(&self, _: Option<PathBuf>) {
- //
+ fn restart(&self, path: Option<PathBuf>) {
+ if let Some(tx) = self.expect_restart.take() {
+ tx.send(path).unwrap();
+ }
}
fn activate(&self, _ignoring_other_apps: bool) {
@@ -753,9 +753,7 @@ impl WindowsPlatformInner {
}
match message {
WM_GPUI_CLOSE_ONE_WINDOW => {
- if self.close_one_window(HWND(lparam.0 as _)) {
- unsafe { PostQuitMessage(0) };
- }
+ self.close_one_window(HWND(lparam.0 as _));
Some(0)
}
WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD => self.run_foreground_task(),
@@ -453,8 +453,9 @@ impl WindowsWindow {
// Failure to create a `WindowsWindowState` can cause window creation to fail,
// so check the inner result first.
- let this = context.inner.take().unwrap()?;
+ let this = context.inner.take().transpose()?;
let hwnd = creation_result?;
+ let this = this.unwrap();
register_drag_drop(&this)?;
configure_dwm_dark_mode(hwnd, appearance);
@@ -31,6 +31,7 @@ parking_lot.workspace = true
reqwest.workspace = true
serde.workspace = true
serde_json.workspace = true
+serde_urlencoded.workspace = true
sha2.workspace = true
tempfile.workspace = true
url.workspace = true
@@ -13,6 +13,7 @@ use futures::{
future::{self, BoxFuture},
};
use parking_lot::Mutex;
+use serde::Serialize;
#[cfg(feature = "test-support")]
use std::fmt;
use std::{any::type_name, sync::Arc};
@@ -255,7 +256,7 @@ impl HttpClientWithUrl {
}
/// Builds a Zed Cloud URL using the given path.
- pub fn build_zed_cloud_url(&self, path: &str, query: &[(&str, &str)]) -> Result<Url> {
+ pub fn build_zed_cloud_url(&self, path: &str) -> Result<Url> {
let base_url = self.base_url();
let base_api_url = match base_url.as_ref() {
"https://zed.dev" => "https://cloud.zed.dev",
@@ -264,10 +265,20 @@ impl HttpClientWithUrl {
other => other,
};
- Ok(Url::parse_with_params(
- &format!("{}{}", base_api_url, path),
- query,
- )?)
+ Ok(Url::parse(&format!("{}{}", base_api_url, path))?)
+ }
+
+ /// Builds a Zed Cloud URL using the given path and query params.
+ pub fn build_zed_cloud_url_with_query(&self, path: &str, query: impl Serialize) -> Result<Url> {
+ let base_url = self.base_url();
+ let base_api_url = match base_url.as_ref() {
+ "https://zed.dev" => "https://cloud.zed.dev",
+ "https://staging.zed.dev" => "https://cloud.zed.dev",
+ "http://localhost:3000" => "http://localhost:8787",
+ other => other,
+ };
+ let query = serde_urlencoded::to_string(&query)?;
+ Ok(Url::parse(&format!("{}{}?{}", base_api_url, path, query))?)
}
/// Builds a Zed LLM URL using the given path.
@@ -2613,6 +2613,9 @@ pub fn rust_lang() -> Arc<Language> {
Some(tree_sitter_rust::LANGUAGE.into()),
)
.with_queries(LanguageQueries {
+ outline: Some(Cow::from(include_str!(
+ "../../languages/src/rust/outline.scm"
+ ))),
indents: Some(Cow::from(
r#"
[
@@ -195,7 +195,9 @@
(unary_expression "!" @operator)
operator: "/" @operator
-(lifetime) @lifetime
+(lifetime
+ "'" @lifetime
+ (identifier) @lifetime)
(parameter (identifier) @variable.parameter)
@@ -5,7 +5,7 @@ use fs::Fs;
use gpui::{Action, App, IntoElement};
use settings::{BaseKeymap, Settings, update_settings_file};
use theme::{
- Appearance, SystemAppearance, ThemeMode, ThemeName, ThemeRegistry, ThemeSelection,
+ Appearance, SystemAppearance, ThemeAppearanceMode, ThemeName, ThemeRegistry, ThemeSelection,
ThemeSettings,
};
use ui::{
@@ -44,8 +44,8 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
let theme_mode = theme_selection
.mode()
.unwrap_or_else(|| match *system_appearance {
- Appearance::Light => ThemeMode::Light,
- Appearance::Dark => ThemeMode::Dark,
+ Appearance::Light => ThemeAppearanceMode::Light,
+ Appearance::Dark => ThemeAppearanceMode::Dark,
});
return v_flex()
@@ -54,7 +54,12 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
h_flex().justify_between().child(Label::new("Theme")).child(
ToggleButtonGroup::single_row(
"theme-selector-onboarding-dark-light",
- [ThemeMode::Light, ThemeMode::Dark, ThemeMode::System].map(|mode| {
+ [
+ ThemeAppearanceMode::Light,
+ ThemeAppearanceMode::Dark,
+ ThemeAppearanceMode::System,
+ ]
+ .map(|mode| {
const MODE_NAMES: [SharedString; 3] = [
SharedString::new_static("Light"),
SharedString::new_static("Dark"),
@@ -100,13 +105,13 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
let theme_mode = theme_selection
.mode()
.unwrap_or_else(|| match *system_appearance {
- Appearance::Light => ThemeMode::Light,
- Appearance::Dark => ThemeMode::Dark,
+ Appearance::Light => ThemeAppearanceMode::Light,
+ Appearance::Dark => ThemeAppearanceMode::Dark,
});
let appearance = match theme_mode {
- ThemeMode::Light => Appearance::Light,
- ThemeMode::Dark => Appearance::Dark,
- ThemeMode::System => *system_appearance,
+ ThemeAppearanceMode::Light => Appearance::Light,
+ ThemeAppearanceMode::Dark => Appearance::Dark,
+ ThemeAppearanceMode::System => *system_appearance,
};
let current_theme_name: SharedString = theme_selection.name(appearance).0.into();
@@ -164,7 +169,7 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
}
})
.map(|this| {
- if theme_mode == ThemeMode::System {
+ if theme_mode == ThemeAppearanceMode::System {
let (light, dark) = (
theme_registry.get(LIGHT_THEMES[index]).unwrap(),
theme_registry.get(DARK_THEMES[index]).unwrap(),
@@ -189,23 +194,27 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
})
}
- fn write_mode_change(mode: ThemeMode, cx: &mut App) {
+ fn write_mode_change(mode: ThemeAppearanceMode, cx: &mut App) {
let fs = <dyn Fs>::global(cx);
update_settings_file(fs, cx, move |settings, _cx| {
theme::set_mode(settings, mode);
});
}
- fn write_theme_change(theme: impl Into<Arc<str>>, theme_mode: ThemeMode, cx: &mut App) {
+ fn write_theme_change(
+ theme: impl Into<Arc<str>>,
+ theme_mode: ThemeAppearanceMode,
+ cx: &mut App,
+ ) {
let fs = <dyn Fs>::global(cx);
let theme = theme.into();
update_settings_file(fs, cx, move |settings, cx| {
- if theme_mode == ThemeMode::System {
+ if theme_mode == ThemeAppearanceMode::System {
let (light_theme, dark_theme) =
get_theme_family_themes(&theme).unwrap_or((theme.as_ref(), theme.as_ref()));
settings.theme.theme = Some(settings::ThemeSelection::Dynamic {
- mode: ThemeMode::System,
+ mode: ThemeAppearanceMode::System,
light: ThemeName(light_theme.into()),
dark: ThemeName(dark_theme.into()),
});
@@ -6619,13 +6619,11 @@ outline: struct OutlineEntryExcerpt
format!(
r#"frontend-project/
public/lottie/
- syntax-tree.json
- search: {{ "something": "Β«staticΒ»" }}
+ syntax-tree.json <==== selected
src/
app/(site)/
components/
- ErrorBoundary.tsx <==== selected
- search: Β«staticΒ»"#
+ ErrorBoundary.tsx"#
)
);
});
@@ -6667,7 +6665,7 @@ outline: struct OutlineEntryExcerpt
format!(
r#"frontend-project/
public/lottie/
- syntax-tree.json
+ syntax-tree.json <==== selected
search: {{ "something": "Β«staticΒ»" }}
src/
app/(site)/
@@ -6678,7 +6676,7 @@ outline: struct OutlineEntryExcerpt
page.tsx
search: Β«staticΒ»
components/
- ErrorBoundary.tsx <==== selected
+ ErrorBoundary.tsx
search: Β«staticΒ»"#
)
);
@@ -261,7 +261,10 @@ impl DapStore {
.get(&adapter.name());
let user_installed_path = dap_settings.and_then(|s| match &s.binary {
DapBinary::Default => None,
- DapBinary::Custom(binary) => Some(PathBuf::from(binary)),
+ DapBinary::Custom(binary) => {
+ // if `binary` is absolute, `.join()` will keep it unmodified
+ Some(worktree.read(cx).abs_path().join(PathBuf::from(binary)))
+ }
});
let user_args = dap_settings.map(|s| s.args.clone());
let user_env = dap_settings.map(|s| s.env.clone());
@@ -1419,7 +1419,44 @@ impl GitStore {
diffs.remove(buffer_id);
}
}
+ BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
+ // Whenever a buffer's file path changes, it's possible that the
+ // new path is actually a path that is being tracked by a git
+ // repository. In that case, we'll want to update the buffer's
+ // `BufferDiffState`, in case it already has one.
+ let buffer_id = buffer.read(cx).remote_id();
+ let diff_state = self.diffs.get(&buffer_id);
+ let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
+
+ if let Some(diff_state) = diff_state
+ && let Some((repo, repo_path)) = repo
+ {
+ let buffer = buffer.clone();
+ let diff_state = diff_state.clone();
+
+ cx.spawn(async move |_git_store, cx| {
+ async {
+ let diff_bases_change = repo
+ .update(cx, |repo, cx| {
+ repo.load_committed_text(buffer_id, repo_path, cx)
+ })?
+ .await?;
+ diff_state.update(cx, |diff_state, cx| {
+ let buffer_snapshot = buffer.read(cx).text_snapshot();
+ diff_state.diff_bases_changed(
+ buffer_snapshot,
+ Some(diff_bases_change),
+ cx,
+ );
+ })
+ }
+ .await
+ .log_err();
+ })
+ .detach();
+ }
+ }
_ => {}
}
}
@@ -563,8 +563,8 @@ impl LocalLspStore {
allow_binary_download: bool,
cx: &mut App,
) -> Task<Result<LanguageServerBinary>> {
- if let Some(settings) = settings.binary.as_ref()
- && settings.path.is_some()
+ if let Some(settings) = &settings.binary
+ && let Some(path) = settings.path.as_ref().map(PathBuf::from)
{
let settings = settings.clone();
@@ -573,7 +573,8 @@ impl LocalLspStore {
env.extend(settings.env.unwrap_or_default());
Ok(LanguageServerBinary {
- path: PathBuf::from(&settings.path.unwrap()),
+ // if `path` is absolute, `.join()` will keep it unmodified
+ path: delegate.worktree_root_path().join(path),
env: Some(env),
arguments: settings
.arguments
@@ -7650,7 +7651,10 @@ impl LspStore {
let buffer = buffer.read(cx);
let file = File::from_dyn(buffer.file())?;
let abs_path = file.as_local()?.abs_path(cx);
- let uri = lsp::Uri::from_file_path(abs_path).unwrap();
+ let uri = lsp::Uri::from_file_path(&abs_path)
+ .ok()
+ .with_context(|| format!("Failed to convert path to URI: {}", abs_path.display()))
+ .unwrap();
let next_snapshot = buffer.text_snapshot();
for language_server in language_servers {
let language_server = language_server.clone();
@@ -1208,6 +1208,73 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
);
}
+#[gpui::test]
+async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
+ init_test(cx);
+
+ let settings_json_contents = json!({
+ "languages": {
+ "Rust": {
+ "language_servers": ["my_fake_lsp"]
+ }
+ },
+ "lsp": {
+ "my_fake_lsp": {
+ "binary": {
+ "path": path!("relative_path/to/my_fake_lsp_binary.exe").to_string(),
+ }
+ }
+ },
+ });
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/the-root"),
+ json!({
+ ".zed": {
+ "settings.json": settings_json_contents.to_string(),
+ },
+ "relative_path": {
+ "to": {
+ "my_fake_lsp_binary.exe": "",
+ },
+ },
+ "src": {
+ "main.rs": "",
+ }
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
+ let language_registry = project.read_with(cx, |project, _| project.languages().clone());
+ language_registry.add(rust_lang());
+
+ let mut fake_rust_servers = language_registry.register_fake_lsp(
+ "Rust",
+ FakeLspAdapter {
+ name: "my_fake_lsp",
+ ..Default::default()
+ },
+ );
+
+ cx.run_until_parked();
+
+ // Start the language server by opening a buffer with a compatible file extension.
+ project
+ .update(cx, |project, cx| {
+ project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
+ })
+ .await
+ .unwrap();
+
+ let lsp_path = fake_rust_servers.next().await.unwrap().binary.path;
+ assert_eq!(
+ lsp_path.to_string_lossy(),
+ path!("/the-root/relative_path/to/my_fake_lsp_binary.exe"),
+ );
+}
+
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
init_test(cx);
@@ -10045,6 +10112,120 @@ async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
+#[gpui::test]
+async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
+ init_test(cx);
+
+ let file_1_committed = String::from(r#"file_1_committed"#);
+ let file_1_staged = String::from(r#"file_1_staged"#);
+ let file_2_committed = String::from(r#"file_2_committed"#);
+ let file_2_staged = String::from(r#"file_2_staged"#);
+ let buffer_contents = String::from(r#"buffer"#);
+
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ path!("/dir"),
+ json!({
+ ".git": {},
+ "src": {
+ "file_1.rs": file_1_committed.clone(),
+ "file_2.rs": file_2_committed.clone(),
+ }
+ }),
+ )
+ .await;
+
+ fs.set_head_for_repo(
+ path!("/dir/.git").as_ref(),
+ &[
+ ("src/file_1.rs", file_1_committed.clone()),
+ ("src/file_2.rs", file_2_committed.clone()),
+ ],
+ "deadbeef",
+ );
+ fs.set_index_for_repo(
+ path!("/dir/.git").as_ref(),
+ &[
+ ("src/file_1.rs", file_1_staged.clone()),
+ ("src/file_2.rs", file_2_staged.clone()),
+ ],
+ );
+
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+
+ let buffer = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
+ })
+ .await
+ .unwrap();
+
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
+ });
+
+ let unstaged_diff = project
+ .update(cx, |project, cx| {
+ project.open_unstaged_diff(buffer.clone(), cx)
+ })
+ .await
+ .unwrap();
+
+ cx.run_until_parked();
+
+ unstaged_diff.update(cx, |unstaged_diff, _cx| {
+ let base_text = unstaged_diff.base_text_string().unwrap();
+ assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
+ });
+
+ // Save the buffer as `file_2.rs`, which should trigger the
+ // `BufferChangedFilePath` event.
+ project
+ .update(cx, |project, cx| {
+ let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
+ let path = ProjectPath {
+ worktree_id,
+ path: rel_path("src/file_2.rs").into(),
+ };
+ project.save_buffer_as(buffer.clone(), path, cx)
+ })
+ .await
+ .unwrap();
+
+ cx.run_until_parked();
+
+ // Verify that the diff bases have been updated to file_2's contents due to
+ // the `BufferChangedFilePath` event being handled.
+ unstaged_diff.update(cx, |unstaged_diff, cx| {
+ let snapshot = buffer.read(cx).snapshot();
+ let base_text = unstaged_diff.base_text_string().unwrap();
+ assert_eq!(
+ base_text, file_2_staged,
+ "Diff bases should be automatically updated to file_2 staged content"
+ );
+
+ let hunks: Vec<_> = unstaged_diff.hunks(&snapshot, cx).collect();
+ assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
+ });
+
+ let uncommitted_diff = project
+ .update(cx, |project, cx| {
+ project.open_uncommitted_diff(buffer.clone(), cx)
+ })
+ .await
+ .unwrap();
+
+ cx.run_until_parked();
+
+ uncommitted_diff.update(cx, |uncommitted_diff, _cx| {
+ let base_text = uncommitted_diff.base_text_string().unwrap();
+ assert_eq!(
+ base_text, file_2_committed,
+ "Uncommitted diff should compare against file_2 committed content"
+ );
+ });
+}
+
async fn search(
project: &Entity<Project>,
query: SearchQuery,
@@ -486,10 +486,10 @@ impl remote::RemoteClientDelegate for RemoteClientDelegate {
let this = self.clone();
cx.spawn(async move |cx| {
AutoUpdater::download_remote_server_release(
- platform.os,
- platform.arch,
release_channel,
version,
+ platform.os,
+ platform.arch,
move |status, cx| this.set_status(Some(status), cx),
cx,
)
@@ -507,19 +507,19 @@ impl remote::RemoteClientDelegate for RemoteClientDelegate {
})
}
- fn get_download_params(
+ fn get_download_url(
&self,
platform: RemotePlatform,
release_channel: ReleaseChannel,
version: Option<SemanticVersion>,
cx: &mut AsyncApp,
- ) -> Task<Result<Option<(String, String)>>> {
+ ) -> Task<Result<Option<String>>> {
cx.spawn(async move |cx| {
AutoUpdater::get_remote_server_release_url(
- platform.os,
- platform.arch,
release_channel,
version,
+ platform.os,
+ platform.arch,
cx,
)
.await
@@ -126,6 +126,12 @@ pub fn init(app_version: SemanticVersion, cx: &mut App) {
cx.set_global(GlobalReleaseChannel(*RELEASE_CHANNEL))
}
+/// Initializes the app version and release channel globals for tests that need a specific release channel.
+pub fn init_test(app_version: SemanticVersion, release_channel: ReleaseChannel, cx: &mut App) {
+ cx.set_global(GlobalAppVersion(app_version));
+ cx.set_global(GlobalReleaseChannel(release_channel))
+}
+
impl ReleaseChannel {
/// Returns the global [`ReleaseChannel`].
pub fn global(cx: &App) -> Self {
@@ -67,13 +67,13 @@ pub trait RemoteClientDelegate: Send + Sync {
tx: oneshot::Sender<EncryptedPassword>,
cx: &mut AsyncApp,
);
- fn get_download_params(
+ fn get_download_url(
&self,
platform: RemotePlatform,
release_channel: ReleaseChannel,
version: Option<SemanticVersion>,
cx: &mut AsyncApp,
- ) -> Task<Result<Option<(String, String)>>>;
+ ) -> Task<Result<Option<String>>>;
fn download_server_binary_locally(
&self,
platform: RemotePlatform,
@@ -1669,13 +1669,13 @@ mod fake {
unreachable!()
}
- fn get_download_params(
+ fn get_download_url(
&self,
_platform: RemotePlatform,
_release_channel: ReleaseChannel,
_version: Option<SemanticVersion>,
_cx: &mut AsyncApp,
- ) -> Task<Result<Option<(String, String)>>> {
+ ) -> Task<Result<Option<String>>> {
unreachable!()
}
@@ -606,12 +606,12 @@ impl SshRemoteConnection {
.unwrap(),
);
if !self.socket.connection_options.upload_binary_over_ssh
- && let Some((url, body)) = delegate
- .get_download_params(self.ssh_platform, release_channel, wanted_version, cx)
+ && let Some(url) = delegate
+ .get_download_url(self.ssh_platform, release_channel, wanted_version, cx)
.await?
{
match self
- .download_binary_on_server(&url, &body, &tmp_path_gz, delegate, cx)
+ .download_binary_on_server(&url, &tmp_path_gz, delegate, cx)
.await
{
Ok(_) => {
@@ -644,7 +644,6 @@ impl SshRemoteConnection {
async fn download_binary_on_server(
&self,
url: &str,
- body: &str,
tmp_path_gz: &RelPath,
delegate: &Arc<dyn RemoteClientDelegate>,
cx: &mut AsyncApp,
@@ -670,12 +669,6 @@ impl SshRemoteConnection {
&[
"-f",
"-L",
- "-X",
- "GET",
- "-H",
- "Content-Type: application/json",
- "-d",
- body,
url,
"-o",
&tmp_path_gz.display(self.path_style()),
@@ -700,14 +693,7 @@ impl SshRemoteConnection {
.run_command(
self.ssh_shell_kind,
"wget",
- &[
- "--header=Content-Type: application/json",
- "--body-data",
- body,
- url,
- "-O",
- &tmp_path_gz.display(self.path_style()),
- ],
+ &[url, "-O", &tmp_path_gz.display(self.path_style())],
true,
)
.await
@@ -127,12 +127,6 @@ pub struct BufferSearchBar {
regex_language: Option<Arc<Language>>,
}
-impl BufferSearchBar {
- pub fn query_editor_focused(&self) -> bool {
- self.query_editor_focused
- }
-}
-
impl EventEmitter<Event> for BufferSearchBar {}
impl EventEmitter<workspace::ToolbarItemEvent> for BufferSearchBar {}
impl Render for BufferSearchBar {
@@ -521,6 +515,10 @@ impl ToolbarItemView for BufferSearchBar {
}
impl BufferSearchBar {
+ pub fn query_editor_focused(&self) -> bool {
+ self.query_editor_focused
+ }
+
pub fn register(registrar: &mut impl SearchActionsRegistrar) {
registrar.register_handler(ForDeployed(|this, _: &FocusSearch, window, cx| {
this.query_editor.focus_handle(cx).focus(window);
@@ -696,6 +694,8 @@ impl BufferSearchBar {
pub fn dismiss(&mut self, _: &Dismiss, window: &mut Window, cx: &mut Context<Self>) {
self.dismissed = true;
self.query_error = None;
+ self.sync_select_next_case_sensitivity(cx);
+
for searchable_item in self.searchable_items_with_matches.keys() {
if let Some(searchable_item) =
WeakSearchableItemHandle::upgrade(searchable_item.as_ref(), cx)
@@ -711,6 +711,7 @@ impl BufferSearchBar {
let handle = active_editor.item_focus_handle(cx);
self.focus(&handle, window);
}
+
cx.emit(Event::UpdateLocation);
cx.emit(ToolbarItemEvent::ChangeLocation(
ToolbarItemLocation::Hidden,
@@ -730,6 +731,7 @@ impl BufferSearchBar {
}
self.search_suggested(window, cx);
self.smartcase(window, cx);
+ self.sync_select_next_case_sensitivity(cx);
self.replace_enabled = deploy.replace_enabled;
self.selection_search_enabled = if deploy.selection_search_enabled {
Some(FilteredSearchRange::Default)
@@ -919,6 +921,7 @@ impl BufferSearchBar {
self.default_options = self.search_options;
drop(self.update_matches(false, false, window, cx));
self.adjust_query_regex_language(cx);
+ self.sync_select_next_case_sensitivity(cx);
cx.notify();
}
@@ -953,6 +956,7 @@ impl BufferSearchBar {
pub fn set_search_options(&mut self, search_options: SearchOptions, cx: &mut Context<Self>) {
self.search_options = search_options;
self.adjust_query_regex_language(cx);
+ self.sync_select_next_case_sensitivity(cx);
cx.notify();
}
@@ -1507,6 +1511,7 @@ impl BufferSearchBar {
.read(cx)
.as_singleton()
.expect("query editor should be backed by a singleton buffer");
+
if enable {
if let Some(regex_language) = self.regex_language.clone() {
query_buffer.update(cx, |query_buffer, cx| {
@@ -1519,6 +1524,24 @@ impl BufferSearchBar {
})
}
}
+
+ /// Updates the searchable item's case sensitivity option to match the
+ /// search bar's current case sensitivity setting. This ensures that
+ /// editor's `select_next`/ `select_previous` operations respect the buffer
+ /// search bar's search options.
+ ///
+ /// Clears the case sensitivity when the search bar is dismissed so that
+ /// only the editor's settings are respected.
+ fn sync_select_next_case_sensitivity(&self, cx: &mut Context<Self>) {
+ let case_sensitive = match self.dismissed {
+ true => None,
+ false => Some(self.search_options.contains(SearchOptions::CASE_SENSITIVE)),
+ };
+
+ if let Some(active_searchable_item) = self.active_searchable_item.as_ref() {
+ active_searchable_item.set_search_is_case_sensitive(case_sensitive, cx);
+ }
+ }
}
#[cfg(test)]
@@ -1528,7 +1551,7 @@ mod tests {
use super::*;
use editor::{
DisplayPoint, Editor, MultiBuffer, SearchSettings, SelectionEffects,
- display_map::DisplayRow,
+ display_map::DisplayRow, test::editor_test_context::EditorTestContext,
};
use gpui::{Hsla, TestAppContext, UpdateGlobal, VisualTestContext};
use language::{Buffer, Point};
@@ -2963,6 +2986,61 @@ mod tests {
});
}
+ #[gpui::test]
+ async fn test_select_occurrence_case_sensitivity(cx: &mut TestAppContext) {
+ let (editor, search_bar, cx) = init_test(cx);
+ let mut editor_cx = EditorTestContext::for_editor_in(editor, cx).await;
+
+ // Start with case sensitive search settings.
+ let mut search_settings = SearchSettings::default();
+ search_settings.case_sensitive = true;
+ update_search_settings(search_settings, cx);
+ search_bar.update(cx, |search_bar, cx| {
+ let mut search_options = search_bar.search_options;
+ search_options.insert(SearchOptions::CASE_SENSITIVE);
+ search_bar.set_search_options(search_options, cx);
+ });
+
+ editor_cx.set_state("Β«ΛfooΒ»\nFOO\nFoo\nfoo");
+ editor_cx.update_editor(|e, window, cx| {
+ e.select_next(&Default::default(), window, cx).unwrap();
+ });
+ editor_cx.assert_editor_state("Β«ΛfooΒ»\nFOO\nFoo\nΒ«ΛfooΒ»");
+
+ // Update the search bar's case sensitivity toggle, so we can later
+ // confirm that `select_next` will now be case-insensitive.
+ editor_cx.set_state("Β«ΛfooΒ»\nFOO\nFoo\nfoo");
+ search_bar.update_in(cx, |search_bar, window, cx| {
+ search_bar.toggle_case_sensitive(&Default::default(), window, cx);
+ });
+ editor_cx.update_editor(|e, window, cx| {
+ e.select_next(&Default::default(), window, cx).unwrap();
+ });
+ editor_cx.assert_editor_state("Β«ΛfooΒ»\nΒ«ΛFOOΒ»\nFoo\nfoo");
+
+ // Confirm that, after dismissing the search bar, only the editor's
+ // search settings actually affect the behavior of `select_next`.
+ search_bar.update_in(cx, |search_bar, window, cx| {
+ search_bar.dismiss(&Default::default(), window, cx);
+ });
+ editor_cx.set_state("Β«ΛfooΒ»\nFOO\nFoo\nfoo");
+ editor_cx.update_editor(|e, window, cx| {
+ e.select_next(&Default::default(), window, cx).unwrap();
+ });
+ editor_cx.assert_editor_state("Β«ΛfooΒ»\nFOO\nFoo\nΒ«ΛfooΒ»");
+
+ // Update the editor's search settings, disabling case sensitivity, to
+ // check that the value is respected.
+ let mut search_settings = SearchSettings::default();
+ search_settings.case_sensitive = false;
+ update_search_settings(search_settings, cx);
+ editor_cx.set_state("Β«ΛfooΒ»\nFOO\nFoo\nfoo");
+ editor_cx.update_editor(|e, window, cx| {
+ e.select_next(&Default::default(), window, cx).unwrap();
+ });
+ editor_cx.assert_editor_state("Β«ΛfooΒ»\nΒ«ΛFOOΒ»\nFoo\nfoo");
+ }
+
fn update_search_settings(search_settings: SearchSettings, cx: &mut TestAppContext) {
cx.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
@@ -176,6 +176,8 @@ pub struct AgentProfileContent {
pub enable_all_context_servers: Option<bool>,
#[serde(default)]
pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
+ /// The default language model selected when using this profile.
+ pub default_model: Option<LanguageModelSelection>,
}
#[skip_serializing_none]
@@ -96,6 +96,10 @@ pub struct EditorSettingsContent {
/// Default: 4.0
#[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
pub fast_scroll_sensitivity: Option<f32>,
+ /// Settings for sticking scopes to the top of the editor.
+ ///
+ /// Default: sticky scroll is disabled
+ pub sticky_scroll: Option<StickyScrollContent>,
/// Whether the line numbers on editors gutter are relative or not.
/// When "enabled" shows relative number of buffer lines, when "wrapped" shows
/// relative number of display lines.
@@ -312,6 +316,16 @@ pub struct ScrollbarContent {
pub axes: Option<ScrollbarAxesContent>,
}
+/// Sticky scroll related settings
+#[skip_serializing_none]
+#[derive(Clone, Default, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq)]
+pub struct StickyScrollContent {
+ /// Whether sticky scroll is enabled.
+ ///
+ /// Default: false
+ pub enabled: Option<bool>,
+}
+
/// Minimap related settings
#[skip_serializing_none]
#[derive(Clone, Default, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq)]
@@ -745,9 +759,13 @@ pub enum SnippetSortOrder {
pub struct SearchSettingsContent {
/// Whether to show the project search button in the status bar.
pub button: Option<bool>,
+ /// Whether to only match on whole words.
pub whole_word: Option<bool>,
+ /// Whether to match case sensitively.
pub case_sensitive: Option<bool>,
+ /// Whether to include gitignored files in search results.
pub include_ignored: Option<bool>,
+ /// Whether to interpret the search query as a regular expression.
pub regex: Option<bool>,
/// Whether to center the cursor on each search match when navigating.
pub center_on_match: Option<bool>,
@@ -108,7 +108,17 @@ pub struct WorktreeSettingsContent {
#[serde(rename_all = "snake_case")]
pub struct LspSettings {
pub binary: Option<BinarySettings>,
+ /// Options passed to the language server at startup.
+ ///
+ /// Ref: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#initialize
+ ///
+ /// Consult the documentation for the specific language server to see which settings are supported.
pub initialization_options: Option<serde_json::Value>,
+ /// Language server settings.
+ ///
+ /// Ref: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#workspace_configuration
+ ///
+ /// Consult the documentation for the specific language server to see which settings are supported.
pub settings: Option<serde_json::Value>,
/// If the server supports sending tasks over LSP extensions,
/// this setting can be used to enable or disable them in Zed.
@@ -157,7 +157,7 @@ pub enum ThemeSelection {
Dynamic {
/// The mode used to determine which theme to use.
#[serde(default)]
- mode: ThemeMode,
+ mode: ThemeAppearanceMode,
/// The theme to use for light mode.
light: ThemeName,
/// The theme to use for dark mode.
@@ -186,7 +186,7 @@ pub enum IconThemeSelection {
Dynamic {
/// The mode used to determine which theme to use.
#[serde(default)]
- mode: ThemeMode,
+ mode: ThemeAppearanceMode,
/// The icon theme to use for light mode.
light: IconThemeName,
/// The icon theme to use for dark mode.
@@ -194,7 +194,6 @@ pub enum IconThemeSelection {
},
}
-// TODO: Rename ThemeMode -> ThemeAppearanceMode
/// The mode use to select a theme.
///
/// `Light` and `Dark` will select their respective themes.
@@ -215,7 +214,7 @@ pub enum IconThemeSelection {
strum::VariantNames,
)]
#[serde(rename_all = "snake_case")]
-pub enum ThemeMode {
+pub enum ThemeAppearanceMode {
/// Use the specified `light` theme.
Light,
@@ -255,6 +255,7 @@ impl VsCodeSettings {
excerpt_context_lines: None,
expand_excerpt_lines: None,
fast_scroll_sensitivity: self.read_f32("editor.fastScrollSensitivity"),
+ sticky_scroll: self.sticky_scroll_content(),
go_to_definition_fallback: None,
gutter: self.gutter_content(),
hide_mouse: None,
@@ -303,6 +304,12 @@ impl VsCodeSettings {
}
}
+ fn sticky_scroll_content(&self) -> Option<StickyScrollContent> {
+ skip_default(StickyScrollContent {
+ enabled: self.read_bool("editor.stickyScroll.enabled"),
+ })
+ }
+
fn gutter_content(&self) -> Option<GutterContent> {
skip_default(GutterContent {
line_numbers: self.read_enum("editor.lineNumbers", |s| match s {
@@ -300,9 +300,9 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
settings::ThemeSelection::Static(_) => return,
settings::ThemeSelection::Dynamic { mode, light, dark } => {
match mode {
- theme::ThemeMode::Light => light.clone(),
- theme::ThemeMode::Dark => dark.clone(),
- theme::ThemeMode::System => dark.clone(), // no cx, can't determine correct choice
+ theme::ThemeAppearanceMode::Light => light.clone(),
+ theme::ThemeAppearanceMode::Dark => dark.clone(),
+ theme::ThemeAppearanceMode::System => dark.clone(), // no cx, can't determine correct choice
}
},
};
@@ -315,7 +315,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
};
settings::ThemeSelection::Dynamic {
- mode: settings::ThemeMode::System,
+ mode: settings::ThemeAppearanceMode::System,
light: static_name.clone(),
dark: static_name,
}
@@ -470,9 +470,9 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
settings::IconThemeSelection::Static(_) => return,
settings::IconThemeSelection::Dynamic { mode, light, dark } => {
match mode {
- theme::ThemeMode::Light => light.clone(),
- theme::ThemeMode::Dark => dark.clone(),
- theme::ThemeMode::System => dark.clone(), // no cx, can't determine correct choice
+ theme::ThemeAppearanceMode::Light => light.clone(),
+ theme::ThemeAppearanceMode::Dark => dark.clone(),
+ theme::ThemeAppearanceMode::System => dark.clone(), // no cx, can't determine correct choice
}
},
};
@@ -485,7 +485,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
};
settings::IconThemeSelection::Dynamic {
- mode: settings::ThemeMode::System,
+ mode: settings::ThemeAppearanceMode::System,
light: static_name.clone(),
dark: static_name,
}
@@ -1352,6 +1352,21 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
metadata: None,
files: USER,
}),
+ SettingsPageItem::SettingItem(SettingItem {
+ title: "Sticky Scroll",
+ description: "Whether to stick scopes to the top of the editor",
+ field: Box::new(SettingField {
+ json_path: Some("sticky_scroll.enabled"),
+ pick: |settings_content| {
+ settings_content.editor.sticky_scroll.as_ref().and_then(|sticky_scroll| sticky_scroll.enabled.as_ref())
+ },
+ write: |settings_content, value| {
+ settings_content.editor.sticky_scroll.get_or_insert_default().enabled = value;
+ },
+ }),
+ metadata: None,
+ files: USER,
+ }),
SettingsPageItem::SectionHeader("Signature Help"),
SettingsPageItem::SettingItem(SettingItem {
title: "Auto Signature Help",
@@ -486,7 +486,7 @@ fn init_renderers(cx: &mut App) {
.add_basic_renderer::<settings::PaneSplitDirectionVertical>(render_dropdown)
.add_basic_renderer::<settings::DocumentColorsRenderMode>(render_dropdown)
.add_basic_renderer::<settings::ThemeSelectionDiscriminants>(render_dropdown)
- .add_basic_renderer::<settings::ThemeMode>(render_dropdown)
+ .add_basic_renderer::<settings::ThemeAppearanceMode>(render_dropdown)
.add_basic_renderer::<settings::ThemeName>(render_theme_picker)
.add_basic_renderer::<settings::IconThemeSelectionDiscriminants>(render_dropdown)
.add_basic_renderer::<settings::IconThemeName>(render_icon_theme_picker)
@@ -46,7 +46,7 @@ impl AlacModifiers {
pub fn to_esc_str(
keystroke: &Keystroke,
mode: &TermMode,
- alt_is_meta: bool,
+ option_as_meta: bool,
) -> Option<Cow<'static, str>> {
let modifiers = AlacModifiers::new(keystroke);
@@ -218,7 +218,7 @@ pub fn to_esc_str(
}
}
- if alt_is_meta {
+ if !cfg!(target_os = "macos") || option_as_meta {
let is_alt_lowercase_ascii = modifiers == AlacModifiers::Alt && keystroke.key.is_ascii();
let is_alt_uppercase_ascii =
keystroke.modifiers.alt && keystroke.modifiers.shift && keystroke.key.is_ascii();
@@ -383,7 +383,7 @@ impl TerminalBuilder {
selection_phase: SelectionPhase::Ended,
hyperlink_regex_searches: RegexSearches::new(),
vi_mode_enabled: false,
- is_ssh_terminal: false,
+ is_remote_terminal: false,
last_mouse_move_time: Instant::now(),
last_hyperlink_search_position: None,
#[cfg(windows)]
@@ -415,7 +415,7 @@ impl TerminalBuilder {
cursor_shape: CursorShape,
alternate_scroll: AlternateScroll,
max_scroll_history_lines: Option<usize>,
- is_ssh_terminal: bool,
+ is_remote_terminal: bool,
window_id: u64,
completion_tx: Option<Sender<Option<ExitStatus>>>,
cx: &App,
@@ -601,7 +601,7 @@ impl TerminalBuilder {
selection_phase: SelectionPhase::Ended,
hyperlink_regex_searches: RegexSearches::new(),
vi_mode_enabled: false,
- is_ssh_terminal,
+ is_remote_terminal,
last_mouse_move_time: Instant::now(),
last_hyperlink_search_position: None,
#[cfg(windows)]
@@ -646,7 +646,7 @@ impl TerminalBuilder {
})
};
// the thread we spawn things on has an effect on signal handling
- if cfg!(target_os = "unix") {
+ if !cfg!(target_os = "windows") {
cx.spawn(async move |_| fut.await)
} else {
cx.background_spawn(fut)
@@ -828,7 +828,7 @@ pub struct Terminal {
hyperlink_regex_searches: RegexSearches,
task: Option<TaskState>,
vi_mode_enabled: bool,
- is_ssh_terminal: bool,
+ is_remote_terminal: bool,
last_mouse_move_time: Instant,
last_hyperlink_search_position: Option<Point<Pixels>>,
#[cfg(windows)]
@@ -1490,14 +1490,14 @@ impl Terminal {
}
}
- pub fn try_keystroke(&mut self, keystroke: &Keystroke, alt_is_meta: bool) -> bool {
+ pub fn try_keystroke(&mut self, keystroke: &Keystroke, option_as_meta: bool) -> bool {
if self.vi_mode_enabled {
self.vi_motion(keystroke);
return true;
}
// Keep default terminal behavior
- let esc = to_esc_str(keystroke, &self.last_content.mode, alt_is_meta);
+ let esc = to_esc_str(keystroke, &self.last_content.mode, option_as_meta);
if let Some(esc) = esc {
match esc {
Cow::Borrowed(string) => self.input(string.as_bytes()),
@@ -1959,7 +1959,7 @@ impl Terminal {
}
pub fn working_directory(&self) -> Option<PathBuf> {
- if self.is_ssh_terminal {
+ if self.is_remote_terminal {
// We can't yet reliably detect the working directory of a shell on the
// SSH host. Until we can do that, it doesn't make sense to display
// the working directory on the client and persist that.
@@ -2158,7 +2158,7 @@ impl Terminal {
self.template.cursor_shape,
self.template.alternate_scroll,
self.template.max_scroll_history_lines,
- self.is_ssh_terminal,
+ self.is_remote_terminal,
self.template.window_id,
None,
cx,
@@ -11,7 +11,7 @@ use gpui::{
use refineable::Refineable;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-pub use settings::{FontFamilyName, IconThemeName, ThemeMode, ThemeName};
+pub use settings::{FontFamilyName, IconThemeName, ThemeAppearanceMode, ThemeName};
use settings::{RegisterSetting, Settings, SettingsContent};
use std::sync::Arc;
@@ -208,7 +208,7 @@ pub enum ThemeSelection {
Dynamic {
/// The mode used to determine which theme to use.
#[serde(default)]
- mode: ThemeMode,
+ mode: ThemeAppearanceMode,
/// The theme to use for light mode.
light: ThemeName,
/// The theme to use for dark mode.
@@ -233,9 +233,9 @@ impl ThemeSelection {
match self {
Self::Static(theme) => theme.clone(),
Self::Dynamic { mode, light, dark } => match mode {
- ThemeMode::Light => light.clone(),
- ThemeMode::Dark => dark.clone(),
- ThemeMode::System => match system_appearance {
+ ThemeAppearanceMode::Light => light.clone(),
+ ThemeAppearanceMode::Dark => dark.clone(),
+ ThemeAppearanceMode::System => match system_appearance {
Appearance::Light => light.clone(),
Appearance::Dark => dark.clone(),
},
@@ -244,7 +244,7 @@ impl ThemeSelection {
}
/// Returns the [ThemeMode] for the [ThemeSelection].
- pub fn mode(&self) -> Option<ThemeMode> {
+ pub fn mode(&self) -> Option<ThemeAppearanceMode> {
match self {
ThemeSelection::Static(_) => None,
ThemeSelection::Dynamic { mode, .. } => Some(*mode),
@@ -260,7 +260,7 @@ pub enum IconThemeSelection {
/// A dynamic icon theme selection, which can change based on the [`ThemeMode`].
Dynamic {
/// The mode used to determine which theme to use.
- mode: ThemeMode,
+ mode: ThemeAppearanceMode,
/// The icon theme to use for light mode.
light: IconThemeName,
/// The icon theme to use for dark mode.
@@ -285,9 +285,9 @@ impl IconThemeSelection {
match self {
Self::Static(theme) => theme.clone(),
Self::Dynamic { mode, light, dark } => match mode {
- ThemeMode::Light => light.clone(),
- ThemeMode::Dark => dark.clone(),
- ThemeMode::System => match system_appearance {
+ ThemeAppearanceMode::Light => light.clone(),
+ ThemeAppearanceMode::Dark => dark.clone(),
+ ThemeAppearanceMode::System => match system_appearance {
Appearance::Light => light.clone(),
Appearance::Dark => dark.clone(),
},
@@ -296,7 +296,7 @@ impl IconThemeSelection {
}
/// Returns the [`ThemeMode`] for the [`IconThemeSelection`].
- pub fn mode(&self) -> Option<ThemeMode> {
+ pub fn mode(&self) -> Option<ThemeAppearanceMode> {
match self {
IconThemeSelection::Static(_) => None,
IconThemeSelection::Dynamic { mode, .. } => Some(*mode),
@@ -315,9 +315,9 @@ pub fn set_theme(
let theme_to_update = match selection {
settings::ThemeSelection::Static(theme) => theme,
settings::ThemeSelection::Dynamic { mode, light, dark } => match mode {
- ThemeMode::Light => light,
- ThemeMode::Dark => dark,
- ThemeMode::System => match appearance {
+ ThemeAppearanceMode::Light => light,
+ ThemeAppearanceMode::Dark => dark,
+ ThemeAppearanceMode::System => match appearance {
Appearance::Light => light,
Appearance::Dark => dark,
},
@@ -342,9 +342,9 @@ pub fn set_icon_theme(
let icon_theme_to_update = match selection {
settings::IconThemeSelection::Static(theme) => theme,
settings::IconThemeSelection::Dynamic { mode, light, dark } => match mode {
- ThemeMode::Light => light,
- ThemeMode::Dark => dark,
- ThemeMode::System => match appearance {
+ ThemeAppearanceMode::Light => light,
+ ThemeAppearanceMode::Dark => dark,
+ ThemeAppearanceMode::System => match appearance {
Appearance::Light => light,
Appearance::Dark => dark,
},
@@ -358,7 +358,7 @@ pub fn set_icon_theme(
}
/// Sets the mode for the theme.
-pub fn set_mode(content: &mut SettingsContent, mode: ThemeMode) {
+pub fn set_mode(content: &mut SettingsContent, mode: ThemeAppearanceMode) {
let theme = content.theme.as_mut();
if let Some(selection) = theme.theme.as_mut() {
@@ -227,6 +227,15 @@ impl SystemWindowTabs {
window.activate_window();
});
})
+ .on_mouse_up(MouseButton::Middle, move |_, window, cx| {
+ if item.handle.window_id() == window.window_handle().window_id() {
+ window.dispatch_action(Box::new(CloseWindow), cx);
+ } else {
+ let _ = item.handle.update(cx, |_, window, cx| {
+ window.dispatch_action(Box::new(CloseWindow), cx);
+ });
+ }
+ })
.child(label)
.map(|this| match show_close_button {
ShowCloseButton::Hidden => this,
@@ -725,6 +725,8 @@ struct VimCommand {
args: Option<
Box<dyn Fn(Box<dyn Action>, String) -> Option<Box<dyn Action>> + Send + Sync + 'static>,
>,
+    /// Optional range to use if no range is specified.
+ default_range: Option<CommandRange>,
range: Option<
Box<
dyn Fn(Box<dyn Action>, &CommandRange) -> Option<Box<dyn Action>>
@@ -793,6 +795,11 @@ impl VimCommand {
self
}
+ fn default_range(mut self, range: CommandRange) -> Self {
+ self.default_range = Some(range);
+ self
+ }
+
fn count(mut self) -> Self {
self.has_count = true;
self
@@ -923,6 +930,7 @@ impl VimCommand {
self.args.as_ref()?(action, args)?
};
+ let range = range.as_ref().or(self.default_range.as_ref());
if let Some(range) = range {
self.range.as_ref().and_then(|f| f(action, range))
} else {
@@ -1121,6 +1129,7 @@ impl CommandRange {
self.end.as_ref().unwrap_or(&self.start)
}
+ /// Convert the `CommandRange` into a `Range<MultiBufferRow>`.
pub(crate) fn buffer_range(
&self,
vim: &Vim,
@@ -1152,6 +1161,14 @@ impl CommandRange {
None
}
}
+
+ /// The `CommandRange` representing the entire buffer.
+ fn buffer() -> Self {
+ Self {
+ start: Position::Line { row: 1, offset: 0 },
+ end: Some(Position::LastLine { offset: 0 }),
+ }
+ }
}
fn generate_commands(_: &App) -> Vec<VimCommand> {
@@ -1421,8 +1438,12 @@ fn generate_commands(_: &App) -> Vec<VimCommand> {
VimCommand::new(("delm", "arks"), ArgumentRequired)
.bang(DeleteMarks::AllLocal)
.args(|_, args| Some(DeleteMarks::Marks(args).boxed_clone())),
- VimCommand::new(("sor", "t"), SortLinesCaseSensitive).range(select_range),
- VimCommand::new(("sort i", ""), SortLinesCaseInsensitive).range(select_range),
+ VimCommand::new(("sor", "t"), SortLinesCaseSensitive)
+ .range(select_range)
+ .default_range(CommandRange::buffer()),
+ VimCommand::new(("sort i", ""), SortLinesCaseInsensitive)
+ .range(select_range)
+ .default_range(CommandRange::buffer()),
VimCommand::str(("E", "xplore"), "project_panel::ToggleFocus"),
VimCommand::str(("H", "explore"), "project_panel::ToggleFocus"),
VimCommand::str(("L", "explore"), "project_panel::ToggleFocus"),
@@ -2898,4 +2919,112 @@ mod test {
);
});
}
+
+ #[gpui::test]
+ async fn test_sort_commands(cx: &mut TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+
+ cx.set_state(
+ indoc! {"
+ Β«hornet
+ quirrel
+ elderbug
+ cornifer
+ idaΛΒ»
+ "},
+ Mode::Visual,
+ );
+
+ cx.simulate_keystrokes(": sort");
+ cx.simulate_keystrokes("enter");
+
+ cx.assert_state(
+ indoc! {"
+ Λcornifer
+ elderbug
+ hornet
+ ida
+ quirrel
+ "},
+ Mode::Normal,
+ );
+
+ // Assert that, by default, `:sort` takes case into consideration.
+ cx.set_state(
+ indoc! {"
+ Β«hornet
+ quirrel
+ Elderbug
+ cornifer
+ idaΛΒ»
+ "},
+ Mode::Visual,
+ );
+
+ cx.simulate_keystrokes(": sort");
+ cx.simulate_keystrokes("enter");
+
+ cx.assert_state(
+ indoc! {"
+ ΛElderbug
+ cornifer
+ hornet
+ ida
+ quirrel
+ "},
+ Mode::Normal,
+ );
+
+ // Assert that, if the `i` option is passed, `:sort` ignores case.
+ cx.set_state(
+ indoc! {"
+ Β«hornet
+ quirrel
+ Elderbug
+ cornifer
+ idaΛΒ»
+ "},
+ Mode::Visual,
+ );
+
+ cx.simulate_keystrokes(": sort space i");
+ cx.simulate_keystrokes("enter");
+
+ cx.assert_state(
+ indoc! {"
+ Λcornifer
+ Elderbug
+ hornet
+ ida
+ quirrel
+ "},
+ Mode::Normal,
+ );
+
+ // When no range is provided, sorts the whole buffer.
+ cx.set_state(
+ indoc! {"
+ Λhornet
+ quirrel
+ elderbug
+ cornifer
+ ida
+ "},
+ Mode::Normal,
+ );
+
+ cx.simulate_keystrokes(": sort");
+ cx.simulate_keystrokes("enter");
+
+ cx.assert_state(
+ indoc! {"
+ Λcornifer
+ elderbug
+ hornet
+ ida
+ quirrel
+ "},
+ Mode::Normal,
+ );
+ }
}
@@ -76,17 +76,18 @@ impl Vim {
Point::new(row, snapshot.line_len(multi_buffer::MultiBufferRow(row)))
};
- let number_result = if !selection.is_empty() {
- find_number_in_range(&snapshot, start, end)
+ let find_result = if !selection.is_empty() {
+ find_target(&snapshot, start, end, true)
} else {
- find_number(&snapshot, start)
+ find_target(&snapshot, start, end, false)
};
- if let Some((range, num, radix)) = number_result {
+ if let Some((range, target, radix)) = find_result {
let replace = match radix {
- 10 => increment_decimal_string(&num, delta),
- 16 => increment_hex_string(&num, delta),
- 2 => increment_binary_string(&num, delta),
+ 10 => increment_decimal_string(&target, delta),
+ 16 => increment_hex_string(&target, delta),
+ 2 => increment_binary_string(&target, delta),
+ 0 => increment_toggle_string(&target),
_ => unreachable!(),
};
delta += step as i64;
@@ -94,13 +95,6 @@ impl Vim {
if selection.is_empty() {
new_anchors.push((false, snapshot.anchor_after(range.end)))
}
- } else if let Some((range, boolean)) = find_boolean(&snapshot, start) {
- let replace = toggle_boolean(&boolean);
- delta += step as i64;
- edits.push((range.clone(), replace));
- if selection.is_empty() {
- new_anchors.push((false, snapshot.anchor_after(range.end)))
- }
} else if selection.is_empty() {
new_anchors.push((true, snapshot.anchor_after(start)))
}
@@ -200,83 +194,127 @@ fn increment_binary_string(num: &str, delta: i64) -> String {
format!("{:0width$b}", result, width = num.len())
}
-fn find_number_in_range(
+fn find_target(
snapshot: &MultiBufferSnapshot,
start: Point,
end: Point,
+ need_range: bool,
) -> Option<(Range<Point>, String, u32)> {
let start_offset = start.to_offset(snapshot);
let end_offset = end.to_offset(snapshot);
let mut offset = start_offset;
+ let mut first_char_is_num = snapshot
+ .chars_at(offset)
+ .next()
+ .map_or(false, |ch| ch.is_ascii_hexdigit());
+ let mut pre_char = String::new();
// Backward scan to find the start of the number, but stop at start_offset
- for ch in snapshot.reversed_chars_at(offset) {
- if ch.is_ascii_hexdigit() || ch == '-' || ch == 'b' || ch == 'x' {
- if offset == 0 {
- break;
- }
- offset -= ch.len_utf8();
- if offset < start_offset {
- offset = start_offset;
+ for ch in snapshot.reversed_chars_at(offset + 1) {
+ // Search boundaries
+ if offset == 0 || ch.is_whitespace() || (need_range && offset <= start_offset) {
+ break;
+ }
+
+ // Avoid the influence of hexadecimal letters
+ if first_char_is_num
+ && !ch.is_ascii_hexdigit()
+ && (ch != 'b' && ch != 'B')
+ && (ch != 'x' && ch != 'X')
+ && ch != '-'
+ {
+ // Used to determine if the initial character is a number.
+ if is_numeric_string(&pre_char) {
break;
+ } else {
+ first_char_is_num = false;
}
- } else {
- break;
}
+
+ pre_char.insert(0, ch);
+ offset -= ch.len_utf8();
}
let mut begin = None;
- let mut end_num = None;
- let mut num = String::new();
+ let mut end = None;
+ let mut target = String::new();
let mut radix = 10;
+ let mut is_num = false;
let mut chars = snapshot.chars_at(offset).peekable();
while let Some(ch) = chars.next() {
- if offset >= end_offset {
+ if need_range && offset >= end_offset {
break; // stop at end of selection
}
- if num == "0" && ch == 'b' && chars.peek().is_some() && chars.peek().unwrap().is_digit(2) {
+ if target == "0"
+ && (ch == 'b' || ch == 'B')
+ && chars.peek().is_some()
+ && chars.peek().unwrap().is_digit(2)
+ {
radix = 2;
begin = None;
- num = String::new();
- } else if num == "0"
- && ch == 'x'
+ target = String::new();
+ } else if target == "0"
+ && (ch == 'x' || ch == 'X')
&& chars.peek().is_some()
&& chars.peek().unwrap().is_ascii_hexdigit()
{
radix = 16;
begin = None;
- num = String::new();
- }
-
- if ch.is_digit(radix)
- || (begin.is_none()
+ target = String::new();
+ } else if ch == '.' {
+ is_num = false;
+ begin = None;
+ target = String::new();
+ } else if ch.is_digit(radix)
+ || ((begin.is_none() || !is_num)
&& ch == '-'
&& chars.peek().is_some()
&& chars.peek().unwrap().is_digit(radix))
{
+ if !is_num {
+ is_num = true;
+ begin = Some(offset);
+ target = String::new();
+ } else if begin.is_none() {
+ begin = Some(offset);
+ }
+ target.push(ch);
+ } else if ch.is_ascii_alphabetic() && !is_num {
if begin.is_none() {
begin = Some(offset);
}
- num.push(ch);
- } else if begin.is_some() {
- end_num = Some(offset);
+ target.push(ch);
+ } else if begin.is_some() && (is_num || !is_num && is_toggle_word(&target)) {
+ // End of matching
+ end = Some(offset);
break;
} else if ch == '\n' {
break;
+ } else {
+ // To match the next word
+ is_num = false;
+ begin = None;
+ target = String::new();
}
offset += ch.len_utf8();
}
- if let Some(begin) = begin {
- let end_num = end_num.unwrap_or(offset);
+ if let Some(begin) = begin
+ && (is_num || !is_num && is_toggle_word(&target))
+ {
+ if !is_num {
+ radix = 0;
+ }
+
+ let end = end.unwrap_or(offset);
Some((
- begin.to_point(snapshot)..end_num.to_point(snapshot),
- num,
+ begin.to_point(snapshot)..end.to_point(snapshot),
+ target,
radix,
))
} else {
@@ -284,133 +322,38 @@ fn find_number_in_range(
}
}
-fn find_number(
- snapshot: &MultiBufferSnapshot,
- start: Point,
-) -> Option<(Range<Point>, String, u32)> {
- let mut offset = start.to_offset(snapshot);
-
- let ch0 = snapshot.chars_at(offset).next();
- if ch0.as_ref().is_some_and(char::is_ascii_hexdigit) || matches!(ch0, Some('-' | 'b' | 'x')) {
- // go backwards to the start of any number the selection is within
- for ch in snapshot.reversed_chars_at(offset) {
- if ch.is_ascii_hexdigit() || ch == '-' || ch == 'b' || ch == 'x' {
- offset -= ch.len_utf8();
- continue;
- }
- break;
- }
+fn is_numeric_string(s: &str) -> bool {
+ if s.is_empty() {
+ return false;
}
- let mut begin = None;
- let mut end = None;
- let mut num = String::new();
- let mut radix = 10;
-
- let mut chars = snapshot.chars_at(offset).peekable();
- // find the next number on the line (may start after the original cursor position)
- while let Some(ch) = chars.next() {
- if num == "0" && ch == 'b' && chars.peek().is_some() && chars.peek().unwrap().is_digit(2) {
- radix = 2;
- begin = None;
- num = String::new();
- }
- if num == "0"
- && ch == 'x'
- && chars.peek().is_some()
- && chars.peek().unwrap().is_ascii_hexdigit()
- {
- radix = 16;
- begin = None;
- num = String::new();
- }
+ let (_, rest) = if let Some(r) = s.strip_prefix('-') {
+ (true, r)
+ } else {
+ (false, s)
+ };
- if ch.is_digit(radix)
- || (begin.is_none()
- && ch == '-'
- && chars.peek().is_some()
- && chars.peek().unwrap().is_digit(radix))
- {
- if begin.is_none() {
- begin = Some(offset);
- }
- num.push(ch);
- } else if begin.is_some() {
- end = Some(offset);
- break;
- } else if ch == '\n' {
- break;
- }
- offset += ch.len_utf8();
+ if rest.is_empty() {
+ return false;
}
- if let Some(begin) = begin {
- let end = end.unwrap_or(offset);
- Some((begin.to_point(snapshot)..end.to_point(snapshot), num, radix))
+
+ if let Some(digits) = rest.strip_prefix("0b").or_else(|| rest.strip_prefix("0B")) {
+ digits.is_empty() || digits.chars().all(|c| c == '0' || c == '1')
+ } else if let Some(digits) = rest.strip_prefix("0x").or_else(|| rest.strip_prefix("0X")) {
+ digits.is_empty() || digits.chars().all(|c| c.is_ascii_hexdigit())
} else {
- None
+ !rest.is_empty() && rest.chars().all(|c| c.is_ascii_digit())
}
}
-fn find_boolean(snapshot: &MultiBufferSnapshot, start: Point) -> Option<(Range<Point>, String)> {
- let mut offset = start.to_offset(snapshot);
-
- let ch0 = snapshot.chars_at(offset).next();
- if ch0.as_ref().is_some_and(|c| c.is_ascii_alphabetic()) {
- for ch in snapshot.reversed_chars_at(offset) {
- if ch.is_ascii_alphabetic() {
- offset -= ch.len_utf8();
- continue;
- }
- break;
- }
- }
-
- let mut begin = None;
- let mut end = None;
- let mut word = String::new();
-
- let chars = snapshot.chars_at(offset);
-
- for ch in chars {
- if ch.is_ascii_alphabetic() {
- if begin.is_none() {
- begin = Some(offset);
- }
- word.push(ch);
- } else if begin.is_some() {
- end = Some(offset);
- let word_lower = word.to_lowercase();
- if BOOLEAN_PAIRS
- .iter()
- .any(|(a, b)| word_lower == *a || word_lower == *b)
- {
- return Some((
- begin.unwrap().to_point(snapshot)..end.unwrap().to_point(snapshot),
- word,
- ));
- }
- begin = None;
- end = None;
- word = String::new();
- } else if ch == '\n' {
- break;
- }
- offset += ch.len_utf8();
- }
- if let Some(begin) = begin {
- let end = end.unwrap_or(offset);
- let word_lower = word.to_lowercase();
- if BOOLEAN_PAIRS
- .iter()
- .any(|(a, b)| word_lower == *a || word_lower == *b)
- {
- return Some((begin.to_point(snapshot)..end.to_point(snapshot), word));
- }
- }
- None
+fn is_toggle_word(word: &str) -> bool {
+ let lower = word.to_lowercase();
+ BOOLEAN_PAIRS
+ .iter()
+ .any(|(a, b)| lower == *a || lower == *b)
}
-fn toggle_boolean(boolean: &str) -> String {
+fn increment_toggle_string(boolean: &str) -> String {
let lower = boolean.to_lowercase();
let target = BOOLEAN_PAIRS
@@ -802,7 +745,7 @@ mod test {
}
#[gpui::test]
- async fn test_toggle_boolean(cx: &mut gpui::TestAppContext) {
+ async fn test_increment_toggle(cx: &mut gpui::TestAppContext) {
let mut cx = VimTestContext::new(cx, true).await;
cx.set_state("let enabled = trΛue;", Mode::Normal);
@@ -860,6 +803,23 @@ mod test {
cx.assert_state("let enabled = ΛOff;", Mode::Normal);
}
+ #[gpui::test]
+ async fn test_increment_order(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+
+ cx.set_state("aaΛa false 1 2 3", Mode::Normal);
+ cx.simulate_keystrokes("ctrl-a");
+ cx.assert_state("aaa truΛe 1 2 3", Mode::Normal);
+
+ cx.set_state("aaΛa 1 false 2 3", Mode::Normal);
+ cx.simulate_keystrokes("ctrl-a");
+ cx.assert_state("aaa Λ2 false 2 3", Mode::Normal);
+
+ cx.set_state("trueΛ 1 2 3", Mode::Normal);
+ cx.simulate_keystrokes("ctrl-a");
+ cx.assert_state("true Λ2 2 3", Mode::Normal);
+ }
+
#[gpui::test]
async fn test_increment_visual_partial_number(cx: &mut gpui::TestAppContext) {
let mut cx = NeovimBackedTestContext::new(cx).await;
@@ -166,6 +166,7 @@ pub trait SearchableItem: Item + EventEmitter<SearchEvent> {
window: &mut Window,
cx: &mut Context<Self>,
) -> Option<usize>;
+ fn set_search_is_case_sensitive(&mut self, _: Option<bool>, _: &mut Context<Self>) {}
}
pub trait SearchableItemHandle: ItemHandle {
@@ -234,6 +235,8 @@ pub trait SearchableItemHandle: ItemHandle {
window: &mut Window,
cx: &mut App,
);
+
+ fn set_search_is_case_sensitive(&self, is_case_sensitive: Option<bool>, cx: &mut App);
}
impl<T: SearchableItem> SearchableItemHandle for Entity<T> {
@@ -390,6 +393,11 @@ impl<T: SearchableItem> SearchableItemHandle for Entity<T> {
this.toggle_filtered_search_ranges(enabled, window, cx)
});
}
+ fn set_search_is_case_sensitive(&self, enabled: Option<bool>, cx: &mut App) {
+ self.update(cx, |this, cx| {
+ this.set_search_is_case_sensitive(enabled, cx)
+ });
+ }
}
impl From<Box<dyn SearchableItemHandle>> for AnyView {
@@ -2352,8 +2352,8 @@ impl Snapshot {
self.entries_by_path.first()
}
- /// TODO: what's the difference between `root_dir` and `abs_path`?
- /// is there any? if so, document it.
+    /// Returns `Some(self.abs_path())` when the worktree root is a
+    /// directory, or `None` for a single-file worktree.
pub fn root_dir(&self) -> Option<Arc<Path>> {
self.root_entry()
.filter(|entry| entry.is_dir())
@@ -15,7 +15,7 @@ use extension::ExtensionHostProxy;
use fs::{Fs, RealFs};
use futures::{StreamExt, channel::oneshot, future};
use git::GitHostingProviderRegistry;
-use gpui::{App, AppContext, Application, AsyncApp, Focusable as _, UpdateGlobal as _};
+use gpui::{App, AppContext, Application, AsyncApp, Focusable as _, QuitMode, UpdateGlobal as _};
use gpui_tokio::Tokio;
use language::LanguageRegistry;
@@ -87,31 +87,33 @@ fn files_not_created_on_launch(errors: HashMap<io::ErrorKind, Vec<&Path>>) {
.collect::<Vec<_>>().join("\n\n");
eprintln!("{message}: {error_details}");
- Application::new().run(move |cx| {
- if let Ok(window) = cx.open_window(gpui::WindowOptions::default(), |_, cx| {
- cx.new(|_| gpui::Empty)
- }) {
- window
- .update(cx, |_, window, cx| {
- let response = window.prompt(
- gpui::PromptLevel::Critical,
- message,
- Some(&error_details),
- &["Exit"],
- cx,
- );
-
- cx.spawn_in(window, async move |_, cx| {
- response.await?;
- cx.update(|_, cx| cx.quit())
+ Application::new()
+ .with_quit_mode(QuitMode::Explicit)
+ .run(move |cx| {
+ if let Ok(window) = cx.open_window(gpui::WindowOptions::default(), |_, cx| {
+ cx.new(|_| gpui::Empty)
+ }) {
+ window
+ .update(cx, |_, window, cx| {
+ let response = window.prompt(
+ gpui::PromptLevel::Critical,
+ message,
+ Some(&error_details),
+ &["Exit"],
+ cx,
+ );
+
+ cx.spawn_in(window, async move |_, cx| {
+ response.await?;
+ cx.update(|_, cx| cx.quit())
+ })
+ .detach_and_log_err(cx);
})
- .detach_and_log_err(cx);
- })
- .log_err();
- } else {
- fail_to_open_window(anyhow::anyhow!("{message}: {error_details}"), cx)
- }
- })
+ .log_err();
+ } else {
+ fail_to_open_window(anyhow::anyhow!("{message}: {error_details}"), cx)
+ }
+ })
}
fn fail_to_open_window_async(e: anyhow::Error, cx: &mut AsyncApp) {
@@ -537,7 +539,7 @@ pub fn main() {
});
AppState::set_global(Arc::downgrade(&app_state), cx);
- auto_update::init(client.http_client(), cx);
+ auto_update::init(client.clone(), cx);
dap_adapters::init(cx);
auto_update_ui::init(cx);
reliability::init(
@@ -274,16 +274,27 @@ pub fn init(cx: &mut App) {
}
fn bind_on_window_closed(cx: &mut App) -> Option<gpui::Subscription> {
- WorkspaceSettings::get_global(cx)
- .on_last_window_closed
- .is_quit_app()
- .then(|| {
- cx.on_window_closed(|cx| {
- if cx.windows().is_empty() {
- cx.quit();
- }
+ #[cfg(target_os = "macos")]
+ {
+ WorkspaceSettings::get_global(cx)
+ .on_last_window_closed
+ .is_quit_app()
+ .then(|| {
+ cx.on_window_closed(|cx| {
+ if cx.windows().is_empty() {
+ cx.quit();
+ }
+ })
})
- })
+ }
+ #[cfg(not(target_os = "macos"))]
+ {
+ Some(cx.on_window_closed(|cx| {
+ if cx.windows().is_empty() {
+ cx.quit();
+ }
+ }))
+ }
}
pub fn build_window_options(display_uuid: Option<Uuid>, cx: &mut App) -> WindowOptions {
@@ -11,6 +11,9 @@ workspace = true
[lib]
path = "src/zeta2.rs"
+[features]
+llm-response-cache = []
+
[dependencies]
anyhow.workspace = true
arrayvec.workspace = true
@@ -0,0 +1,197 @@
+use anyhow::{Context as _, Result, anyhow};
+use language::{Anchor, BufferSnapshot, OffsetRangeExt as _, TextBufferSnapshot};
+use std::ops::Range;
+use std::path::Path;
+use std::sync::Arc;
+
+pub async fn parse_xml_edits<'a>(
+ mut input: &'a str,
+ get_buffer: impl Fn(&Path) -> Option<(&'a BufferSnapshot, &'a [Range<Anchor>])> + Send,
+) -> Result<(&'a BufferSnapshot, Vec<(Range<Anchor>, Arc<str>)>)> {
+ let edits_tag = parse_tag(&mut input, "edits")?.context("No edits tag")?;
+
+ input = edits_tag.body;
+
+ let file_path = edits_tag
+ .attributes
+ .trim_start()
+ .strip_prefix("path")
+ .context("no file attribute on edits tag")?
+ .trim_end()
+ .strip_prefix('=')
+ .context("no value for path attribute")?
+ .trim()
+ .trim_start_matches('"')
+ .trim_end_matches('"');
+
+ let (buffer, context_ranges) = get_buffer(file_path.as_ref())
+ .with_context(|| format!("no buffer for file {file_path}"))?;
+
+ let mut edits = vec![];
+ while let Some(old_text_tag) = parse_tag(&mut input, "old_text")? {
+ let new_text_tag =
+ parse_tag(&mut input, "new_text")?.context("no new_text tag following old_text")?;
+ edits.extend(resolve_new_text_old_text_in_buffer(
+ new_text_tag.body,
+ old_text_tag.body,
+ buffer,
+ context_ranges,
+ )?);
+ }
+
+ Ok((buffer, edits))
+}
+
+fn resolve_new_text_old_text_in_buffer(
+ new_text: &str,
+ old_text: &str,
+ buffer: &TextBufferSnapshot,
+ ranges: &[Range<Anchor>],
+) -> Result<impl Iterator<Item = (Range<Anchor>, Arc<str>)>, anyhow::Error> {
+ let context_offset = if old_text.is_empty() {
+ Ok(0)
+ } else {
+ let mut offset = None;
+ for range in ranges {
+ let range = range.to_offset(buffer);
+ let text = buffer.text_for_range(range.clone()).collect::<String>();
+ for (match_offset, _) in text.match_indices(old_text) {
+ if offset.is_some() {
+ anyhow::bail!("old_text is not unique enough:\n{}", old_text);
+ }
+ offset = Some(range.start + match_offset);
+ }
+ }
+ offset.ok_or_else(|| anyhow!("Failed to match old_text:\n{}", old_text))
+ }?;
+
+ let edits_within_hunk = language::text_diff(&old_text, &new_text);
+ Ok(edits_within_hunk
+ .into_iter()
+ .map(move |(inner_range, inner_text)| {
+ (
+ buffer.anchor_after(context_offset + inner_range.start)
+ ..buffer.anchor_before(context_offset + inner_range.end),
+ inner_text,
+ )
+ }))
+}
+
+struct ParsedTag<'a> {
+ attributes: &'a str,
+ body: &'a str,
+}
+
+fn parse_tag<'a>(input: &mut &'a str, tag: &str) -> Result<Option<ParsedTag<'a>>> {
+ let open_tag = format!("<{}", tag);
+ let close_tag = format!("</{}>", tag);
+ let Some(start_ix) = input.find(&open_tag) else {
+ return Ok(None);
+ };
+ let start_ix = start_ix + open_tag.len();
+ let closing_bracket_ix = start_ix
+ + input[start_ix..]
+ .find('>')
+ .with_context(|| format!("missing > after {tag}"))?;
+ let attributes = &input[start_ix..closing_bracket_ix].trim();
+ let end_ix = closing_bracket_ix
+ + input[closing_bracket_ix..]
+ .find(&close_tag)
+ .with_context(|| format!("no `{close_tag}` tag"))?;
+ let body = &input[closing_bracket_ix + '>'.len_utf8()..end_ix];
+ let body = body.strip_prefix('\n').unwrap_or(body);
+ *input = &input[end_ix + close_tag.len()..];
+ Ok(Some(ParsedTag { attributes, body }))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use gpui::TestAppContext;
+ use indoc::indoc;
+ use language::Point;
+ use project::{FakeFs, Project};
+ use serde_json::json;
+ use settings::SettingsStore;
+ use util::path;
+
+ #[test]
+ fn test_parse_tags() {
+ let mut input = indoc! {r#"
+ Prelude
+ <tag attr="foo">
+ tag value
+ </tag>
+ "# };
+ let parsed = parse_tag(&mut input, "tag").unwrap().unwrap();
+ assert_eq!(parsed.attributes, "attr=\"foo\"");
+ assert_eq!(parsed.body, "tag value\n");
+ assert_eq!(input, "\n");
+ }
+
+ #[gpui::test]
+ async fn test_parse_xml_edits(cx: &mut TestAppContext) {
+ let fs = init_test(cx);
+
+ let buffer_1_text = indoc! {r#"
+ one two three four
+ five six seven eight
+ nine ten eleven twelve
+ "# };
+
+ fs.insert_tree(
+ path!("/root"),
+ json!({
+ "file1": buffer_1_text,
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
+ let buffer = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/root/file1"), cx)
+ })
+ .await
+ .unwrap();
+ let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+
+ let edits = indoc! {r#"
+ <edits path="root/file1">
+ <old_text>
+ five six seven eight
+ </old_text>
+ <new_text>
+ five SIX seven eight!
+ </new_text>
+ </edits>
+ "#};
+
+ let (buffer, edits) = parse_xml_edits(edits, |_path| {
+ Some((&buffer_snapshot, &[(Anchor::MIN..Anchor::MAX)] as &[_]))
+ })
+ .await
+ .unwrap();
+
+ let edits = edits
+ .into_iter()
+ .map(|(range, text)| (range.to_point(&buffer), text))
+ .collect::<Vec<_>>();
+ assert_eq!(
+ edits,
+ &[
+ (Point::new(1, 5)..Point::new(1, 8), "SIX".into()),
+ (Point::new(1, 20)..Point::new(1, 20), "!".into())
+ ]
+ );
+ }
+
+ fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
+ cx.update(|cx| {
+ let settings_store = SettingsStore::test(cx);
+ cx.set_global(settings_store);
+ });
+
+ FakeFs::new(cx.background_executor.clone())
+ }
+}
@@ -1,4 +1,4 @@
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result, anyhow, bail};
use chrono::TimeDelta;
use client::{Client, EditPredictionUsage, UserStore};
use cloud_llm_client::predict_edits_v3::{self, PromptFormat, Signature};
@@ -6,8 +6,8 @@ use cloud_llm_client::{
AcceptEditPredictionBody, EXPIRED_LLM_TOKEN_HEADER_NAME, MINIMUM_REQUIRED_VERSION_HEADER_NAME,
ZED_VERSION_HEADER_NAME,
};
-use cloud_zeta2_prompt::DEFAULT_MAX_PROMPT_BYTES;
use cloud_zeta2_prompt::retrieval_prompt::{SearchToolInput, SearchToolQuery};
+use cloud_zeta2_prompt::{CURSOR_MARKER, DEFAULT_MAX_PROMPT_BYTES};
use collections::HashMap;
use edit_prediction_context::{
DeclarationId, DeclarationStyle, EditPredictionContext, EditPredictionContextOptions,
@@ -47,6 +47,7 @@ mod prediction;
mod provider;
pub mod retrieval_search;
pub mod udiff;
+mod xml_edits;
use crate::merge_excerpts::merge_excerpts;
use crate::prediction::EditPrediction;
@@ -131,6 +132,15 @@ pub struct Zeta {
options: ZetaOptions,
update_required: bool,
debug_tx: Option<mpsc::UnboundedSender<ZetaDebugInfo>>,
+ #[cfg(feature = "llm-response-cache")]
+ llm_response_cache: Option<Arc<dyn LlmResponseCache>>,
+}
+
+#[cfg(feature = "llm-response-cache")]
+pub trait LlmResponseCache: Send + Sync {
+ fn get_key(&self, url: &gpui::http_client::Url, body: &str) -> u64;
+ fn read_response(&self, key: u64) -> Option<String>;
+ fn write_response(&self, key: u64, value: &str);
}
#[derive(Debug, Clone, PartialEq)]
@@ -359,9 +369,16 @@ impl Zeta {
),
update_required: false,
debug_tx: None,
+ #[cfg(feature = "llm-response-cache")]
+ llm_response_cache: None,
}
}
+ #[cfg(feature = "llm-response-cache")]
+ pub fn with_llm_response_cache(&mut self, cache: Arc<dyn LlmResponseCache>) {
+ self.llm_response_cache = Some(cache);
+ }
+
pub fn debug_info(&mut self) -> mpsc::UnboundedReceiver<ZetaDebugInfo> {
let (debug_watch_tx, debug_watch_rx) = mpsc::unbounded();
self.debug_tx = Some(debug_watch_tx);
@@ -734,6 +751,9 @@ impl Zeta {
})
.collect::<Vec<_>>();
+ #[cfg(feature = "llm-response-cache")]
+ let llm_response_cache = self.llm_response_cache.clone();
+
let request_task = cx.background_spawn({
let active_buffer = active_buffer.clone();
async move {
@@ -923,8 +943,15 @@ impl Zeta {
log::trace!("Sending edit prediction request");
let before_request = chrono::Utc::now();
- let response =
- Self::send_raw_llm_request(client, llm_token, app_version, request).await;
+ let response = Self::send_raw_llm_request(
+ request,
+ client,
+ llm_token,
+ app_version,
+ #[cfg(feature = "llm-response-cache")]
+ llm_response_cache,
+ )
+ .await;
let request_time = chrono::Utc::now() - before_request;
log::trace!("Got edit prediction response");
@@ -943,23 +970,39 @@ impl Zeta {
let (res, usage) = response?;
let request_id = EditPredictionId(res.id.clone().into());
- let Some(output_text) = text_from_response(res) else {
- return Ok((None, usage))
+ let Some(mut output_text) = text_from_response(res) else {
+ return Ok((None, usage));
};
- let (edited_buffer_snapshot, edits) =
- crate::udiff::parse_diff(&output_text, |path| {
- included_files
- .iter()
- .find_map(|(_, buffer, probe_path, ranges)| {
- if probe_path.as_ref() == path {
- Some((buffer, ranges.as_slice()))
- } else {
- None
- }
- })
- })
- .await?;
+ if output_text.contains(CURSOR_MARKER) {
+ log::trace!("Stripping out {CURSOR_MARKER} from response");
+ output_text = output_text.replace(CURSOR_MARKER, "");
+ }
+
+ let get_buffer_from_context = |path: &Path| {
+ included_files
+ .iter()
+ .find_map(|(_, buffer, probe_path, ranges)| {
+ if probe_path.as_ref() == path {
+ Some((buffer, ranges.as_slice()))
+ } else {
+ None
+ }
+ })
+ };
+
+ let (edited_buffer_snapshot, edits) = match options.prompt_format {
+ PromptFormat::NumLinesUniDiff => {
+ crate::udiff::parse_diff(&output_text, get_buffer_from_context).await?
+ }
+ PromptFormat::OldTextNewText => {
+ crate::xml_edits::parse_xml_edits(&output_text, get_buffer_from_context)
+ .await?
+ }
+ _ => {
+ bail!("unsupported prompt format {}", options.prompt_format)
+ }
+ };
let edited_buffer = included_files
.iter()
@@ -970,9 +1013,17 @@ impl Zeta {
None
}
})
- .context("Failed to find buffer in included_buffers, even though we just found the snapshot")?;
-
- anyhow::Ok((Some((request_id, edited_buffer, edited_buffer_snapshot.clone(), edits)), usage))
+ .context("Failed to find buffer in included_buffers")?;
+
+ anyhow::Ok((
+ Some((
+ request_id,
+ edited_buffer,
+ edited_buffer_snapshot.clone(),
+ edits,
+ )),
+ usage,
+ ))
}
});
@@ -994,10 +1045,13 @@ impl Zeta {
}
async fn send_raw_llm_request(
+ request: open_ai::Request,
client: Arc<Client>,
llm_token: LlmApiToken,
app_version: SemanticVersion,
- request: open_ai::Request,
+ #[cfg(feature = "llm-response-cache")] llm_response_cache: Option<
+ Arc<dyn LlmResponseCache>,
+ >,
) -> Result<(open_ai::Response, Option<EditPredictionUsage>)> {
let url = if let Some(predict_edits_url) = PREDICT_EDITS_URL.as_ref() {
http_client::Url::parse(&predict_edits_url)?
@@ -1007,7 +1061,21 @@ impl Zeta {
.build_zed_llm_url("/predict_edits/raw", &[])?
};
- Self::send_api_request(
+ #[cfg(feature = "llm-response-cache")]
+ let cache_key = if let Some(cache) = llm_response_cache {
+ let request_json = serde_json::to_string(&request)?;
+ let key = cache.get_key(&url, &request_json);
+
+ if let Some(response_str) = cache.read_response(key) {
+ return Ok((serde_json::from_str(&response_str)?, None));
+ }
+
+ Some((cache, key))
+ } else {
+ None
+ };
+
+ let (response, usage) = Self::send_api_request(
|builder| {
let req = builder
.uri(url.as_ref())
@@ -1018,7 +1086,14 @@ impl Zeta {
llm_token,
app_version,
)
- .await
+ .await?;
+
+ #[cfg(feature = "llm-response-cache")]
+ if let Some((cache, key)) = cache_key {
+ cache.write_response(key, &serde_json::to_string(&response)?);
+ }
+
+ Ok((response, usage))
}
fn handle_api_response<T>(
@@ -1286,10 +1361,20 @@ impl Zeta {
reasoning_effort: None,
};
+ #[cfg(feature = "llm-response-cache")]
+ let llm_response_cache = self.llm_response_cache.clone();
+
cx.spawn(async move |this, cx| {
log::trace!("Sending search planning request");
- let response =
- Self::send_raw_llm_request(client, llm_token, app_version, request).await;
+ let response = Self::send_raw_llm_request(
+ request,
+ client,
+ llm_token,
+ app_version,
+ #[cfg(feature = "llm-response-cache")]
+ llm_response_cache,
+ )
+ .await;
let mut response = Self::handle_api_response(&this, response, cx)?;
log::trace!("Got search planning response");
@@ -1317,7 +1402,8 @@ impl Zeta {
continue;
}
- let input: SearchToolInput = serde_json::from_str(&function.arguments)?;
+ let input: SearchToolInput = serde_json::from_str(&function.arguments)
+ .with_context(|| format!("invalid search json {}", &function.arguments))?;
queries.extend(input.queries);
}
@@ -1377,6 +1463,16 @@ impl Zeta {
})
}
+ pub fn set_context(
+ &mut self,
+ project: Entity<Project>,
+ context: HashMap<Entity<Buffer>, Vec<Range<Anchor>>>,
+ ) {
+ if let Some(zeta_project) = self.projects.get_mut(&project.entity_id()) {
+ zeta_project.context = Some(context);
+ }
+ }
+
fn gather_nearby_diagnostics(
cursor_offset: usize,
diagnostic_sets: &[(LanguageServerId, DiagnosticSet)],
@@ -54,7 +54,7 @@ toml.workspace = true
util.workspace = true
watch.workspace = true
zeta.workspace = true
-zeta2.workspace = true
+zeta2 = { workspace = true, features = ["llm-response-cache"] }
zlog.workspace = true
[dev-dependencies]
@@ -1,5 +1,4 @@
use std::{
- fs,
io::IsTerminal,
path::{Path, PathBuf},
sync::Arc,
@@ -12,9 +11,9 @@ use gpui::AsyncApp;
use zeta2::udiff::DiffLine;
use crate::{
+ PromptFormat,
example::{Example, NamedExample},
headless::ZetaCliAppState,
- paths::CACHE_DIR,
predict::{PredictionDetails, zeta2_predict},
};
@@ -22,7 +21,11 @@ use crate::{
pub struct EvaluateArguments {
example_paths: Vec<PathBuf>,
#[clap(long)]
- re_run: bool,
+ skip_cache: bool,
+ #[arg(long, value_enum, default_value_t = PromptFormat::default())]
+ prompt_format: PromptFormat,
+ #[arg(long)]
+ use_expected_context: bool,
}
pub async fn run_evaluate(
@@ -33,7 +36,17 @@ pub async fn run_evaluate(
let example_len = args.example_paths.len();
let all_tasks = args.example_paths.into_iter().map(|path| {
let app_state = app_state.clone();
- cx.spawn(async move |cx| run_evaluate_one(&path, args.re_run, app_state.clone(), cx).await)
+ cx.spawn(async move |cx| {
+ run_evaluate_one(
+ &path,
+ args.skip_cache,
+ args.prompt_format,
+ args.use_expected_context,
+ app_state.clone(),
+ cx,
+ )
+ .await
+ })
});
let all_results = futures::future::try_join_all(all_tasks).await.unwrap();
@@ -51,35 +64,23 @@ pub async fn run_evaluate(
pub async fn run_evaluate_one(
example_path: &Path,
- re_run: bool,
+ skip_cache: bool,
+ prompt_format: PromptFormat,
+ use_expected_context: bool,
app_state: Arc<ZetaCliAppState>,
cx: &mut AsyncApp,
) -> Result<EvaluationResult> {
let example = NamedExample::load(&example_path).unwrap();
- let example_cache_path = CACHE_DIR.join(&example_path.file_name().unwrap());
-
- let predictions = if !re_run && example_cache_path.exists() {
- let file_contents = fs::read_to_string(&example_cache_path)?;
- let as_json = serde_json::from_str::<PredictionDetails>(&file_contents)?;
- log::debug!(
- "Loaded predictions from cache: {}",
- example_cache_path.display()
- );
- as_json
- } else {
- zeta2_predict(example.clone(), &app_state, cx)
- .await
- .unwrap()
- };
-
- if !example_cache_path.exists() {
- fs::create_dir_all(&*CACHE_DIR).unwrap();
- fs::write(
- example_cache_path,
- serde_json::to_string(&predictions).unwrap(),
- )
- .unwrap();
- }
+ let predictions = zeta2_predict(
+ example.clone(),
+ skip_cache,
+ prompt_format,
+ use_expected_context,
+ &app_state,
+ cx,
+ )
+ .await
+ .unwrap();
let evaluation_result = evaluate(&example.example, &predictions);
@@ -158,19 +158,20 @@ fn syntax_args_to_options(
}),
max_diagnostic_bytes: zeta2_args.max_diagnostic_bytes,
max_prompt_bytes: zeta2_args.max_prompt_bytes,
- prompt_format: zeta2_args.prompt_format.clone().into(),
+ prompt_format: zeta2_args.prompt_format.into(),
file_indexing_parallelism: zeta2_args.file_indexing_parallelism,
buffer_change_grouping_interval: Duration::ZERO,
}
}
-#[derive(clap::ValueEnum, Default, Debug, Clone)]
+#[derive(clap::ValueEnum, Default, Debug, Clone, Copy)]
enum PromptFormat {
MarkedExcerpt,
LabeledSections,
OnlySnippets,
#[default]
NumberedLines,
+ OldTextNewText,
}
impl Into<predict_edits_v3::PromptFormat> for PromptFormat {
@@ -180,6 +181,7 @@ impl Into<predict_edits_v3::PromptFormat> for PromptFormat {
Self::LabeledSections => predict_edits_v3::PromptFormat::LabeledSections,
Self::OnlySnippets => predict_edits_v3::PromptFormat::OnlySnippets,
Self::NumberedLines => predict_edits_v3::PromptFormat::NumLinesUniDiff,
+ Self::OldTextNewText => predict_edits_v3::PromptFormat::OldTextNewText,
}
}
}
@@ -2,7 +2,15 @@ use std::{env, path::PathBuf, sync::LazyLock};
static TARGET_DIR: LazyLock<PathBuf> = LazyLock::new(|| env::current_dir().unwrap().join("target"));
pub static CACHE_DIR: LazyLock<PathBuf> =
- LazyLock::new(|| TARGET_DIR.join("zeta-prediction-cache"));
+ LazyLock::new(|| TARGET_DIR.join("zeta-llm-response-cache"));
pub static REPOS_DIR: LazyLock<PathBuf> = LazyLock::new(|| TARGET_DIR.join("zeta-repos"));
pub static WORKTREES_DIR: LazyLock<PathBuf> = LazyLock::new(|| TARGET_DIR.join("zeta-worktrees"));
pub static LOGS_DIR: LazyLock<PathBuf> = LazyLock::new(|| TARGET_DIR.join("zeta-logs"));
+pub static LOGS_SEARCH_PROMPT: LazyLock<PathBuf> =
+ LazyLock::new(|| LOGS_DIR.join("search_prompt.md"));
+pub static LOGS_SEARCH_QUERIES: LazyLock<PathBuf> =
+ LazyLock::new(|| LOGS_DIR.join("search_queries.json"));
+pub static LOGS_PREDICTION_PROMPT: LazyLock<PathBuf> =
+ LazyLock::new(|| LOGS_DIR.join("prediction_prompt.md"));
+pub static LOGS_PREDICTION_RESPONSE: LazyLock<PathBuf> =
+ LazyLock::new(|| LOGS_DIR.join("prediction_response.md"));
@@ -1,27 +1,43 @@
-use crate::example::{ActualExcerpt, NamedExample};
+use crate::PromptFormat;
+use crate::example::{ActualExcerpt, ExpectedExcerpt, NamedExample};
use crate::headless::ZetaCliAppState;
-use crate::paths::LOGS_DIR;
+use crate::paths::{
+ CACHE_DIR, LOGS_DIR, LOGS_PREDICTION_PROMPT, LOGS_PREDICTION_RESPONSE, LOGS_SEARCH_PROMPT,
+ LOGS_SEARCH_QUERIES,
+};
use ::serde::Serialize;
use anyhow::{Result, anyhow};
use clap::Args;
+use collections::HashMap;
+use gpui::http_client::Url;
+use language::{Anchor, Buffer, Point};
+// use cloud_llm_client::predict_edits_v3::PromptFormat;
use cloud_zeta2_prompt::{CURSOR_MARKER, write_codeblock};
use futures::StreamExt as _;
-use gpui::{AppContext, AsyncApp};
+use gpui::{AppContext, AsyncApp, Entity};
use project::Project;
use serde::Deserialize;
use std::cell::Cell;
use std::fs;
use std::io::Write;
+use std::ops::Range;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use std::time::{Duration, Instant};
+use zeta2::LlmResponseCache;
#[derive(Debug, Args)]
pub struct PredictArguments {
- example_path: PathBuf,
+ #[arg(long, value_enum, default_value_t = PromptFormat::default())]
+ prompt_format: PromptFormat,
+ #[arg(long)]
+ use_expected_context: bool,
#[clap(long, short, value_enum, default_value_t = PredictionsOutputFormat::Md)]
format: PredictionsOutputFormat,
+ example_path: PathBuf,
+ #[clap(long)]
+ skip_cache: bool,
}
#[derive(clap::ValueEnum, Debug, Clone)]
@@ -30,14 +46,33 @@ pub enum PredictionsOutputFormat {
Md,
Diff,
}
+
pub async fn run_zeta2_predict(
args: PredictArguments,
app_state: &Arc<ZetaCliAppState>,
cx: &mut AsyncApp,
) {
let example = NamedExample::load(args.example_path).unwrap();
- let result = zeta2_predict(example, &app_state, cx).await.unwrap();
+ let result = zeta2_predict(
+ example,
+ args.skip_cache,
+ args.prompt_format,
+ args.use_expected_context,
+ &app_state,
+ cx,
+ )
+ .await
+ .unwrap();
result.write(args.format, std::io::stdout()).unwrap();
+
+ println!("## Logs\n");
+ println!("Search prompt: {}", LOGS_SEARCH_PROMPT.display());
+ println!("Search queries: {}", LOGS_SEARCH_QUERIES.display());
+ println!("Prediction prompt: {}", LOGS_PREDICTION_PROMPT.display());
+ println!(
+ "Prediction response: {}",
+ LOGS_PREDICTION_RESPONSE.display()
+ );
}
thread_local! {
@@ -46,6 +81,9 @@ thread_local! {
pub async fn zeta2_predict(
example: NamedExample,
+ skip_cache: bool,
+ prompt_format: PromptFormat,
+ use_expected_context: bool,
app_state: &Arc<ZetaCliAppState>,
cx: &mut AsyncApp,
) -> Result<PredictionDetails> {
@@ -88,6 +126,10 @@ pub async fn zeta2_predict(
let zeta = cx.update(|cx| zeta2::Zeta::global(&app_state.client, &app_state.user_store, cx))?;
+ zeta.update(cx, |zeta, _cx| {
+ zeta.with_llm_response_cache(Arc::new(Cache { skip_cache }));
+ })?;
+
cx.subscribe(&buffer_store, {
let project = project.clone();
move |_, event, cx| match event {
@@ -110,33 +152,31 @@ pub async fn zeta2_predict(
let debug_task = cx.background_spawn({
let result = result.clone();
async move {
- let mut context_retrieval_started_at = None;
- let mut context_retrieval_finished_at = None;
+ let mut start_time = None;
let mut search_queries_generated_at = None;
let mut search_queries_executed_at = None;
while let Some(event) = debug_rx.next().await {
match event {
zeta2::ZetaDebugInfo::ContextRetrievalStarted(info) => {
- context_retrieval_started_at = Some(info.timestamp);
- fs::write(LOGS_DIR.join("search_prompt.md"), &info.search_prompt)?;
+ start_time = Some(info.timestamp);
+ fs::write(&*LOGS_SEARCH_PROMPT, &info.search_prompt)?;
}
zeta2::ZetaDebugInfo::SearchQueriesGenerated(info) => {
search_queries_generated_at = Some(info.timestamp);
fs::write(
- LOGS_DIR.join("search_queries.json"),
+ &*LOGS_SEARCH_QUERIES,
serde_json::to_string_pretty(&info.search_queries).unwrap(),
)?;
}
zeta2::ZetaDebugInfo::SearchQueriesExecuted(info) => {
search_queries_executed_at = Some(info.timestamp);
}
- zeta2::ZetaDebugInfo::ContextRetrievalFinished(info) => {
- context_retrieval_finished_at = Some(info.timestamp);
- }
+ zeta2::ZetaDebugInfo::ContextRetrievalFinished(_info) => {}
zeta2::ZetaDebugInfo::EditPredictionRequested(request) => {
let prediction_started_at = Instant::now();
+ start_time.get_or_insert(prediction_started_at);
fs::write(
- LOGS_DIR.join("prediction_prompt.md"),
+ &*LOGS_PREDICTION_PROMPT,
&request.local_prompt.unwrap_or_default(),
)?;
@@ -170,19 +210,20 @@ pub async fn zeta2_predict(
let response = request.response_rx.await?.0.map_err(|err| anyhow!(err))?;
let response = zeta2::text_from_response(response).unwrap_or_default();
let prediction_finished_at = Instant::now();
- fs::write(LOGS_DIR.join("prediction_response.md"), &response)?;
+ fs::write(&*LOGS_PREDICTION_RESPONSE, &response)?;
let mut result = result.lock().unwrap();
- result.planning_search_time = search_queries_generated_at.unwrap()
- - context_retrieval_started_at.unwrap();
- result.running_search_time = search_queries_executed_at.unwrap()
- - search_queries_generated_at.unwrap();
- result.filtering_search_time = context_retrieval_finished_at.unwrap()
- - search_queries_executed_at.unwrap();
+ if !use_expected_context {
+ result.planning_search_time =
+ Some(search_queries_generated_at.unwrap() - start_time.unwrap());
+ result.running_search_time = Some(
+ search_queries_executed_at.unwrap()
+ - search_queries_generated_at.unwrap(),
+ );
+ }
result.prediction_time = prediction_finished_at - prediction_started_at;
- result.total_time =
- prediction_finished_at - context_retrieval_started_at.unwrap();
+ result.total_time = prediction_finished_at - start_time.unwrap();
break;
}
@@ -192,10 +233,42 @@ pub async fn zeta2_predict(
}
});
- zeta.update(cx, |zeta, cx| {
- zeta.refresh_context(project.clone(), cursor_buffer.clone(), cursor_anchor, cx)
- })?
- .await?;
+ zeta.update(cx, |zeta, _cx| {
+ let mut options = zeta.options().clone();
+ options.prompt_format = prompt_format.into();
+ zeta.set_options(options);
+ })?;
+
+ if use_expected_context {
+ let context_excerpts_tasks = example
+ .example
+ .expected_context
+ .iter()
+ .flat_map(|section| {
+ section.alternatives[0].excerpts.iter().map(|excerpt| {
+ resolve_context_entry(project.clone(), excerpt.clone(), cx.clone())
+ })
+ })
+ .collect::<Vec<_>>();
+ let context_excerpts_vec = futures::future::try_join_all(context_excerpts_tasks).await?;
+
+ let mut context_excerpts = HashMap::default();
+ for (buffer, mut excerpts) in context_excerpts_vec {
+ context_excerpts
+ .entry(buffer)
+ .or_insert(Vec::new())
+ .append(&mut excerpts);
+ }
+
+ zeta.update(cx, |zeta, _cx| {
+ zeta.set_context(project.clone(), context_excerpts)
+ })?;
+ } else {
+ zeta.update(cx, |zeta, cx| {
+ zeta.refresh_context(project.clone(), cursor_buffer.clone(), cursor_anchor, cx)
+ })?
+ .await?;
+ }
let prediction = zeta
.update(cx, |zeta, cx| {
@@ -223,14 +296,90 @@ pub async fn zeta2_predict(
anyhow::Ok(result)
}
+async fn resolve_context_entry(
+ project: Entity<Project>,
+ excerpt: ExpectedExcerpt,
+ mut cx: AsyncApp,
+) -> Result<(Entity<Buffer>, Vec<Range<Anchor>>)> {
+ let buffer = project
+ .update(&mut cx, |project, cx| {
+ let project_path = project.find_project_path(&excerpt.path, cx).unwrap();
+ project.open_buffer(project_path, cx)
+ })?
+ .await?;
+
+ let ranges = buffer.read_with(&mut cx, |buffer, _| {
+ let full_text = buffer.text();
+ let offset = full_text
+ .find(&excerpt.text)
+ .expect("Expected context not found");
+ let point = buffer.offset_to_point(offset);
+ excerpt
+ .required_lines
+ .iter()
+ .map(|line| {
+ let row = point.row + line.0;
+ let range = Point::new(row, 0)..Point::new(row + 1, 0);
+ buffer.anchor_after(range.start)..buffer.anchor_before(range.end)
+ })
+ .collect()
+ })?;
+
+ Ok((buffer, ranges))
+}
+
+struct Cache {
+ skip_cache: bool,
+}
+
+impl Cache {
+ fn path(key: u64) -> PathBuf {
+ CACHE_DIR.join(format!("{key:x}.json"))
+ }
+}
+
+impl LlmResponseCache for Cache {
+ fn get_key(&self, url: &Url, body: &str) -> u64 {
+ use collections::FxHasher;
+ use std::hash::{Hash, Hasher};
+
+ let mut hasher = FxHasher::default();
+ url.hash(&mut hasher);
+ body.hash(&mut hasher);
+ hasher.finish()
+ }
+
+ fn read_response(&self, key: u64) -> Option<String> {
+ let path = Cache::path(key);
+ if path.exists() {
+ if self.skip_cache {
+ log::info!("Skipping existing cached LLM response: {}", path.display());
+ None
+ } else {
+ log::info!("Using LLM response from cache: {}", path.display());
+ Some(fs::read_to_string(path).unwrap())
+ }
+ } else {
+ None
+ }
+ }
+
+ fn write_response(&self, key: u64, value: &str) {
+ fs::create_dir_all(&*CACHE_DIR).unwrap();
+
+ let path = Cache::path(key);
+ log::info!("Writing LLM response to cache: {}", path.display());
+ fs::write(path, value).unwrap();
+ }
+}
+
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct PredictionDetails {
pub diff: String,
pub excerpts: Vec<ActualExcerpt>,
pub excerpts_text: String, // TODO: contains the worktree root path. Drop this field and compute it on the fly
- pub planning_search_time: Duration,
- pub filtering_search_time: Duration,
- pub running_search_time: Duration,
+ pub planning_search_time: Option<Duration>,
+ pub running_search_time: Option<Duration>,
pub prediction_time: Duration,
pub total_time: Duration,
}
@@ -247,8 +396,7 @@ impl PredictionDetails {
}
pub fn to_markdown(&self) -> String {
- let inference_time =
- self.planning_search_time + self.filtering_search_time + self.prediction_time;
+ let inference_time = self.planning_search_time.unwrap_or_default() + self.prediction_time;
format!(
"## Excerpts\n\n\
@@ -258,16 +406,14 @@ impl PredictionDetails {
## Time\n\n\
Planning searches: {}ms\n\
Running searches: {}ms\n\
- Filtering context results: {}ms\n\
Making Prediction: {}ms\n\n\
-------------------\n\n\
Total: {}ms\n\
Inference: {}ms ({:.2}%)\n",
self.excerpts_text,
self.diff,
- self.planning_search_time.as_millis(),
- self.running_search_time.as_millis(),
- self.filtering_search_time.as_millis(),
+ self.planning_search_time.unwrap_or_default().as_millis(),
+ self.running_search_time.unwrap_or_default().as_millis(),
self.prediction_time.as_millis(),
self.total_time.as_millis(),
inference_time.as_millis(),
@@ -30,16 +30,17 @@ pub fn init_test() {
}
}
-pub fn init_test_with(filter: &str) {
- if try_init(Some(filter.to_owned())).is_ok() {
- init_output_stdout();
- }
-}
-
fn get_env_config() -> Option<String> {
std::env::var("ZED_LOG")
.or_else(|_| std::env::var("RUST_LOG"))
.ok()
+ .or_else(|| {
+ if std::env::var("CI").is_ok() {
+ Some("info".to_owned())
+ } else {
+ None
+ }
+ })
}
pub fn process_env(filter: Option<String>) {
@@ -587,7 +587,7 @@ These routing controls let you fine-tune cost, capability, and reliability tra
### Vercel v0 {#vercel-v0}
-[Vercel v0](https://vercel.com/docs/v0/api) is an expert model for generating full-stack apps, with framework-aware completions optimized for modern stacks like Next.js and Vercel.
+[Vercel v0](https://v0.app/docs/api/model) is an expert model for generating full-stack apps, with framework-aware completions optimized for modern stacks like Next.js and Vercel.
It supports text and image inputs and provides fast streaming responses.
The v0 models are [OpenAI-compatible models](/#openai-api-compatible), but Vercel is listed as first-class provider in the panel's settings view.
@@ -3184,13 +3184,53 @@ Non-negative `integer` values
```json [settings]
"search": {
+ "button": true,
"whole_word": false,
"case_sensitive": false,
"include_ignored": false,
- "regex": false
+ "regex": false,
+ "center_on_match": false
},
```
+### Button
+
+- Description: Whether to show the project search button in the status bar.
+- Setting: `button`
+- Default: `true`
+
+### Whole Word
+
+- Description: Whether to only match on whole words.
+- Setting: `whole_word`
+- Default: `false`
+
+### Case Sensitive
+
+- Description: Whether to match case sensitively. This setting affects both
+ searches and editor actions like "Select Next Occurrence", "Select Previous
+ Occurrence", and "Select All Occurrences".
+- Setting: `case_sensitive`
+- Default: `false`
+
+### Include Ignored
+
+- Description: Whether to include gitignored files in search results.
+- Setting: `include_ignored`
+- Default: `false`
+
+### Regex
+
+- Description: Whether to interpret the search query as a regular expression.
+- Setting: `regex`
+- Default: `false`
+
+### Center On Match
+
+- Description: Whether to center the cursor on each search match when navigating.
+- Setting: `center_on_match`
+- Default: `false`
+
## Search Wrap
- Description: If `search_wrap` is disabled, search result do not wrap around the end of the file
@@ -10,7 +10,7 @@ Release Notes:
- N/A _or_ Added/Fixed/Improved ...
```
-On Wednesdays, we run a [`get-preview-channel-changes`](https://github.com/zed-industries/zed/blob/main/script/get-preview-channel-changes) script that scrapes `Release Notes` lines from pull requests landing in preview, as documented in our [Release](https://zed.dev/docs/development/releases) docs.
+On Wednesdays, we run a [`get-preview-channel-changes`](https://github.com/zed-industries/zed/blob/main/script/get-preview-channel-changes) script that scrapes `Release Notes` lines from pull requests landing in preview, as documented in our [Release](https://zed.dev/docs/development/release-notes) docs.
The script outputs everything below the `Release Notes` line, including additional data such as the pull request author (if not a Zed team member) and a link to the pull request.
If you use `N/A`, the script skips your pull request entirely.
@@ -53,7 +53,7 @@ If instead you wanted to restrict yourself only to [Zed Language-Specific Docume
### Implicit Wildcards
-When using the "Include" / "Exclude" filters on a Project Search each glob is wrapped in implicit wildcards. For example to exclude any files with license in the path or filename from your search just type type `license` in the exclude box. Behind the scenes Zed transforms `license` to `**license**`. This means that files named `license.*`, `*.license` or inside a `license` subdirectory will all be filtered out. This enables users to easily filter for `*.ts` without having to remember to type `**/*.ts` every time.
+When using the "Include" / "Exclude" filters on a Project Search each glob is wrapped in implicit wildcards. For example to exclude any files with license in the path or filename from your search just type `license` in the exclude box. Behind the scenes Zed transforms `license` to `**license**`. This means that files named `license.*`, `*.license` or inside a `license` subdirectory will all be filtered out. This enables users to easily filter for `*.ts` without having to remember to type `**/*.ts` every time.
Alternatively, if in your Zed settings you wanted a [`file_types`](./configuring-zed.md#file-types) override which only applied to a certain directory you must explicitly include the wildcard globs. For example, if you had a directory of template files with the `html` extension that you wanted to recognize as Jinja2 template you could use the following:
@@ -33,4 +33,4 @@ Once you have the cli, simply from a terminal, navigate to your project and run
zed .
```
-Voila! You should have Zed running with OCaml support, no additional setup required.
+Voilà! You should have Zed running with OCaml support, no additional setup required.
@@ -72,9 +72,10 @@ If you'd prefer, you can install Zed by downloading our pre-built .tar.gz. This
Download the `.tar.gz` file:
-- [zed-linux-x86_64.tar.gz](https://zed.dev/api/releases/stable/latest/zed-linux-x86_64.tar.gz) ([preview](https://zed.dev/api/releases/preview/latest/zed-linux-x86_64.tar.gz))
-- [zed-linux-aarch64.tar.gz](https://zed.dev/api/releases/stable/latest/zed-linux-aarch64.tar.gz)
- ([preview](https://zed.dev/api/releases/preview/latest/zed-linux-aarch64.tar.gz))
+- [zed-linux-x86_64.tar.gz](https://cloud.zed.dev/releases/stable/latest/download?asset=zed&arch=x86_64&os=linux&source=docs)
+ ([preview](https://cloud.zed.dev/releases/preview/latest/download?asset=zed&arch=x86_64&os=linux&source=docs))
+- [zed-linux-aarch64.tar.gz](https://cloud.zed.dev/releases/stable/latest/download?asset=zed&arch=aarch64&os=linux&source=docs)
+ ([preview](https://cloud.zed.dev/releases/preview/latest/download?asset=zed&arch=aarch64&os=linux&source=docs))
Then ensure that the `zed` binary in the tarball is on your path. The easiest way is to unpack the tarball and create a symlink:
@@ -110,4 +110,4 @@ If you encounter issues during uninstallation:
- **Linux**: If the uninstall script fails, check the error message and consider manual removal of the directories listed above.
- **All platforms**: If you want to start fresh while keeping Zed installed, you can delete the configuration directories instead of uninstalling the application entirely.
-For additional help, see our [Linux-specific documentation](./linux.md) or visit the [Zed community](https://zed.dev/community).
+For additional help, see our [Linux-specific documentation](./linux.md) or visit the [Zed community](https://zed.dev/community-links).
@@ -218,6 +218,10 @@ TBD: Centered layout related settings
"active_line_width": 1, // Width of active guide in pixels [1-10]
"coloring": "fixed", // disabled, fixed, indent_aware
"background_coloring": "disabled" // disabled, indent_aware
+ },
+
+ "sticky_scroll": {
+ "enabled": false // Whether to stick scopes to the top of the editor. Disabled by default.
}
```
@@ -22,7 +22,7 @@ Build the application bundle for macOS.
Options:
-d Compile in debug mode
-o Open dir with the resulting DMG or launch the app itself in local mode.
- -i Install the resulting DMG into /Applications in local mode. Noop without -l.
+ -i Install the resulting DMG into /Applications.
-h Display this help and exit.
"
}
@@ -209,16 +209,6 @@ function sign_app_binaries() {
codesign --force --deep --entitlements "${app_path}/Contents/Resources/zed.entitlements" --sign ${MACOS_SIGNING_KEY:- -} "${app_path}" -v
fi
- if [[ "$target_dir" = "debug" ]]; then
- if [ "$open_result" = true ]; then
- open "$app_path"
- else
- echo "Created application bundle:"
- echo "$app_path"
- fi
- exit 0
- fi
-
bundle_name=$(basename "$app_path")
if [ "$local_install" = true ]; then
@@ -229,6 +219,16 @@ function sign_app_binaries() {
echo "Opening /Applications/$bundle_name"
open "/Applications/$bundle_name"
fi
+ elif [ "$open_result" = true ]; then
+ open "$app_path"
+ fi
+
+ if [[ "$target_dir" = "debug" ]]; then
+ echo "Debug build detected - skipping DMG creation and signing"
+ if [ "$local_install" = false ]; then
+ echo "Created application bundle:"
+ echo "$app_path"
+ fi
else
dmg_target_directory="target/${target_triple}/${target_dir}"
dmg_source_directory="${dmg_target_directory}/dmg"
@@ -0,0 +1,60 @@
+#!/bin/bash
+
+# Check if ./target/wasi-sdk exists
+if [ ! -d "./target/wasi-sdk" ]; then
+ echo "WASI SDK not found, downloading v25..."
+
+ # Determine OS and architecture
+ OS=$(uname -s | tr '[:upper:]' '[:lower:]')
+ ARCH=$(uname -m)
+
+ # Map architecture names to WASI SDK format
+ case $ARCH in
+ x86_64)
+ ARCH="x86_64"
+ ;;
+ arm64|aarch64)
+ ARCH="arm64"
+ ;;
+ *)
+ echo "Unsupported architecture: $ARCH"
+ exit 1
+ ;;
+ esac
+
+ # Map OS names to WASI SDK format
+ case $OS in
+ darwin)
+ OS="macos"
+ ;;
+ linux)
+ OS="linux"
+ ;;
+ mingw*|msys*|cygwin*)
+ OS="mingw"
+ ;;
+ *)
+ echo "Unsupported OS: $OS"
+ exit 1
+ ;;
+ esac
+
+ # Construct download URL
+ WASI_SDK_VERSION="25"
+ WASI_SDK_URL="https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${WASI_SDK_VERSION}/wasi-sdk-${WASI_SDK_VERSION}.0-${ARCH}-${OS}.tar.gz"
+
+ echo "Downloading from: $WASI_SDK_URL"
+
+ # Create target directory if it doesn't exist
+ mkdir -p ./target
+
+ # Download and extract
+ curl -L "$WASI_SDK_URL" | tar -xz -C ./target
+
+ # Rename the extracted directory to wasi-sdk
+ mv "./target/wasi-sdk-${WASI_SDK_VERSION}.0-${ARCH}-${OS}" "./target/wasi-sdk"
+
+ echo "WASI SDK v25 installed successfully"
+else
+ echo "WASI SDK already exists at ./target/wasi-sdk"
+fi
@@ -18,4 +18,4 @@ case $channel in
;;
esac
-curl -s "https://zed.dev/api/releases/latest?asset=zed&os=macos&arch=aarch64$query" | jq -r .version
+curl -s "https://cloud.zed.dev/releases/$channel/latest/asset?asset=zed&os=macos&arch=aarch64" | jq -r .version
@@ -82,7 +82,7 @@ linux() {
cp "$ZED_BUNDLE_PATH" "$temp/zed-linux-$arch.tar.gz"
else
echo "Downloading Zed"
- curl "https://zed.dev/api/releases/$channel/latest/zed-linux-$arch.tar.gz" > "$temp/zed-linux-$arch.tar.gz"
+ curl "https://cloud.zed.dev/releases/$channel/latest/download?asset=zed&arch=$arch&os=linux&source=install.sh" > "$temp/zed-linux-$arch.tar.gz"
fi
suffix=""
@@ -135,7 +135,7 @@ linux() {
macos() {
echo "Downloading Zed"
- curl "https://zed.dev/api/releases/$channel/latest/Zed-$arch.dmg" > "$temp/Zed-$arch.dmg"
+ curl "https://cloud.zed.dev/releases/$channel/latest/download?asset=zed&os=macos&arch=$arch&source=install.sh" > "$temp/Zed-$arch.dmg"
hdiutil attach -quiet "$temp/Zed-$arch.dmg" -mountpoint "$temp/mount"
app="$(cd "$temp/mount/"; echo *.app)"
echo "Installing $app"
@@ -1,26 +1,19 @@
-use gh_workflow::{
- Event, Expression, Job, PullRequest, PullRequestType, Run, Schedule, Step, Use, Workflow,
- WorkflowDispatch,
-};
+use gh_workflow::{Event, Expression, Job, Run, Schedule, Step, Use, Workflow, WorkflowDispatch};
use crate::tasks::workflows::{
runners::{self, Platform},
steps::{self, FluentBuilder as _, NamedJob, named, setup_cargo_config},
- vars,
+ vars::{self, Input},
};
pub(crate) fn run_agent_evals() -> Workflow {
let agent_evals = agent_evals();
+ let model_name = Input::string("model_name", None);
named::workflow()
- .on(Event::default()
- .schedule([Schedule::default().cron("0 0 * * *")])
- .pull_request(PullRequest::default().add_branch("**").types([
- PullRequestType::Synchronize,
- PullRequestType::Reopened,
- PullRequestType::Labeled,
- ]))
- .workflow_dispatch(WorkflowDispatch::default()))
+ .on(Event::default().workflow_dispatch(
+ WorkflowDispatch::default().add_input(model_name.name, model_name.input()),
+ ))
.concurrency(vars::one_workflow_per_non_main_branch())
.add_env(("CARGO_TERM_COLOR", "always"))
.add_env(("CARGO_INCREMENTAL", 0))
@@ -28,29 +21,28 @@ pub(crate) fn run_agent_evals() -> Workflow {
.add_env(("ANTHROPIC_API_KEY", vars::ANTHROPIC_API_KEY))
.add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED))
.add_env(("ZED_EVAL_TELEMETRY", 1))
+ .add_env(("MODEL_NAME", model_name.to_string()))
.add_job(agent_evals.name, agent_evals.job)
}
fn agent_evals() -> NamedJob {
fn run_eval() -> Step<Run> {
- named::bash("cargo run --package=eval -- --repetitions=8 --concurrency=1")
+ named::bash(
+ "cargo run --package=eval -- --repetitions=8 --concurrency=1 --model \"${MODEL_NAME}\"",
+ )
}
named::job(
Job::default()
- .cond(Expression::new(indoc::indoc!{r#"
- github.repository_owner == 'zed-industries' &&
- (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
- "#}))
.runs_on(runners::LINUX_DEFAULT)
- .timeout_minutes(60_u32)
+ .timeout_minutes(60_u32 * 10)
.add_step(steps::checkout_repo())
.add_step(steps::cache_rust_dependencies_namespace())
.map(steps::install_linux_dependencies)
.add_step(setup_cargo_config(Platform::Linux))
.add_step(steps::script("cargo build --package=eval"))
.add_step(run_eval())
- .add_step(steps::cleanup_cargo_config(Platform::Linux))
+ .add_step(steps::cleanup_cargo_config(Platform::Linux)),
)
}
@@ -292,8 +292,8 @@ fn check_workspace_binaries() -> NamedJob {
.runs_on(runners::LINUX_LARGE)
.add_step(steps::checkout_repo())
.add_step(steps::setup_cargo_config(Platform::Linux))
- .map(steps::install_linux_dependencies)
.add_step(steps::cache_rust_dependencies_namespace())
+ .map(steps::install_linux_dependencies)
.add_step(steps::script("cargo build -p collab"))
.add_step(steps::script("cargo build --workspace --bins --examples"))
.add_step(steps::cleanup_cargo_config(Platform::Linux)),
@@ -312,13 +312,13 @@ pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
.runs_on(runner)
.add_step(steps::checkout_repo())
.add_step(steps::setup_cargo_config(platform))
+ .when(platform == Platform::Linux, |this| {
+ this.add_step(steps::cache_rust_dependencies_namespace())
+ })
.when(
platform == Platform::Linux,
steps::install_linux_dependencies,
)
- .when(platform == Platform::Linux, |this| {
- this.add_step(steps::cache_rust_dependencies_namespace())
- })
.add_step(steps::setup_node())
.add_step(steps::clippy(platform))
.add_step(steps::cargo_install_nextest(platform))
@@ -113,8 +113,14 @@ fn install_mold() -> Step<Run> {
named::bash("./script/install-mold")
}
+fn download_wasi_sdk() -> Step<Run> {
+ named::bash("./script/download-wasi-sdk")
+}
+
pub(crate) fn install_linux_dependencies(job: Job) -> Job {
- job.add_step(setup_linux()).add_step(install_mold())
+ job.add_step(setup_linux())
+ .add_step(install_mold())
+ .add_step(download_wasi_sdk())
}
pub fn script(name: &str) -> Step<Run> {