Detailed changes
@@ -221,6 +221,7 @@ dependencies = [
"chrono",
"client",
"collections",
+ "command_palette_hooks",
"component",
"context_server",
"db",
@@ -242,6 +243,7 @@ dependencies = [
"jsonschema",
"language",
"language_model",
+ "language_models",
"languages",
"log",
"lsp",
@@ -280,6 +282,7 @@ dependencies = [
"time_format",
"tree-sitter-md",
"ui",
+ "ui_input",
"unindent",
"urlencoding",
"util",
@@ -1880,9 +1883,7 @@ version = "0.1.0"
dependencies = [
"aws-smithy-runtime-api",
"aws-smithy-types",
- "futures 0.3.31",
"http_client",
- "tokio",
"workspace-hack",
]
@@ -6369,6 +6370,7 @@ dependencies = [
"buffer_diff",
"call",
"chrono",
+ "client",
"collections",
"command_palette_hooks",
"component",
@@ -7864,6 +7866,7 @@ dependencies = [
"derive_more 0.99.19",
"futures 0.3.31",
"http 1.3.1",
+ "http-body 1.0.1",
"log",
"serde",
"serde_json",
@@ -9108,11 +9111,11 @@ dependencies = [
"client",
"collections",
"component",
+ "convert_case 0.8.0",
"copilot",
"credentials_provider",
"deepseek",
"editor",
- "fs",
"futures 0.3.31",
"google_ai",
"gpui",
@@ -483,6 +483,7 @@ heed = { version = "0.21.0", features = ["read-txn-no-tls"] }
hex = "0.4.3"
html5ever = "0.27.0"
http = "1.1"
+http-body = "1.0"
hyper = "0.14"
ignore = "0.4.22"
image = "0.25.1"
@@ -0,0 +1,4 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
@@ -15,7 +15,7 @@
{
"context": "Editor && vim_mode == insert && !menu",
"bindings": {
- // "j k": "vim::SwitchToNormalMode"
+ // "j k": "vim::NormalBefore"
}
}
]
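
For reference, a user who wants this escape chord can copy the corrected action name into their own keymap; a minimal sketch (context string and binding taken verbatim from the hunk above):

```jsonc
[
  {
    "context": "Editor && vim_mode == insert && !menu",
    "bindings": {
      // "j k" in insert mode now maps to the renamed action
      "j k": "vim::NormalBefore"
    }
  }
]
```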
@@ -6,7 +6,7 @@
}
},
{
- "context": "Editor",
+ "context": "Editor && mode == full",
"bindings": {
"cmd-l": "go_to_line::Toggle",
"ctrl-shift-d": "editor::DuplicateLineDown",
@@ -15,7 +15,12 @@
"cmd-enter": "editor::NewlineBelow",
"cmd-alt-enter": "editor::NewlineAbove",
"cmd-shift-l": "editor::SelectLine",
- "cmd-shift-t": "outline::Toggle",
+ "cmd-shift-t": "outline::Toggle"
+ }
+ },
+ {
+ "context": "Editor",
+ "bindings": {
"alt-backspace": "editor::DeleteToPreviousWordStart",
"alt-shift-backspace": "editor::DeleteToNextWordEnd",
"alt-delete": "editor::DeleteToNextWordEnd",
@@ -39,10 +44,6 @@
"ctrl-_": "editor::ConvertToSnakeCase"
}
},
- {
- "context": "Editor && mode == full",
- "bindings": {}
- },
{
"context": "BufferSearchBar",
"bindings": {
@@ -1076,6 +1076,10 @@
// Send anonymized usage data like what languages you're using Zed with.
"metrics": true
},
+ // Whether to disable all AI features in Zed.
+ //
+ // Default: false
+ "disable_ai": false,
// Automatically update Zed. This setting may be ignored on Linux if
// installed through a package manager.
"auto_update": true,
@@ -1712,6 +1716,7 @@
"openai": {
"api_url": "https://api.openai.com/v1"
},
+ "openai_compatible": {},
"open_router": {
"api_url": "https://openrouter.ai/api/v1"
},
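
The empty `"openai_compatible": {}` default is what the new "Add Provider" modal (further down) writes into. A hand-written entry would presumably look like the sketch below; the key names are inferred from `OpenAiCompatibleSettingsContent` and `AvailableModel` in the new modal code, so treat the exact shape as an assumption:

```jsonc
"openai_compatible": {
  // hypothetical provider entry
  "my-provider": {
    "api_url": "https://example.com/v1",
    "available_models": [
      {
        "name": "some-model",
        "max_tokens": 200000,
        "max_completion_tokens": 200000,
        "max_output_tokens": 32000
      }
    ]
  }
}
```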
@@ -15,13 +15,15 @@
"adapter": "JavaScript",
"program": "$ZED_FILE",
"request": "launch",
- "cwd": "$ZED_WORKTREE_ROOT"
+ "cwd": "$ZED_WORKTREE_ROOT",
+ "type": "pwa-node"
},
{
"label": "JavaScript debug terminal",
"adapter": "JavaScript",
"request": "launch",
"cwd": "$ZED_WORKTREE_ROOT",
- "console": "integratedTerminal"
+ "console": "integratedTerminal",
+ "type": "pwa-node"
}
]
@@ -47,7 +47,7 @@ use std::{
time::{Duration, Instant},
};
use thiserror::Error;
-use util::{ResultExt as _, debug_panic, post_inc};
+use util::{ResultExt as _, post_inc};
use uuid::Uuid;
use zed_llm_client::{CompletionIntent, CompletionRequestStatus, UsageLimit};
@@ -1582,23 +1582,21 @@ impl Thread {
model: Arc<dyn LanguageModel>,
cx: &mut App,
) -> Option<PendingToolUse> {
- let action_log = self.action_log.read(cx);
-
- if !action_log.has_unnotified_user_edits() {
- return None;
- }
-
// Represent notification as a simulated `project_notifications` tool call
let tool_name = Arc::from("project_notifications");
- let Some(tool) = self.tools.read(cx).tool(&tool_name, cx) else {
- debug_panic!("`project_notifications` tool not found");
- return None;
- };
+ let tool = self.tools.read(cx).tool(&tool_name, cx)?;
if !self.profile.is_tool_enabled(tool.source(), tool.name(), cx) {
return None;
}
+ if self
+ .action_log
+ .update(cx, |log, cx| log.unnotified_user_edits(cx).is_none())
+ {
+ return None;
+ }
+
let input = serde_json::json!({});
let request = Arc::new(LanguageModelRequest::default()); // unused
let window = None;
@@ -5492,7 +5490,7 @@ fn main() {{
let thread = thread_store.update(cx, |store, cx| store.create_thread(cx));
let context_store = cx.new(|_cx| ContextStore::new(project.downgrade(), None));
- let provider = Arc::new(FakeLanguageModelProvider);
+ let provider = Arc::new(FakeLanguageModelProvider::default());
let model = provider.test_model();
let model: Arc<dyn LanguageModel> = Arc::new(model);
@@ -439,10 +439,6 @@ pub struct EditToolParams {
pub new_text: String,
}
-#[derive(Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct EditToolResponse;
-
#[derive(Deserialize, JsonSchema, Debug)]
pub struct ReadToolParams {
/// The absolute path to the file to read.
@@ -455,12 +451,6 @@ pub struct ReadToolParams {
pub limit: Option<u32>,
}
-#[derive(Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ReadToolResponse {
- pub content: String,
-}
-
#[derive(Deserialize, JsonSchema, Debug)]
pub struct WriteToolParams {
/// Absolute path for new file
@@ -15,8 +15,9 @@ use serde::{Deserialize, Serialize};
use util::debug_panic;
// todo! use shared tool inference?
-use crate::claude::tools::{
- ClaudeTool, EditToolParams, EditToolResponse, ReadToolParams, ReadToolResponse,
+use crate::claude::{
+ McpServerConfig,
+ tools::{ClaudeTool, EditToolParams, ReadToolParams},
};
pub struct ZedMcpServer {
@@ -174,11 +175,9 @@ impl ZedMcpServer {
let input =
serde_json::from_value(request.arguments.context("Arguments required")?)?;
- let result = Self::handle_read_tool_call(input, delegate, cx).await?;
+ let content = Self::handle_read_tool_call(input, delegate, cx).await?;
Ok(CallToolResponse {
- content: vec![ToolResponseContent::Text {
- text: serde_json::to_string(&result)?,
- }],
+ content,
is_error: None,
meta: None,
})
@@ -186,11 +185,9 @@ impl ZedMcpServer {
let input =
serde_json::from_value(request.arguments.context("Arguments required")?)?;
- let result = Self::handle_edit_tool_call(input, delegate, cx).await?;
+ Self::handle_edit_tool_call(input, delegate, cx).await?;
Ok(CallToolResponse {
- content: vec![ToolResponseContent::Text {
- text: serde_json::to_string(&result)?,
- }],
+ content: vec![],
is_error: None,
meta: None,
})
@@ -204,7 +201,7 @@ impl ZedMcpServer {
params: ReadToolParams,
delegate: OldAcpClientDelegate,
cx: &AsyncApp,
- ) -> Task<Result<ReadToolResponse>> {
+ ) -> Task<Result<Vec<ToolResponseContent>>> {
cx.foreground_executor().spawn(async move {
let response = delegate
.read_text_file(acp_old::ReadTextFileParams {
@@ -214,9 +211,9 @@ impl ZedMcpServer {
})
.await?;
- Ok(ReadToolResponse {
- content: response.content,
- })
+ Ok(vec![ToolResponseContent::Text {
+ text: response.content,
+ }])
})
}
@@ -224,7 +221,7 @@ impl ZedMcpServer {
params: EditToolParams,
delegate: OldAcpClientDelegate,
cx: &AsyncApp,
- ) -> Task<Result<EditToolResponse>> {
+ ) -> Task<Result<()>> {
cx.foreground_executor().spawn(async move {
let response = delegate
.read_text_file_reusing_snapshot(acp_old::ReadTextFileParams {
@@ -246,7 +243,7 @@ impl ZedMcpServer {
})
.await?;
- Ok(EditToolResponse)
+ Ok(())
})
}
@@ -32,6 +32,7 @@ buffer_diff.workspace = true
chrono.workspace = true
client.workspace = true
collections.workspace = true
+command_palette_hooks.workspace = true
component.workspace = true
context_server.workspace = true
db.workspace = true
@@ -53,6 +54,7 @@ itertools.workspace = true
jsonschema.workspace = true
language.workspace = true
language_model.workspace = true
+language_models.workspace = true
log.workspace = true
lsp.workspace = true
markdown.workspace = true
@@ -87,6 +89,7 @@ theme.workspace = true
time.workspace = true
time_format.workspace = true
ui.workspace = true
+ui_input.workspace = true
urlencoding.workspace = true
util.workspace = true
uuid.workspace = true
@@ -3895,7 +3895,7 @@ mod tests {
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry.set_default_model(
Some(ConfiguredModel {
- provider: Arc::new(FakeLanguageModelProvider),
+ provider: Arc::new(FakeLanguageModelProvider::default()),
model,
}),
cx,
@@ -3979,7 +3979,7 @@ mod tests {
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry.set_default_model(
Some(ConfiguredModel {
- provider: Arc::new(FakeLanguageModelProvider),
+ provider: Arc::new(FakeLanguageModelProvider::default()),
model: model.clone(),
}),
cx,
@@ -1,3 +1,4 @@
+mod add_llm_provider_modal;
mod configure_context_server_modal;
mod manage_profiles_modal;
mod tool_picker;
@@ -28,7 +29,7 @@ use proto::Plan;
use settings::{Settings, update_settings_file};
use ui::{
Chip, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, PopoverMenu,
- Scrollbar, ScrollbarState, Switch, SwitchColor, Tooltip, prelude::*,
+ Scrollbar, ScrollbarState, Switch, SwitchColor, SwitchField, Tooltip, prelude::*,
};
use util::ResultExt as _;
use workspace::Workspace;
@@ -37,7 +38,10 @@ use zed_actions::ExtensionCategoryFilter;
pub(crate) use configure_context_server_modal::ConfigureContextServerModal;
pub(crate) use manage_profiles_modal::ManageProfilesModal;
-use crate::AddContextServer;
+use crate::{
+ AddContextServer,
+ agent_configuration::add_llm_provider_modal::{AddLlmProviderModal, LlmCompatibleProvider},
+};
pub struct AgentConfiguration {
fs: Arc<dyn Fs>,
@@ -304,16 +308,55 @@ impl AgentConfiguration {
v_flex()
.child(
- v_flex()
+ h_flex()
.p(DynamicSpacing::Base16.rems(cx))
.pr(DynamicSpacing::Base20.rems(cx))
.pb_0()
.mb_2p5()
- .gap_0p5()
- .child(Headline::new("LLM Providers"))
+ .items_start()
+ .justify_between()
+ .child(
+ v_flex()
+ .gap_0p5()
+ .child(Headline::new("LLM Providers"))
+ .child(
+ Label::new("Add at least one provider to use AI-powered features.")
+ .color(Color::Muted),
+ ),
+ )
.child(
- Label::new("Add at least one provider to use AI-powered features.")
- .color(Color::Muted),
+ PopoverMenu::new("add-provider-popover")
+ .trigger(
+ Button::new("add-provider", "Add Provider")
+ .icon_position(IconPosition::Start)
+ .icon(IconName::Plus)
+ .icon_size(IconSize::Small)
+ .icon_color(Color::Muted)
+ .label_size(LabelSize::Small),
+ )
+ .anchor(gpui::Corner::TopRight)
+ .menu({
+ let workspace = self.workspace.clone();
+ move |window, cx| {
+ Some(ContextMenu::build(window, cx, |menu, _window, _cx| {
+ menu.header("Compatible APIs").entry("OpenAI", None, {
+ let workspace = workspace.clone();
+ move |window, cx| {
+ workspace
+ .update(cx, |workspace, cx| {
+ AddLlmProviderModal::toggle(
+ LlmCompatibleProvider::OpenAi,
+ workspace,
+ window,
+ cx,
+ );
+ })
+ .log_err();
+ }
+ })
+ }))
+ }
+ }),
),
)
.child(
@@ -330,119 +373,74 @@ impl AgentConfiguration {
fn render_command_permission(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let always_allow_tool_actions = AgentSettings::get_global(cx).always_allow_tool_actions;
+ let fs = self.fs.clone();
- h_flex()
- .gap_4()
- .justify_between()
- .flex_wrap()
- .child(
- v_flex()
- .gap_0p5()
- .max_w_5_6()
- .child(Label::new("Allow running editing tools without asking for confirmation"))
- .child(
- Label::new(
- "The agent can perform potentially destructive actions without asking for your confirmation.",
- )
- .color(Color::Muted),
- ),
- )
- .child(
- Switch::new(
- "always-allow-tool-actions-switch",
- always_allow_tool_actions.into(),
- )
- .color(SwitchColor::Accent)
- .on_click({
- let fs = self.fs.clone();
- move |state, _window, cx| {
- let allow = state == &ToggleState::Selected;
- update_settings_file::<AgentSettings>(
- fs.clone(),
- cx,
- move |settings, _| {
- settings.set_always_allow_tool_actions(allow);
- },
- );
- }
- }),
- )
+ SwitchField::new(
+ "single-file-review",
+ "Enable single-file agent reviews",
+ "Agent edits are also displayed in single-file editors for review.",
+ always_allow_tool_actions,
+ move |state, _window, cx| {
+ let allow = state == &ToggleState::Selected;
+ update_settings_file::<AgentSettings>(fs.clone(), cx, move |settings, _| {
+ settings.set_always_allow_tool_actions(allow);
+ });
+ },
+ )
}
fn render_single_file_review(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let single_file_review = AgentSettings::get_global(cx).single_file_review;
+ let fs = self.fs.clone();
- h_flex()
- .gap_4()
- .justify_between()
- .flex_wrap()
- .child(
- v_flex()
- .gap_0p5()
- .max_w_5_6()
- .child(Label::new("Enable single-file agent reviews"))
- .child(
- Label::new(
- "Agent edits are also displayed in single-file editors for review.",
- )
- .color(Color::Muted),
- ),
- )
- .child(
- Switch::new("single-file-review-switch", single_file_review.into())
- .color(SwitchColor::Accent)
- .on_click({
- let fs = self.fs.clone();
- move |state, _window, cx| {
- let allow = state == &ToggleState::Selected;
- update_settings_file::<AgentSettings>(
- fs.clone(),
- cx,
- move |settings, _| {
- settings.set_single_file_review(allow);
- },
- );
- }
- }),
- )
+ SwitchField::new(
+ "single-file-review",
+ "Enable single-file agent reviews",
+ "Agent edits are also displayed in single-file editors for review.",
+ single_file_review,
+ move |state, _window, cx| {
+ let allow = state == &ToggleState::Selected;
+ update_settings_file::<AgentSettings>(fs.clone(), cx, move |settings, _| {
+ settings.set_single_file_review(allow);
+ });
+ },
+ )
}
fn render_sound_notification(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let play_sound_when_agent_done = AgentSettings::get_global(cx).play_sound_when_agent_done;
+ let fs = self.fs.clone();
- h_flex()
- .gap_4()
- .justify_between()
- .flex_wrap()
- .child(
- v_flex()
- .gap_0p5()
- .max_w_5_6()
- .child(Label::new("Play sound when finished generating"))
- .child(
- Label::new(
- "Hear a notification sound when the agent is done generating changes or needs your input.",
- )
- .color(Color::Muted),
- ),
- )
- .child(
- Switch::new("play-sound-notification-switch", play_sound_when_agent_done.into())
- .color(SwitchColor::Accent)
- .on_click({
- let fs = self.fs.clone();
- move |state, _window, cx| {
- let allow = state == &ToggleState::Selected;
- update_settings_file::<AgentSettings>(
- fs.clone(),
- cx,
- move |settings, _| {
- settings.set_play_sound_when_agent_done(allow);
- },
- );
- }
- }),
- )
+ SwitchField::new(
+ "sound-notification",
+ "Play sound when finished generating",
+ "Hear a notification sound when the agent is done generating changes or needs your input.",
+ play_sound_when_agent_done,
+ move |state, _window, cx| {
+ let allow = state == &ToggleState::Selected;
+ update_settings_file::<AgentSettings>(fs.clone(), cx, move |settings, _| {
+ settings.set_play_sound_when_agent_done(allow);
+ });
+ },
+ )
+ }
+
+ fn render_modifier_to_send(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
+ let use_modifier_to_send = AgentSettings::get_global(cx).use_modifier_to_send;
+ let fs = self.fs.clone();
+
+ SwitchField::new(
+ "modifier-send",
+ "Use modifier to submit a message",
+ "Make a modifier (cmd-enter on macOS, ctrl-enter on Linux) required to send messages.",
+ use_modifier_to_send,
+ move |state, _window, cx| {
+ let allow = state == &ToggleState::Selected;
+ update_settings_file::<AgentSettings>(fs.clone(), cx, move |settings, _| {
+ settings.set_use_modifier_to_send(allow);
+ });
+ },
+ )
}
fn render_general_settings_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
@@ -456,6 +454,7 @@ impl AgentConfiguration {
.child(self.render_command_permission(cx))
.child(self.render_single_file_review(cx))
.child(self.render_sound_notification(cx))
+ .child(self.render_modifier_to_send(cx))
}
fn render_zed_plan_info(&self, plan: Option<Plan>, cx: &mut Context<Self>) -> impl IntoElement {
@@ -0,0 +1,639 @@
+use std::sync::Arc;
+
+use anyhow::Result;
+use collections::HashSet;
+use fs::Fs;
+use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, Task};
+use language_model::LanguageModelRegistry;
+use language_models::{
+ AllLanguageModelSettings, OpenAiCompatibleSettingsContent,
+ provider::open_ai_compatible::AvailableModel,
+};
+use settings::update_settings_file;
+use ui::{Banner, KeyBinding, Modal, ModalFooter, ModalHeader, Section, prelude::*};
+use ui_input::SingleLineInput;
+use workspace::{ModalView, Workspace};
+
+#[derive(Clone, Copy)]
+pub enum LlmCompatibleProvider {
+ OpenAi,
+}
+
+impl LlmCompatibleProvider {
+ fn name(&self) -> &'static str {
+ match self {
+ LlmCompatibleProvider::OpenAi => "OpenAI",
+ }
+ }
+
+ fn api_url(&self) -> &'static str {
+ match self {
+ LlmCompatibleProvider::OpenAi => "https://api.openai.com/v1",
+ }
+ }
+}
+
+struct AddLlmProviderInput {
+ provider_name: Entity<SingleLineInput>,
+ api_url: Entity<SingleLineInput>,
+ api_key: Entity<SingleLineInput>,
+ models: Vec<ModelInput>,
+}
+
+impl AddLlmProviderInput {
+ fn new(provider: LlmCompatibleProvider, window: &mut Window, cx: &mut App) -> Self {
+ let provider_name = single_line_input("Provider Name", provider.name(), None, window, cx);
+ let api_url = single_line_input("API URL", provider.api_url(), None, window, cx);
+ let api_key = single_line_input(
+ "API Key",
+ "000000000000000000000000000000000000000000000000",
+ None,
+ window,
+ cx,
+ );
+
+ Self {
+ provider_name,
+ api_url,
+ api_key,
+ models: vec![ModelInput::new(window, cx)],
+ }
+ }
+
+ fn add_model(&mut self, window: &mut Window, cx: &mut App) {
+ self.models.push(ModelInput::new(window, cx));
+ }
+
+ fn remove_model(&mut self, index: usize) {
+ self.models.remove(index);
+ }
+}
+
+struct ModelInput {
+ name: Entity<SingleLineInput>,
+ max_completion_tokens: Entity<SingleLineInput>,
+ max_output_tokens: Entity<SingleLineInput>,
+ max_tokens: Entity<SingleLineInput>,
+}
+
+impl ModelInput {
+ fn new(window: &mut Window, cx: &mut App) -> Self {
+ let model_name = single_line_input(
+ "Model Name",
+ "e.g. gpt-4o, claude-opus-4, gemini-2.5-pro",
+ None,
+ window,
+ cx,
+ );
+ let max_completion_tokens = single_line_input(
+ "Max Completion Tokens",
+ "200000",
+ Some("200000"),
+ window,
+ cx,
+ );
+ let max_output_tokens = single_line_input(
+ "Max Output Tokens",
+ "Max Output Tokens",
+ Some("32000"),
+ window,
+ cx,
+ );
+ let max_tokens = single_line_input("Max Tokens", "Max Tokens", Some("200000"), window, cx);
+ Self {
+ name: model_name,
+ max_completion_tokens,
+ max_output_tokens,
+ max_tokens,
+ }
+ }
+
+ fn parse(&self, cx: &App) -> Result<AvailableModel, SharedString> {
+ let name = self.name.read(cx).text(cx);
+ if name.is_empty() {
+ return Err(SharedString::from("Model Name cannot be empty"));
+ }
+ Ok(AvailableModel {
+ name,
+ display_name: None,
+ max_completion_tokens: Some(
+ self.max_completion_tokens
+ .read(cx)
+ .text(cx)
+ .parse::<u64>()
+ .map_err(|_| SharedString::from("Max Completion Tokens must be a number"))?,
+ ),
+ max_output_tokens: Some(
+ self.max_output_tokens
+ .read(cx)
+ .text(cx)
+ .parse::<u64>()
+ .map_err(|_| SharedString::from("Max Output Tokens must be a number"))?,
+ ),
+ max_tokens: self
+ .max_tokens
+ .read(cx)
+ .text(cx)
+ .parse::<u64>()
+ .map_err(|_| SharedString::from("Max Tokens must be a number"))?,
+ })
+ }
+}
+
+fn single_line_input(
+ label: impl Into<SharedString>,
+ placeholder: impl Into<SharedString>,
+ text: Option<&str>,
+ window: &mut Window,
+ cx: &mut App,
+) -> Entity<SingleLineInput> {
+ cx.new(|cx| {
+ let input = SingleLineInput::new(window, cx, placeholder).label(label);
+ if let Some(text) = text {
+ input
+ .editor()
+ .update(cx, |editor, cx| editor.set_text(text, window, cx));
+ }
+ input
+ })
+}
+
+fn save_provider_to_settings(
+ input: &AddLlmProviderInput,
+ cx: &mut App,
+) -> Task<Result<(), SharedString>> {
+ let provider_name: Arc<str> = input.provider_name.read(cx).text(cx).into();
+ if provider_name.is_empty() {
+ return Task::ready(Err("Provider Name cannot be empty".into()));
+ }
+
+ if LanguageModelRegistry::read_global(cx)
+ .providers()
+ .iter()
+ .any(|provider| {
+ provider.id().0.as_ref() == provider_name.as_ref()
+ || provider.name().0.as_ref() == provider_name.as_ref()
+ })
+ {
+ return Task::ready(Err(
+ "Provider Name is already taken by another provider".into()
+ ));
+ }
+
+ let api_url = input.api_url.read(cx).text(cx);
+ if api_url.is_empty() {
+ return Task::ready(Err("API URL cannot be empty".into()));
+ }
+
+ let api_key = input.api_key.read(cx).text(cx);
+ if api_key.is_empty() {
+ return Task::ready(Err("API Key cannot be empty".into()));
+ }
+
+ let mut models = Vec::new();
+ let mut model_names: HashSet<String> = HashSet::default();
+ for model in &input.models {
+ match model.parse(cx) {
+ Ok(model) => {
+ if !model_names.insert(model.name.clone()) {
+ return Task::ready(Err("Model Names must be unique".into()));
+ }
+ models.push(model)
+ }
+ Err(err) => return Task::ready(Err(err)),
+ }
+ }
+
+ let fs = <dyn Fs>::global(cx);
+ let task = cx.write_credentials(&api_url, "Bearer", api_key.as_bytes());
+ cx.spawn(async move |cx| {
+ task.await
+ .map_err(|_| "Failed to write API key to keychain")?;
+ cx.update(|cx| {
+ update_settings_file::<AllLanguageModelSettings>(fs, cx, |settings, _cx| {
+ settings.openai_compatible.get_or_insert_default().insert(
+ provider_name,
+ OpenAiCompatibleSettingsContent {
+ api_url,
+ available_models: models,
+ },
+ );
+ });
+ })
+ .ok();
+ Ok(())
+ })
+}
+
+pub struct AddLlmProviderModal {
+ provider: LlmCompatibleProvider,
+ input: AddLlmProviderInput,
+ focus_handle: FocusHandle,
+ last_error: Option<SharedString>,
+}
+
+impl AddLlmProviderModal {
+ pub fn toggle(
+ provider: LlmCompatibleProvider,
+ workspace: &mut Workspace,
+ window: &mut Window,
+ cx: &mut Context<Workspace>,
+ ) {
+ workspace.toggle_modal(window, cx, |window, cx| Self::new(provider, window, cx));
+ }
+
+ fn new(provider: LlmCompatibleProvider, window: &mut Window, cx: &mut Context<Self>) -> Self {
+ Self {
+ input: AddLlmProviderInput::new(provider, window, cx),
+ provider,
+ last_error: None,
+ focus_handle: cx.focus_handle(),
+ }
+ }
+
+ fn confirm(&mut self, _: &menu::Confirm, _: &mut Window, cx: &mut Context<Self>) {
+ let task = save_provider_to_settings(&self.input, cx);
+ cx.spawn(async move |this, cx| {
+ let result = task.await;
+ this.update(cx, |this, cx| match result {
+ Ok(_) => {
+ cx.emit(DismissEvent);
+ }
+ Err(error) => {
+ this.last_error = Some(error);
+ cx.notify();
+ }
+ })
+ })
+ .detach_and_log_err(cx);
+ }
+
+ fn cancel(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context<Self>) {
+ cx.emit(DismissEvent);
+ }
+
+ fn render_section(&self) -> Section {
+ Section::new()
+ .child(self.input.provider_name.clone())
+ .child(self.input.api_url.clone())
+ .child(self.input.api_key.clone())
+ }
+
+ fn render_model_section(&self, cx: &mut Context<Self>) -> Section {
+ Section::new().child(
+ v_flex()
+ .gap_2()
+ .child(
+ h_flex()
+ .justify_between()
+ .child(Label::new("Models").size(LabelSize::Small))
+ .child(
+ Button::new("add-model", "Add Model")
+ .icon(IconName::Plus)
+ .icon_position(IconPosition::Start)
+ .icon_size(IconSize::XSmall)
+ .icon_color(Color::Muted)
+ .label_size(LabelSize::Small)
+ .on_click(cx.listener(|this, _, window, cx| {
+ this.input.add_model(window, cx);
+ cx.notify();
+ })),
+ ),
+ )
+ .children(
+ self.input
+ .models
+ .iter()
+ .enumerate()
+ .map(|(ix, _)| self.render_model(ix, cx)),
+ ),
+ )
+ }
+
+ fn render_model(&self, ix: usize, cx: &mut Context<Self>) -> impl IntoElement + use<> {
+ let has_more_than_one_model = self.input.models.len() > 1;
+ let model = &self.input.models[ix];
+
+ v_flex()
+ .p_2()
+ .gap_2()
+ .rounded_sm()
+ .border_1()
+ .border_dashed()
+ .border_color(cx.theme().colors().border.opacity(0.6))
+ .bg(cx.theme().colors().element_active.opacity(0.15))
+ .child(model.name.clone())
+ .child(
+ h_flex()
+ .gap_2()
+ .child(model.max_completion_tokens.clone())
+ .child(model.max_output_tokens.clone()),
+ )
+ .child(model.max_tokens.clone())
+ .when(has_more_than_one_model, |this| {
+ this.child(
+ Button::new(("remove-model", ix), "Remove Model")
+ .icon(IconName::Trash)
+ .icon_position(IconPosition::Start)
+ .icon_size(IconSize::XSmall)
+ .icon_color(Color::Muted)
+ .label_size(LabelSize::Small)
+ .style(ButtonStyle::Outlined)
+ .full_width()
+ .on_click(cx.listener(move |this, _, _window, cx| {
+ this.input.remove_model(ix);
+ cx.notify();
+ })),
+ )
+ })
+ }
+}
+
+impl EventEmitter<DismissEvent> for AddLlmProviderModal {}
+
+impl Focusable for AddLlmProviderModal {
+ fn focus_handle(&self, _cx: &App) -> FocusHandle {
+ self.focus_handle.clone()
+ }
+}
+
+impl ModalView for AddLlmProviderModal {}
+
+impl Render for AddLlmProviderModal {
+ fn render(&mut self, window: &mut ui::Window, cx: &mut ui::Context<Self>) -> impl IntoElement {
+ let focus_handle = self.focus_handle(cx);
+
+ div()
+ .id("add-llm-provider-modal")
+ .key_context("AddLlmProviderModal")
+ .w(rems(34.))
+ .elevation_3(cx)
+ .on_action(cx.listener(Self::cancel))
+ .capture_any_mouse_down(cx.listener(|this, _, window, cx| {
+ this.focus_handle(cx).focus(window);
+ }))
+ .child(
+ Modal::new("configure-context-server", None)
+ .header(ModalHeader::new().headline("Add LLM Provider").description(
+ match self.provider {
+ LlmCompatibleProvider::OpenAi => {
+ "This provider will use an OpenAI compatible API."
+ }
+ },
+ ))
+ .when_some(self.last_error.clone(), |this, error| {
+ this.section(
+ Section::new().child(
+ Banner::new()
+ .severity(ui::Severity::Warning)
+ .child(div().text_xs().child(error)),
+ ),
+ )
+ })
+ .child(
+ v_flex()
+ .id("modal_content")
+ .max_h_128()
+ .overflow_y_scroll()
+ .gap_2()
+ .child(self.render_section())
+ .child(self.render_model_section(cx)),
+ )
+ .footer(
+ ModalFooter::new().end_slot(
+ h_flex()
+ .gap_1()
+ .child(
+ Button::new("cancel", "Cancel")
+ .key_binding(
+ KeyBinding::for_action_in(
+ &menu::Cancel,
+ &focus_handle,
+ window,
+ cx,
+ )
+ .map(|kb| kb.size(rems_from_px(12.))),
+ )
+ .on_click(cx.listener(|this, _event, window, cx| {
+ this.cancel(&menu::Cancel, window, cx)
+ })),
+ )
+ .child(
+ Button::new("save-server", "Save Provider")
+ .key_binding(
+ KeyBinding::for_action_in(
+ &menu::Confirm,
+ &focus_handle,
+ window,
+ cx,
+ )
+ .map(|kb| kb.size(rems_from_px(12.))),
+ )
+ .on_click(cx.listener(|this, _event, window, cx| {
+ this.confirm(&menu::Confirm, window, cx)
+ })),
+ ),
+ ),
+ ),
+ )
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use editor::EditorSettings;
+ use fs::FakeFs;
+ use gpui::{TestAppContext, VisualTestContext};
+ use language::language_settings;
+ use language_model::{
+ LanguageModelProviderId, LanguageModelProviderName,
+ fake_provider::FakeLanguageModelProvider,
+ };
+ use project::Project;
+ use settings::{Settings as _, SettingsStore};
+ use util::path;
+
+ #[gpui::test]
+ async fn test_save_provider_invalid_inputs(cx: &mut TestAppContext) {
+ let cx = setup_test(cx).await;
+
+ assert_eq!(
+ save_provider_validation_errors("", "someurl", "somekey", vec![], cx,).await,
+ Some("Provider Name cannot be empty".into())
+ );
+
+ assert_eq!(
+ save_provider_validation_errors("someprovider", "", "somekey", vec![], cx,).await,
+ Some("API URL cannot be empty".into())
+ );
+
+ assert_eq!(
+ save_provider_validation_errors("someprovider", "someurl", "", vec![], cx,).await,
+ Some("API Key cannot be empty".into())
+ );
+
+ assert_eq!(
+ save_provider_validation_errors(
+ "someprovider",
+ "someurl",
+ "somekey",
+ vec![("", "200000", "200000", "32000")],
+ cx,
+ )
+ .await,
+ Some("Model Name cannot be empty".into())
+ );
+
+ assert_eq!(
+ save_provider_validation_errors(
+ "someprovider",
+ "someurl",
+ "somekey",
+ vec![("somemodel", "abc", "200000", "32000")],
+ cx,
+ )
+ .await,
+ Some("Max Tokens must be a number".into())
+ );
+
+ assert_eq!(
+ save_provider_validation_errors(
+ "someprovider",
+ "someurl",
+ "somekey",
+ vec![("somemodel", "200000", "abc", "32000")],
+ cx,
+ )
+ .await,
+ Some("Max Completion Tokens must be a number".into())
+ );
+
+ assert_eq!(
+ save_provider_validation_errors(
+ "someprovider",
+ "someurl",
+ "somekey",
+ vec![("somemodel", "200000", "200000", "abc")],
+ cx,
+ )
+ .await,
+ Some("Max Output Tokens must be a number".into())
+ );
+
+ assert_eq!(
+ save_provider_validation_errors(
+ "someprovider",
+ "someurl",
+ "somekey",
+ vec![
+ ("somemodel", "200000", "200000", "32000"),
+ ("somemodel", "200000", "200000", "32000"),
+ ],
+ cx,
+ )
+ .await,
+ Some("Model Names must be unique".into())
+ );
+ }
+
+ #[gpui::test]
+ async fn test_save_provider_name_conflict(cx: &mut TestAppContext) {
+ let cx = setup_test(cx).await;
+
+ cx.update(|_window, cx| {
+ LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
+ registry.register_provider(
+ FakeLanguageModelProvider::new(
+ LanguageModelProviderId::new("someprovider"),
+ LanguageModelProviderName::new("Some Provider"),
+ ),
+ cx,
+ );
+ });
+ });
+
+ assert_eq!(
+ save_provider_validation_errors(
+ "someprovider",
+ "someurl",
+ "someapikey",
+ vec![("somemodel", "200000", "200000", "32000")],
+ cx,
+ )
+ .await,
+ Some("Provider Name is already taken by another provider".into())
+ );
+ }
+
+ async fn setup_test(cx: &mut TestAppContext) -> &mut VisualTestContext {
+ cx.update(|cx| {
+ let store = SettingsStore::test(cx);
+ cx.set_global(store);
+ workspace::init_settings(cx);
+ Project::init_settings(cx);
+ theme::init(theme::LoadThemes::JustBase, cx);
+ language_settings::init(cx);
+ EditorSettings::register(cx);
+ language_model::init_settings(cx);
+ language_models::init_settings(cx);
+ });
+
+ let fs = FakeFs::new(cx.executor());
+ cx.update(|cx| <dyn Fs>::set_global(fs.clone(), cx));
+ let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
+ let (_, cx) =
+ cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
+
+ cx
+ }
+
+ async fn save_provider_validation_errors(
+ provider_name: &str,
+ api_url: &str,
+ api_key: &str,
+ models: Vec<(&str, &str, &str, &str)>,
+ cx: &mut VisualTestContext,
+ ) -> Option<SharedString> {
+ fn set_text(
+ input: &Entity<SingleLineInput>,
+ text: &str,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ input.update(cx, |input, cx| {
+ input.editor().update(cx, |editor, cx| {
+ editor.set_text(text, window, cx);
+ });
+ });
+ }
+
+ let task = cx.update(|window, cx| {
+ let mut input = AddLlmProviderInput::new(LlmCompatibleProvider::OpenAi, window, cx);
+ set_text(&input.provider_name, provider_name, window, cx);
+ set_text(&input.api_url, api_url, window, cx);
+ set_text(&input.api_key, api_key, window, cx);
+
+ for (i, (name, max_tokens, max_completion_tokens, max_output_tokens)) in
+ models.iter().enumerate()
+ {
+ if i >= input.models.len() {
+ input.models.push(ModelInput::new(window, cx));
+ }
+ let model = &mut input.models[i];
+ set_text(&model.name, name, window, cx);
+ set_text(&model.max_tokens, max_tokens, window, cx);
+ set_text(
+ &model.max_completion_tokens,
+ max_completion_tokens,
+ window,
+ cx,
+ );
+ set_text(&model.max_output_tokens, max_output_tokens, window, cx);
+ }
+ save_provider_to_settings(&input, cx)
+ });
+
+ task.await.err()
+ }
+}
@@ -1,5 +1,5 @@
use std::cell::RefCell;
-use std::ops::Range;
+use std::ops::{Not, Range};
use std::path::Path;
use std::rc::Rc;
use std::sync::Arc;
@@ -43,7 +43,7 @@ use anyhow::{Result, anyhow};
use assistant_context::{AssistantContext, ContextEvent, ContextSummary};
use assistant_slash_command::SlashCommandWorkingSet;
use assistant_tool::ToolWorkingSet;
-use client::{UserStore, zed_urls};
+use client::{DisableAiSettings, UserStore, zed_urls};
use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
use feature_flags::{self, FeatureFlagAppExt};
use fs::Fs;
@@ -744,6 +744,7 @@ impl AgentPanel {
if workspace
.panel::<Self>(cx)
.is_some_and(|panel| panel.read(cx).enabled(cx))
+ && !DisableAiSettings::get_global(cx).disable_ai
{
workspace.toggle_panel_focus::<Self>(window, cx);
}
@@ -1681,7 +1682,7 @@ impl Panel for AgentPanel {
}
fn enabled(&self, cx: &App) -> bool {
- AgentSettings::get_global(cx).enabled
+ DisableAiSettings::get_global(cx).disable_ai.not() && AgentSettings::get_global(cx).enabled
}
fn is_zoomed(&self, _window: &Window, _cx: &App) -> bool {
@@ -31,7 +31,8 @@ use std::sync::Arc;
use agent::{Thread, ThreadId};
use agent_settings::{AgentProfileId, AgentSettings, LanguageModelSelection};
use assistant_slash_command::SlashCommandRegistry;
-use client::Client;
+use client::{Client, DisableAiSettings};
+use command_palette_hooks::CommandPaletteFilter;
use feature_flags::FeatureFlagAppExt as _;
use fs::Fs;
use gpui::{Action, App, Entity, actions};
@@ -43,6 +44,7 @@ use prompt_store::PromptBuilder;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings as _, SettingsStore};
+use std::any::TypeId;
pub use crate::active_thread::ActiveThread;
use crate::agent_configuration::{ConfigureContextServerModal, ManageProfilesModal};
@@ -52,6 +54,7 @@ use crate::slash_command_settings::SlashCommandSettings;
pub use agent_diff::{AgentDiffPane, AgentDiffToolbar};
pub use text_thread_editor::{AgentPanelDelegate, TextThreadEditor};
pub use ui::preview::{all_agent_previews, get_agent_preview};
+use zed_actions;
actions!(
agent,
@@ -243,6 +246,66 @@ pub fn init(
})
.detach();
cx.observe_new(ManageProfilesModal::register).detach();
+
+ // Update command palette filter based on AI settings
+ update_command_palette_filter(cx);
+
+ // Watch for settings changes
+ cx.observe_global::<SettingsStore>(|app_cx| {
+ // When settings change, update the command palette filter
+ update_command_palette_filter(app_cx);
+ })
+ .detach();
+}
+
+fn update_command_palette_filter(cx: &mut App) {
+ let disable_ai = DisableAiSettings::get_global(cx).disable_ai;
+ CommandPaletteFilter::update_global(cx, |filter, _| {
+ if disable_ai {
+ filter.hide_namespace("agent");
+ filter.hide_namespace("assistant");
+ filter.hide_namespace("zed_predict_onboarding");
+ filter.hide_namespace("edit_prediction");
+
+ use editor::actions::{
+ AcceptEditPrediction, AcceptPartialEditPrediction, NextEditPrediction,
+ PreviousEditPrediction, ShowEditPrediction, ToggleEditPrediction,
+ };
+ let edit_prediction_actions = [
+ TypeId::of::<AcceptEditPrediction>(),
+ TypeId::of::<AcceptPartialEditPrediction>(),
+ TypeId::of::<ShowEditPrediction>(),
+ TypeId::of::<NextEditPrediction>(),
+ TypeId::of::<PreviousEditPrediction>(),
+ TypeId::of::<ToggleEditPrediction>(),
+ ];
+ filter.hide_action_types(&edit_prediction_actions);
+ filter.hide_action_types(&[TypeId::of::<zed_actions::OpenZedPredictOnboarding>()]);
+ } else {
+ filter.show_namespace("agent");
+ filter.show_namespace("assistant");
+ filter.show_namespace("zed_predict_onboarding");
+
+ filter.show_namespace("edit_prediction");
+
+ use editor::actions::{
+ AcceptEditPrediction, AcceptPartialEditPrediction, NextEditPrediction,
+ PreviousEditPrediction, ShowEditPrediction, ToggleEditPrediction,
+ };
+ let edit_prediction_actions = [
+ TypeId::of::<AcceptEditPrediction>(),
+ TypeId::of::<AcceptPartialEditPrediction>(),
+ TypeId::of::<ShowEditPrediction>(),
+ TypeId::of::<NextEditPrediction>(),
+ TypeId::of::<PreviousEditPrediction>(),
+ TypeId::of::<ToggleEditPrediction>(),
+ ];
+ filter.show_action_types(edit_prediction_actions.iter());
+
+ filter
+ .show_action_types([TypeId::of::<zed_actions::OpenZedPredictOnboarding>()].iter());
+ }
+ });
}
fn init_language_model_settings(cx: &mut App) {
@@ -16,7 +16,7 @@ use agent::{
};
use agent_settings::AgentSettings;
use anyhow::{Context as _, Result};
-use client::telemetry::Telemetry;
+use client::{DisableAiSettings, telemetry::Telemetry};
use collections::{HashMap, HashSet, VecDeque, hash_map};
use editor::SelectionEffects;
use editor::{
@@ -57,6 +57,17 @@ pub fn init(
cx: &mut App,
) {
cx.set_global(InlineAssistant::new(fs, prompt_builder, telemetry));
+
+ cx.observe_global::<SettingsStore>(|cx| {
+ if DisableAiSettings::get_global(cx).disable_ai {
+ // Hide any active inline assist UI when AI is disabled
+ InlineAssistant::update_global(cx, |assistant, cx| {
+ assistant.cancel_all_active_completions(cx);
+ });
+ }
+ })
+ .detach();
+
cx.observe_new(|_workspace: &mut Workspace, window, cx| {
let Some(window) = window else {
return;
@@ -141,6 +152,26 @@ impl InlineAssistant {
.detach();
}
+ /// Hides all active inline assists when AI is disabled
+ pub fn cancel_all_active_completions(&mut self, cx: &mut App) {
+ // Cancel all active completions in editors
+ for (editor_handle, _) in self.assists_by_editor.iter() {
+ if let Some(editor) = editor_handle.upgrade() {
+ let windows = cx.windows();
+ if !windows.is_empty() {
+ let window = windows[0];
+ let _ = window.update(cx, |_, window, cx| {
+ editor.update(cx, |editor, cx| {
+ if editor.has_active_inline_completion() {
+ editor.cancel(&Default::default(), window, cx);
+ }
+ });
+ });
+ }
+ }
+ }
+ }
+
fn handle_workspace_event(
&mut self,
workspace: Entity<Workspace>,
@@ -176,7 +207,7 @@ impl InlineAssistant {
window: &mut Window,
cx: &mut App,
) {
- let is_assistant2_enabled = true;
+ let is_assistant2_enabled = !DisableAiSettings::get_global(cx).disable_ai;
if let Some(editor) = item.act_as::<Editor>(cx) {
editor.update(cx, |editor, cx| {
@@ -199,6 +230,13 @@ impl InlineAssistant {
cx,
);
+ if DisableAiSettings::get_global(cx).disable_ai {
+ // Cancel any active completions
+ if editor.has_active_inline_completion() {
+ editor.cancel(&Default::default(), window, cx);
+ }
+ }
+
// Remove the Assistant1 code action provider, as it still might be registered.
editor.remove_code_action_provider("assistant".into(), window, cx);
} else {
@@ -219,7 +257,7 @@ impl InlineAssistant {
cx: &mut Context<Workspace>,
) {
let settings = AgentSettings::get_global(cx);
- if !settings.enabled {
+ if !settings.enabled || DisableAiSettings::get_global(cx).disable_ai {
return;
}
@@ -38,10 +38,6 @@ impl ApiKeysWithProviders {
.map(|provider| (provider.icon(), provider.name().0.clone()))
.collect()
}
-
- pub fn has_providers(&self) -> bool {
- !self.configured_providers.is_empty()
- }
}
impl Render for ApiKeysWithProviders {
@@ -53,11 +49,10 @@ impl Render for ApiKeysWithProviders {
.map(|(icon, name)| {
h_flex()
.gap_1p5()
- .child(Icon::new(icon).size(IconSize::Small).color(Color::Muted))
+ .child(Icon::new(icon).size(IconSize::XSmall).color(Color::Muted))
.child(Label::new(name))
});
-
- h_flex()
+ div()
.mx_2p5()
.p_1()
.pb_0()
@@ -85,8 +80,24 @@ impl Render for ApiKeysWithProviders {
.border_x_1()
.border_color(cx.theme().colors().border)
.bg(cx.theme().colors().panel_background)
- .child(Icon::new(IconName::Info).size(IconSize::XSmall).color(Color::Muted))
- .child(Label::new("Or start now using API keys from your environment for the following providers:").color(Color::Muted))
+ .child(
+ h_flex()
+ .min_w_0()
+ .gap_2()
+ .child(
+ Icon::new(IconName::Info)
+ .size(IconSize::XSmall)
+ .color(Color::Muted)
+ )
+ .child(
+ div()
+ .w_full()
+ .child(
+ Label::new("Start now using API keys from your environment for the following providers:")
+ .color(Color::Muted)
+ )
+ )
+ )
.children(configured_providers_list)
)
}
@@ -118,7 +129,7 @@ impl RenderOnce for ApiKeysWithoutProviders {
.child(Divider::horizontal()),
)
.child(List::new().child(BulletItem::new(
- "You can also use AI in Zed by bringing your own API keys",
+ "Add your own keys to use AI without signing in.",
)))
.child(
Button::new("configure-providers", "Configure Providers")
@@ -141,22 +141,18 @@ impl ZedAiOnboarding {
)
.child(
List::new()
+ .child(BulletItem::new("50 prompts per month with Claude models"))
.child(BulletItem::new(
- "50 prompts per month with the Claude models",
- ))
- .child(BulletItem::new(
- "2000 accepted edit predictions using our open-source Zeta model",
+ "2,000 accepted edit predictions with Zeta, our open-source model",
)),
)
}
fn pro_trial_definition(&self) -> impl IntoElement {
List::new()
+ .child(BulletItem::new("150 prompts with Claude models"))
.child(BulletItem::new(
- "150 prompts per month with the Claude models",
- ))
- .child(BulletItem::new(
- "Unlimited accepted edit predictions using our open-source Zeta model",
+ "Unlimited accepted edit predictions with Zeta, our open-source model",
))
}
@@ -178,12 +174,12 @@ impl ZedAiOnboarding {
List::new()
.child(BulletItem::new("500 prompts per month with Claude models"))
.child(BulletItem::new(
- "Unlimited accepted edit predictions using our open-source Zeta model",
+ "Unlimited accepted edit predictions with Zeta, our open-source model",
))
- .child(BulletItem::new("USD $20 per month")),
+ .child(BulletItem::new("$20 USD per month")),
)
.child(
- Button::new("pro", "Start with Pro")
+ Button::new("pro", "Get Started")
.full_width()
.style(ButtonStyle::Tinted(ui::TintColor::Accent))
.on_click(move |_, _window, cx| {
@@ -206,11 +202,11 @@ impl ZedAiOnboarding {
List::new()
.child(self.pro_trial_definition())
.child(BulletItem::new(
- "Try it out for 14 days with no charge and no credit card required",
+ "Try it out for 14 days for free, no credit card required",
)),
)
.child(
- Button::new("pro", "Start Pro Trial")
+ Button::new("pro", "Start Free Trial")
.full_width()
.style(ButtonStyle::Tinted(ui::TintColor::Accent))
.on_click(move |_, _window, cx| {
@@ -225,14 +221,14 @@ impl ZedAiOnboarding {
v_flex()
.gap_1()
.w_full()
- .child(Headline::new("Before starting…"))
+ .child(Headline::new("Accept Terms of Service"))
.child(
- Label::new("Make sure you have read and accepted Zed AI's terms of service.")
+ Label::new("We don’t sell your data, track you across the web, or compromise your privacy.")
.color(Color::Muted)
.mb_2(),
)
.child(
- Button::new("terms_of_service", "View and Read the Terms of Service")
+ Button::new("terms_of_service", "Review Terms of Service")
.full_width()
.style(ButtonStyle::Outlined)
.icon(IconName::ArrowUpRight)
@@ -241,7 +237,7 @@ impl ZedAiOnboarding {
.on_click(move |_, _window, cx| cx.open_url(&zed_urls::terms_of_service(cx))),
)
.child(
- Button::new("accept_terms", "I've read it and accept it")
+ Button::new("accept_terms", "Accept")
.full_width()
.style(ButtonStyle::Tinted(TintColor::Accent))
.on_click({
@@ -259,13 +255,13 @@ impl ZedAiOnboarding {
.gap_1()
.child(Headline::new("Welcome to Zed AI"))
.child(
- Label::new("Sign in to start using AI in Zed with a free trial of the Pro plan, which includes:")
+ Label::new("Sign in to try Zed Pro for 14 days, no credit card required.")
.color(Color::Muted)
.mb_2(),
)
.child(self.pro_trial_definition())
.child(
- Button::new("sign_in", "Sign in to Start Trial")
+ Button::new("sign_in", "Try Zed Pro for Free")
.disabled(signing_in)
.full_width()
.style(ButtonStyle::Tinted(ui::TintColor::Accent))
@@ -284,11 +280,6 @@ impl ZedAiOnboarding {
.relative()
.gap_1()
.child(Headline::new("Welcome to Zed AI"))
- .child(
- Label::new("Choose how you want to start.")
- .color(Color::Muted)
- .mb_2(),
- )
.map(|this| {
if self.account_too_young {
this.child(young_account_banner)
@@ -318,7 +309,7 @@ impl ZedAiOnboarding {
v_flex()
.relative()
.gap_1()
- .child(Headline::new("Welcome to the Zed Pro free trial"))
+ .child(Headline::new("Welcome to the Zed Pro Trial"))
.child(
Label::new("Here's what you get for the next 14 days:")
.color(Color::Muted)
@@ -6,7 +6,7 @@ pub struct YoungAccountBanner;
impl RenderOnce for YoungAccountBanner {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
- const YOUNG_ACCOUNT_DISCLAIMER: &str = "To prevent abuse of our service, we cannot offer plans to GitHub accounts created fewer than 30 days ago. To request an exception, reach out to billing@zed.dev.";
+ const YOUNG_ACCOUNT_DISCLAIMER: &str = "To prevent abuse of our service, we cannot offer plans to GitHub accounts created fewer than 30 days ago. To request an exception, reach out to billing-support@zed.dev.";
let label = div()
.w_full()
@@ -1323,7 +1323,7 @@ fn setup_context_editor_with_fake_model(
) -> (Entity<AssistantContext>, Arc<FakeLanguageModel>) {
let registry = Arc::new(LanguageRegistry::test(cx.executor().clone()));
- let fake_provider = Arc::new(FakeLanguageModelProvider);
+ let fake_provider = Arc::new(FakeLanguageModelProvider::default());
let fake_model = Arc::new(fake_provider.test_model());
cx.update(|cx| {
@@ -51,23 +51,13 @@ impl ActionLog {
Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
}
- pub fn has_unnotified_user_edits(&self) -> bool {
- self.tracked_buffers
- .values()
- .any(|tracked| tracked.has_unnotified_user_edits)
- }
-
/// Return a unified diff patch with user edits made since last read or notification
pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> {
- if !self.has_unnotified_user_edits() {
- return None;
- }
-
- let unified_diff = self
+ let diffs = self
.tracked_buffers
.values()
.filter_map(|tracked| {
- if !tracked.has_unnotified_user_edits {
+ if !tracked.may_have_unnotified_user_edits {
return None;
}
@@ -95,9 +85,13 @@ impl ActionLog {
Some(result)
})
- .collect::<Vec<_>>()
- .join("\n\n");
+ .collect::<Vec<_>>();
+
+ if diffs.is_empty() {
+ return None;
+ }
+ let unified_diff = diffs.join("\n\n");
Some(unified_diff)
}
@@ -106,7 +100,7 @@ impl ActionLog {
pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> {
let patch = self.unnotified_user_edits(cx);
self.tracked_buffers.values_mut().for_each(|tracked| {
- tracked.has_unnotified_user_edits = false;
+ tracked.may_have_unnotified_user_edits = false;
tracked.last_seen_base = tracked.diff_base.clone();
});
patch
@@ -185,7 +179,7 @@ impl ActionLog {
version: buffer.read(cx).version(),
diff,
diff_update: diff_update_tx,
- has_unnotified_user_edits: false,
+ may_have_unnotified_user_edits: false,
_open_lsp_handle: open_lsp_handle,
_maintain_diff: cx.spawn({
let buffer = buffer.clone();
@@ -337,27 +331,34 @@ impl ActionLog {
let new_snapshot = buffer_snapshot.clone();
let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
let edits = diff_snapshots(&old_snapshot, &new_snapshot);
- if let ChangeAuthor::User = author
- && !edits.is_empty()
- {
- tracked_buffer.has_unnotified_user_edits = true;
- }
+ let mut has_user_changes = false;
async move {
if let ChangeAuthor::User = author {
- apply_non_conflicting_edits(
+ has_user_changes = apply_non_conflicting_edits(
&unreviewed_edits,
edits,
&mut base_text,
new_snapshot.as_rope(),
);
}
- (Arc::new(base_text.to_string()), base_text)
+
+ (Arc::new(base_text.to_string()), base_text, has_user_changes)
}
});
anyhow::Ok(rebase)
})??;
- let (new_base_text, new_diff_base) = rebase.await;
+ let (new_base_text, new_diff_base, has_user_changes) = rebase.await;
+
+ this.update(cx, |this, _| {
+ let tracked_buffer = this
+ .tracked_buffers
+ .get_mut(buffer)
+ .context("buffer not tracked")
+ .unwrap();
+ tracked_buffer.may_have_unnotified_user_edits |= has_user_changes;
+ })?;
+
Self::update_diff(
this,
buffer,
@@ -829,11 +830,12 @@ fn apply_non_conflicting_edits(
edits: Vec<Edit<u32>>,
old_text: &mut Rope,
new_text: &Rope,
-) {
+) -> bool {
let mut old_edits = patch.edits().iter().cloned().peekable();
let mut new_edits = edits.into_iter().peekable();
let mut applied_delta = 0i32;
let mut rebased_delta = 0i32;
+ let mut has_made_changes = false;
while let Some(mut new_edit) = new_edits.next() {
let mut conflict = false;
@@ -883,8 +885,10 @@ fn apply_non_conflicting_edits(
&new_text.chunks_in_range(new_bytes).collect::<String>(),
);
applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
+ has_made_changes = true;
}
}
+ has_made_changes
}
fn diff_snapshots(
@@ -958,7 +962,7 @@ struct TrackedBuffer {
diff: Entity<BufferDiff>,
snapshot: text::BufferSnapshot,
diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
- has_unnotified_user_edits: bool,
+ may_have_unnotified_user_edits: bool,
_open_lsp_handle: OpenLspBufferHandle,
_maintain_diff: Task<()>,
_subscription: Subscription,
@@ -20,6 +20,7 @@ anyhow.workspace = true
assistant_tool.workspace = true
buffer_diff.workspace = true
chrono.workspace = true
+client.workspace = true
collections.workspace = true
component.workspace = true
derive_more.workspace = true
@@ -20,14 +20,13 @@ mod thinking_tool;
mod ui;
mod web_search_tool;
-use std::sync::Arc;
-
use assistant_tool::ToolRegistry;
use copy_path_tool::CopyPathTool;
use gpui::{App, Entity};
use http_client::HttpClientWithUrl;
use language_model::LanguageModelRegistry;
use move_path_tool::MovePathTool;
+use std::sync::Arc;
use web_search_tool::WebSearchTool;
pub(crate) use templates::*;
@@ -278,6 +278,9 @@ impl Tool for EditFileTool {
.unwrap_or(false);
if format_on_save_enabled {
+ action_log.update(cx, |log, cx| {
+ log.buffer_edited(buffer.clone(), cx);
+ })?;
let format_task = project.update(cx, |project, cx| {
project.format(
HashSet::from_iter([buffer.clone()]),
@@ -200,7 +200,7 @@ mod tests {
// Run the tool before any changes
let tool = Arc::new(ProjectNotificationsTool);
- let provider = Arc::new(FakeLanguageModelProvider);
+ let provider = Arc::new(FakeLanguageModelProvider::default());
let model: Arc<dyn LanguageModel> = Arc::new(provider.test_model());
let request = Arc::new(LanguageModelRequest::default());
let tool_input = json!({});
@@ -17,7 +17,5 @@ default = []
[dependencies]
aws-smithy-runtime-api.workspace = true
aws-smithy-types.workspace = true
-futures.workspace = true
http_client.workspace = true
-tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
workspace-hack.workspace = true
@@ -11,14 +11,11 @@ use aws_smithy_runtime_api::client::result::ConnectorError;
use aws_smithy_runtime_api::client::runtime_components::RuntimeComponents;
use aws_smithy_runtime_api::http::{Headers, StatusCode};
use aws_smithy_types::body::SdkBody;
-use futures::AsyncReadExt;
-use http_client::{AsyncBody, Inner};
+use http_client::AsyncBody;
use http_client::{HttpClient, Request};
-use tokio::runtime::Handle;
struct AwsHttpConnector {
client: Arc<dyn HttpClient>,
- handle: Handle,
}
impl std::fmt::Debug for AwsHttpConnector {
@@ -42,18 +39,17 @@ impl AwsConnector for AwsHttpConnector {
.client
.send(Request::from_parts(parts, convert_to_async_body(body)));
- let handle = self.handle.clone();
-
HttpConnectorFuture::new(async move {
let response = match response.await {
Ok(response) => response,
Err(err) => return Err(ConnectorError::other(err.into(), None)),
};
let (parts, body) = response.into_parts();
- let body = convert_to_sdk_body(body, handle).await;
- let mut response =
- HttpResponse::new(StatusCode::try_from(parts.status.as_u16()).unwrap(), body);
+ let mut response = HttpResponse::new(
+ StatusCode::try_from(parts.status.as_u16()).unwrap(),
+ convert_to_sdk_body(body),
+ );
let headers = match Headers::try_from(parts.headers) {
Ok(headers) => headers,
@@ -70,7 +66,6 @@ impl AwsConnector for AwsHttpConnector {
#[derive(Clone)]
pub struct AwsHttpClient {
client: Arc<dyn HttpClient>,
- handler: Handle,
}
impl std::fmt::Debug for AwsHttpClient {
@@ -80,11 +75,8 @@ impl std::fmt::Debug for AwsHttpClient {
}
impl AwsHttpClient {
- pub fn new(client: Arc<dyn HttpClient>, handle: Handle) -> Self {
- Self {
- client,
- handler: handle,
- }
+ pub fn new(client: Arc<dyn HttpClient>) -> Self {
+ Self { client }
}
}
@@ -96,25 +88,12 @@ impl AwsClient for AwsHttpClient {
) -> SharedHttpConnector {
SharedHttpConnector::new(AwsHttpConnector {
client: self.client.clone(),
- handle: self.handler.clone(),
})
}
}
-pub async fn convert_to_sdk_body(body: AsyncBody, handle: Handle) -> SdkBody {
- match body.0 {
- Inner::Empty => SdkBody::empty(),
- Inner::Bytes(bytes) => SdkBody::from(bytes.into_inner()),
- Inner::AsyncReader(mut reader) => {
- let buffer = handle.spawn(async move {
- let mut buffer = Vec::new();
- let _ = reader.read_to_end(&mut buffer).await;
- buffer
- });
-
- SdkBody::from(buffer.await.unwrap_or_default())
- }
- }
+pub fn convert_to_sdk_body(body: AsyncBody) -> SdkBody {
+ SdkBody::from_body_1_x(body)
}
pub fn convert_to_async_body(body: SdkBody) -> AsyncBody {
@@ -343,8 +343,7 @@ impl BufferDiffInner {
..
} in hunks.iter().cloned()
{
- let preceding_pending_hunks =
- old_pending_hunks.slice(&buffer_range.start, Bias::Left, buffer);
+ let preceding_pending_hunks = old_pending_hunks.slice(&buffer_range.start, Bias::Left);
pending_hunks.append(preceding_pending_hunks, buffer);
// Skip all overlapping or adjacent old pending hunks
@@ -355,7 +354,7 @@ impl BufferDiffInner {
.cmp(&buffer_range.end, buffer)
.is_le()
}) {
- old_pending_hunks.next(buffer);
+ old_pending_hunks.next();
}
if (stage && secondary_status == DiffHunkSecondaryStatus::NoSecondaryHunk)
@@ -379,10 +378,10 @@ impl BufferDiffInner {
);
}
// append the remainder
- pending_hunks.append(old_pending_hunks.suffix(buffer), buffer);
+ pending_hunks.append(old_pending_hunks.suffix(), buffer);
let mut unstaged_hunk_cursor = unstaged_diff.hunks.cursor::<DiffHunkSummary>(buffer);
- unstaged_hunk_cursor.next(buffer);
+ unstaged_hunk_cursor.next();
// then, iterate over all pending hunks (both new ones and the existing ones) and compute the edits
let mut prev_unstaged_hunk_buffer_end = 0;
@@ -397,8 +396,7 @@ impl BufferDiffInner {
}) = pending_hunks_iter.next()
{
// Advance unstaged_hunk_cursor to skip unstaged hunks before current hunk
- let skipped_unstaged =
- unstaged_hunk_cursor.slice(&buffer_range.start, Bias::Left, buffer);
+ let skipped_unstaged = unstaged_hunk_cursor.slice(&buffer_range.start, Bias::Left);
if let Some(unstaged_hunk) = skipped_unstaged.last() {
prev_unstaged_hunk_base_text_end = unstaged_hunk.diff_base_byte_range.end;
@@ -425,7 +423,7 @@ impl BufferDiffInner {
buffer_offset_range.end =
buffer_offset_range.end.max(unstaged_hunk_offset_range.end);
- unstaged_hunk_cursor.next(buffer);
+ unstaged_hunk_cursor.next();
continue;
}
}
@@ -514,7 +512,7 @@ impl BufferDiffInner {
});
let anchor_iter = iter::from_fn(move || {
- cursor.next(buffer);
+ cursor.next();
cursor.item()
})
.flat_map(move |hunk| {
@@ -531,12 +529,12 @@ impl BufferDiffInner {
});
let mut pending_hunks_cursor = self.pending_hunks.cursor::<DiffHunkSummary>(buffer);
- pending_hunks_cursor.next(buffer);
+ pending_hunks_cursor.next();
let mut secondary_cursor = None;
if let Some(secondary) = secondary.as_ref() {
let mut cursor = secondary.hunks.cursor::<DiffHunkSummary>(buffer);
- cursor.next(buffer);
+ cursor.next();
secondary_cursor = Some(cursor);
}
@@ -564,7 +562,7 @@ impl BufferDiffInner {
.cmp(&pending_hunks_cursor.start().buffer_range.start, buffer)
.is_gt()
{
- pending_hunks_cursor.seek_forward(&start_anchor, Bias::Left, buffer);
+ pending_hunks_cursor.seek_forward(&start_anchor, Bias::Left);
}
if let Some(pending_hunk) = pending_hunks_cursor.item() {
@@ -590,7 +588,7 @@ impl BufferDiffInner {
.cmp(&secondary_cursor.start().buffer_range.start, buffer)
.is_gt()
{
- secondary_cursor.seek_forward(&start_anchor, Bias::Left, buffer);
+ secondary_cursor.seek_forward(&start_anchor, Bias::Left);
}
if let Some(secondary_hunk) = secondary_cursor.item() {
@@ -635,7 +633,7 @@ impl BufferDiffInner {
});
iter::from_fn(move || {
- cursor.prev(buffer);
+ cursor.prev();
let hunk = cursor.item()?;
let range = hunk.buffer_range.to_point(buffer);
@@ -653,8 +651,8 @@ impl BufferDiffInner {
fn compare(&self, old: &Self, new_snapshot: &text::BufferSnapshot) -> Option<Range<Anchor>> {
let mut new_cursor = self.hunks.cursor::<()>(new_snapshot);
let mut old_cursor = old.hunks.cursor::<()>(new_snapshot);
- old_cursor.next(new_snapshot);
- new_cursor.next(new_snapshot);
+ old_cursor.next();
+ new_cursor.next();
let mut start = None;
let mut end = None;
@@ -669,7 +667,7 @@ impl BufferDiffInner {
Ordering::Less => {
start.get_or_insert(new_hunk.buffer_range.start);
end.replace(new_hunk.buffer_range.end);
- new_cursor.next(new_snapshot);
+ new_cursor.next();
}
Ordering::Equal => {
if new_hunk != old_hunk {
@@ -686,25 +684,25 @@ impl BufferDiffInner {
}
}
- new_cursor.next(new_snapshot);
- old_cursor.next(new_snapshot);
+ new_cursor.next();
+ old_cursor.next();
}
Ordering::Greater => {
start.get_or_insert(old_hunk.buffer_range.start);
end.replace(old_hunk.buffer_range.end);
- old_cursor.next(new_snapshot);
+ old_cursor.next();
}
}
}
(Some(new_hunk), None) => {
start.get_or_insert(new_hunk.buffer_range.start);
end.replace(new_hunk.buffer_range.end);
- new_cursor.next(new_snapshot);
+ new_cursor.next();
}
(None, Some(old_hunk)) => {
start.get_or_insert(old_hunk.buffer_range.start);
end.replace(old_hunk.buffer_range.end);
- old_cursor.next(new_snapshot);
+ old_cursor.next();
}
(None, None) => break,
}
@@ -333,7 +333,7 @@ impl ChannelChat {
if first_id <= message_id {
let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>(&());
let message_id = ChannelMessageId::Saved(message_id);
- cursor.seek(&message_id, Bias::Left, &());
+ cursor.seek(&message_id, Bias::Left);
return ControlFlow::Break(
if cursor
.item()
@@ -499,7 +499,7 @@ impl ChannelChat {
pub fn message(&self, ix: usize) -> &ChannelMessage {
let mut cursor = self.messages.cursor::<Count>(&());
- cursor.seek(&Count(ix), Bias::Right, &());
+ cursor.seek(&Count(ix), Bias::Right);
cursor.item().unwrap()
}
@@ -516,13 +516,13 @@ impl ChannelChat {
pub fn messages_in_range(&self, range: Range<usize>) -> impl Iterator<Item = &ChannelMessage> {
let mut cursor = self.messages.cursor::<Count>(&());
- cursor.seek(&Count(range.start), Bias::Right, &());
+ cursor.seek(&Count(range.start), Bias::Right);
cursor.take(range.len())
}
pub fn pending_messages(&self) -> impl Iterator<Item = &ChannelMessage> {
let mut cursor = self.messages.cursor::<ChannelMessageId>(&());
- cursor.seek(&ChannelMessageId::Pending(0), Bias::Left, &());
+ cursor.seek(&ChannelMessageId::Pending(0), Bias::Left);
cursor
}
@@ -588,9 +588,9 @@ impl ChannelChat {
.collect::<HashSet<_>>();
let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>(&());
- let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left, &());
+ let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left);
let start_ix = old_cursor.start().1.0;
- let removed_messages = old_cursor.slice(&last_message.id, Bias::Right, &());
+ let removed_messages = old_cursor.slice(&last_message.id, Bias::Right);
let removed_count = removed_messages.summary().count;
let new_count = messages.summary().count;
let end_ix = start_ix + removed_count;
@@ -599,10 +599,10 @@ impl ChannelChat {
let mut ranges = Vec::<Range<usize>>::new();
if new_messages.last().unwrap().is_pending() {
- new_messages.append(old_cursor.suffix(&()), &());
+ new_messages.append(old_cursor.suffix(), &());
} else {
new_messages.append(
- old_cursor.slice(&ChannelMessageId::Pending(0), Bias::Left, &()),
+ old_cursor.slice(&ChannelMessageId::Pending(0), Bias::Left),
&(),
);
@@ -617,7 +617,7 @@ impl ChannelChat {
} else {
new_messages.push(message.clone(), &());
}
- old_cursor.next(&());
+ old_cursor.next();
}
}
@@ -641,12 +641,12 @@ impl ChannelChat {
fn message_removed(&mut self, id: u64, cx: &mut Context<Self>) {
let mut cursor = self.messages.cursor::<ChannelMessageId>(&());
- let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left, &());
+ let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left);
if let Some(item) = cursor.item() {
if item.id == ChannelMessageId::Saved(id) {
let deleted_message_ix = messages.summary().count;
- cursor.next(&());
- messages.append(cursor.suffix(&()), &());
+ cursor.next();
+ messages.append(cursor.suffix(), &());
drop(cursor);
self.messages = messages;
@@ -680,7 +680,7 @@ impl ChannelChat {
cx: &mut Context<Self>,
) {
let mut cursor = self.messages.cursor::<ChannelMessageId>(&());
- let mut messages = cursor.slice(&id, Bias::Left, &());
+ let mut messages = cursor.slice(&id, Bias::Left);
let ix = messages.summary().count;
if let Some(mut message_to_update) = cursor.item().cloned() {
@@ -688,10 +688,10 @@ impl ChannelChat {
message_to_update.mentions = mentions;
message_to_update.edited_at = edited_at;
messages.push(message_to_update, &());
- cursor.next(&());
+ cursor.next();
}
- messages.append(cursor.suffix(&()), &());
+ messages.append(cursor.suffix(), &());
drop(cursor);
self.messages = messages;
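
The `message_removed` and message-edit hunks above rebuild the message tree by splicing: copy everything before the target id, skip or replace the item under the cursor, then append the untouched suffix. A standalone sketch of the removal case over a sorted `Vec` stand-in (hypothetical types, not Zed's `SumTree`):

```rust
// Sketch of the slice / skip / append-suffix splice used by message_removed above,
// with a sorted Vec standing in for the SumTree of messages.

#[derive(Clone, Debug, PartialEq)]
struct Message {
    id: u64,
    body: &'static str,
}

fn remove_message(messages: &[Message], id: u64) -> Vec<Message> {
    // slice(&id, Bias::Left): everything strictly before the first message with this id.
    let ix = messages.partition_point(|m| m.id < id);
    let mut new_messages = messages[..ix].to_vec();

    // If the item under the cursor matches, step past it (cursor.next()).
    let mut rest = &messages[ix..];
    if rest.first().map_or(false, |m| m.id == id) {
        rest = &rest[1..];
    }

    // append(cursor.suffix()): keep everything after the removed message.
    new_messages.extend_from_slice(rest);
    new_messages
}

fn main() {
    let messages = vec![
        Message { id: 1, body: "hi" },
        Message { id: 2, body: "there" },
        Message { id: 3, body: "bye" },
    ];

    assert_eq!(
        remove_message(&messages, 2),
        vec![
            Message { id: 1, body: "hi" },
            Message { id: 3, body: "bye" },
        ]
    );

    // Removing an id that is not present leaves the list unchanged.
    assert_eq!(remove_message(&messages, 5), messages);
}
```
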
@@ -151,6 +151,7 @@ impl Settings for ProxySettings {
pub fn init_settings(cx: &mut App) {
TelemetrySettings::register(cx);
+ DisableAiSettings::register(cx);
ClientSettings::register(cx);
ProxySettings::register(cx);
}
@@ -548,6 +549,33 @@ impl settings::Settings for TelemetrySettings {
}
}
+/// Whether to disable all AI features in Zed.
+///
+/// Default: false
+#[derive(Copy, Clone, Debug)]
+pub struct DisableAiSettings {
+ pub disable_ai: bool,
+}
+
+impl settings::Settings for DisableAiSettings {
+ const KEY: Option<&'static str> = Some("disable_ai");
+
+ type FileContent = Option<bool>;
+
+ fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
+ Ok(Self {
+ disable_ai: sources
+ .user
+ .or(sources.server)
+ .copied()
+ .flatten()
+ .unwrap_or(sources.default.ok_or_else(Self::missing_default)?),
+ })
+ }
+
+ fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {}
+}
+
impl Client {
pub fn new(
clock: Arc<dyn SystemClock>,
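
`DisableAiSettings::load` above resolves the setting with a fixed precedence: an explicit user value wins, then a server-provided value, and otherwise the `false` default from the default settings. A standalone sketch of that precedence with plain owned types (the real `SettingsSources` hands out references, which is why the production code also needs `.copied()`):

```rust
// Hypothetical owned stand-ins for SettingsSources<Option<bool>>; only the
// precedence logic mirrors the DisableAiSettings::load implementation above.

struct Sources {
    default: Option<bool>,
    user: Option<Option<bool>>,   // user settings file, which may omit the key
    server: Option<Option<bool>>, // server-provided settings, which may omit the key
}

fn resolve_disable_ai(sources: &Sources) -> Result<bool, &'static str> {
    Ok(sources
        .user
        .or(sources.server)
        .flatten()
        .unwrap_or(sources.default.ok_or("missing default for disable_ai")?))
}

fn main() {
    // An explicit user value wins over the server value and the default.
    let sources = Sources {
        default: Some(false),
        user: Some(Some(true)),
        server: Some(Some(false)),
    };
    assert_eq!(resolve_disable_ai(&sources), Ok(true));

    // With no user or server value, the default applies.
    let sources = Sources {
        default: Some(false),
        user: None,
        server: None,
    };
    assert_eq!(resolve_disable_ai(&sources), Ok(false));
}
```
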
@@ -25,7 +25,7 @@ use crate::llm::db::subscription_usage_meter::{self, CompletionMode};
use crate::rpc::{ResultExt as _, Server};
use crate::stripe_client::{
StripeCancellationDetailsReason, StripeClient, StripeCustomerId, StripeSubscription,
- StripeSubscriptionId, UpdateCustomerParams,
+ StripeSubscriptionId,
};
use crate::{AppState, Error, Result};
use crate::{db::UserId, llm::db::LlmDatabase};
@@ -40,7 +40,6 @@ use crate::{
pub fn router() -> Router {
Router::new()
- .route("/billing/subscriptions", post(create_billing_subscription))
.route(
"/billing/subscriptions/manage",
post(manage_billing_subscription),
@@ -51,122 +50,6 @@ pub fn router() -> Router {
)
}
-#[derive(Debug, PartialEq, Clone, Copy, Deserialize)]
-#[serde(rename_all = "snake_case")]
-enum ProductCode {
- ZedPro,
- ZedProTrial,
-}
-
-#[derive(Debug, Deserialize)]
-struct CreateBillingSubscriptionBody {
- github_user_id: i32,
- product: ProductCode,
-}
-
-#[derive(Debug, Serialize)]
-struct CreateBillingSubscriptionResponse {
- checkout_session_url: String,
-}
-
-/// Initiates a Stripe Checkout session for creating a billing subscription.
-async fn create_billing_subscription(
- Extension(app): Extension<Arc<AppState>>,
- extract::Json(body): extract::Json<CreateBillingSubscriptionBody>,
-) -> Result<Json<CreateBillingSubscriptionResponse>> {
- let user = app
- .db
- .get_user_by_github_user_id(body.github_user_id)
- .await?
- .context("user not found")?;
-
- let Some(stripe_billing) = app.stripe_billing.clone() else {
- log::error!("failed to retrieve Stripe billing object");
- Err(Error::http(
- StatusCode::NOT_IMPLEMENTED,
- "not supported".into(),
- ))?
- };
-
- if let Some(existing_subscription) = app.db.get_active_billing_subscription(user.id).await? {
- let is_checkout_allowed = body.product == ProductCode::ZedProTrial
- && existing_subscription.kind == Some(SubscriptionKind::ZedFree);
-
- if !is_checkout_allowed {
- return Err(Error::http(
- StatusCode::CONFLICT,
- "user already has an active subscription".into(),
- ));
- }
- }
-
- let existing_billing_customer = app.db.get_billing_customer_by_user_id(user.id).await?;
- if let Some(existing_billing_customer) = &existing_billing_customer {
- if existing_billing_customer.has_overdue_invoices {
- return Err(Error::http(
- StatusCode::PAYMENT_REQUIRED,
- "user has overdue invoices".into(),
- ));
- }
- }
-
- let customer_id = if let Some(existing_customer) = &existing_billing_customer {
- let customer_id = StripeCustomerId(existing_customer.stripe_customer_id.clone().into());
- if let Some(email) = user.email_address.as_deref() {
- stripe_billing
- .client()
- .update_customer(&customer_id, UpdateCustomerParams { email: Some(email) })
- .await
- // Update of email address is best-effort - continue checkout even if it fails
- .context("error updating stripe customer email address")
- .log_err();
- }
- customer_id
- } else {
- stripe_billing
- .find_or_create_customer_by_email(user.email_address.as_deref())
- .await?
- };
-
- let success_url = format!(
- "{}/account?checkout_complete=1",
- app.config.zed_dot_dev_url()
- );
-
- let checkout_session_url = match body.product {
- ProductCode::ZedPro => {
- stripe_billing
- .checkout_with_zed_pro(&customer_id, &user.github_login, &success_url)
- .await?
- }
- ProductCode::ZedProTrial => {
- if let Some(existing_billing_customer) = &existing_billing_customer {
- if existing_billing_customer.trial_started_at.is_some() {
- return Err(Error::http(
- StatusCode::FORBIDDEN,
- "user already used free trial".into(),
- ));
- }
- }
-
- let feature_flags = app.db.get_user_flags(user.id).await?;
-
- stripe_billing
- .checkout_with_zed_pro_trial(
- &customer_id,
- &user.github_login,
- feature_flags,
- &success_url,
- )
- .await?
- }
- };
-
- Ok(Json(CreateBillingSubscriptionResponse {
- checkout_session_url,
- }))
-}
-
#[derive(Debug, PartialEq, Deserialize)]
#[serde(rename_all = "snake_case")]
enum ManageSubscriptionIntent {
@@ -1,6 +1,6 @@
use std::sync::Arc;
-use anyhow::{Context as _, anyhow};
+use anyhow::anyhow;
use chrono::Utc;
use collections::HashMap;
use stripe::SubscriptionStatus;
@@ -9,18 +9,13 @@ use uuid::Uuid;
use crate::Result;
use crate::db::billing_subscription::SubscriptionKind;
-use crate::llm::AGENT_EXTENDED_TRIAL_FEATURE_FLAG;
use crate::stripe_client::{
- RealStripeClient, StripeAutomaticTax, StripeBillingAddressCollection,
- StripeCheckoutSessionMode, StripeCheckoutSessionPaymentMethodCollection, StripeClient,
- StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams,
- StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams,
+ RealStripeClient, StripeAutomaticTax, StripeClient, StripeCreateMeterEventParams,
StripeCreateMeterEventPayload, StripeCreateSubscriptionItems, StripeCreateSubscriptionParams,
- StripeCustomerId, StripeCustomerUpdate, StripeCustomerUpdateAddress, StripeCustomerUpdateName,
- StripeMeter, StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId,
+ StripeCustomerId, StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId,
StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior,
- StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, StripeTaxIdCollection,
- UpdateSubscriptionItems, UpdateSubscriptionParams,
+ StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateSubscriptionItems,
+ UpdateSubscriptionParams,
};
pub struct StripeBilling {
@@ -30,8 +25,6 @@ pub struct StripeBilling {
#[derive(Default)]
struct StripeBillingState {
- meters_by_event_name: HashMap<String, StripeMeter>,
- price_ids_by_meter_id: HashMap<String, StripePriceId>,
prices_by_lookup_key: HashMap<String, StripePrice>,
}
@@ -60,24 +53,11 @@ impl StripeBilling {
let mut state = self.state.write().await;
- let (meters, prices) =
- futures::try_join!(self.client.list_meters(), self.client.list_prices())?;
-
- for meter in meters {
- state
- .meters_by_event_name
- .insert(meter.event_name.clone(), meter);
- }
+ let prices = self.client.list_prices().await?;
for price in prices {
if let Some(lookup_key) = price.lookup_key.clone() {
- state.prices_by_lookup_key.insert(lookup_key, price.clone());
- }
-
- if let Some(recurring) = price.recurring {
- if let Some(meter) = recurring.meter {
- state.price_ids_by_meter_id.insert(meter, price.id);
- }
+ state.prices_by_lookup_key.insert(lookup_key, price);
}
}
@@ -229,95 +209,6 @@ impl StripeBilling {
Ok(())
}
- pub async fn checkout_with_zed_pro(
- &self,
- customer_id: &StripeCustomerId,
- github_login: &str,
- success_url: &str,
- ) -> Result<String> {
- let zed_pro_price_id = self.zed_pro_price_id().await?;
-
- let mut params = StripeCreateCheckoutSessionParams::default();
- params.mode = Some(StripeCheckoutSessionMode::Subscription);
- params.customer = Some(customer_id);
- params.client_reference_id = Some(github_login);
- params.line_items = Some(vec![StripeCreateCheckoutSessionLineItems {
- price: Some(zed_pro_price_id.to_string()),
- quantity: Some(1),
- }]);
- params.success_url = Some(success_url);
- params.billing_address_collection = Some(StripeBillingAddressCollection::Required);
- params.customer_update = Some(StripeCustomerUpdate {
- address: Some(StripeCustomerUpdateAddress::Auto),
- name: Some(StripeCustomerUpdateName::Auto),
- shipping: None,
- });
- params.tax_id_collection = Some(StripeTaxIdCollection { enabled: true });
-
- let session = self.client.create_checkout_session(params).await?;
- Ok(session.url.context("no checkout session URL")?)
- }
-
- pub async fn checkout_with_zed_pro_trial(
- &self,
- customer_id: &StripeCustomerId,
- github_login: &str,
- feature_flags: Vec<String>,
- success_url: &str,
- ) -> Result<String> {
- let zed_pro_price_id = self.zed_pro_price_id().await?;
-
- let eligible_for_extended_trial = feature_flags
- .iter()
- .any(|flag| flag == AGENT_EXTENDED_TRIAL_FEATURE_FLAG);
-
- let trial_period_days = if eligible_for_extended_trial { 60 } else { 14 };
-
- let mut subscription_metadata = std::collections::HashMap::new();
- if eligible_for_extended_trial {
- subscription_metadata.insert(
- "promo_feature_flag".to_string(),
- AGENT_EXTENDED_TRIAL_FEATURE_FLAG.to_string(),
- );
- }
-
- let mut params = StripeCreateCheckoutSessionParams::default();
- params.subscription_data = Some(StripeCreateCheckoutSessionSubscriptionData {
- trial_period_days: Some(trial_period_days),
- trial_settings: Some(StripeSubscriptionTrialSettings {
- end_behavior: StripeSubscriptionTrialSettingsEndBehavior {
- missing_payment_method:
- StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod::Cancel,
- },
- }),
- metadata: if !subscription_metadata.is_empty() {
- Some(subscription_metadata)
- } else {
- None
- },
- });
- params.mode = Some(StripeCheckoutSessionMode::Subscription);
- params.payment_method_collection =
- Some(StripeCheckoutSessionPaymentMethodCollection::IfRequired);
- params.customer = Some(customer_id);
- params.client_reference_id = Some(github_login);
- params.line_items = Some(vec![StripeCreateCheckoutSessionLineItems {
- price: Some(zed_pro_price_id.to_string()),
- quantity: Some(1),
- }]);
- params.success_url = Some(success_url);
- params.billing_address_collection = Some(StripeBillingAddressCollection::Required);
- params.customer_update = Some(StripeCustomerUpdate {
- address: Some(StripeCustomerUpdateAddress::Auto),
- name: Some(StripeCustomerUpdateName::Auto),
- shipping: None,
- });
- params.tax_id_collection = Some(StripeTaxIdCollection { enabled: true });
-
- let session = self.client.create_checkout_session(params).await?;
- Ok(session.url.context("no checkout session URL")?)
- }
-
pub async fn subscribe_to_zed_free(
&self,
customer_id: StripeCustomerId,
@@ -3,17 +3,11 @@ use std::sync::Arc;
use chrono::{Duration, Utc};
use pretty_assertions::assert_eq;
-use crate::llm::AGENT_EXTENDED_TRIAL_FEATURE_FLAG;
use crate::stripe_billing::StripeBilling;
use crate::stripe_client::{
- FakeStripeClient, StripeBillingAddressCollection, StripeCheckoutSessionMode,
- StripeCheckoutSessionPaymentMethodCollection, StripeCreateCheckoutSessionLineItems,
- StripeCreateCheckoutSessionSubscriptionData, StripeCustomerId, StripeCustomerUpdate,
- StripeCustomerUpdateAddress, StripeCustomerUpdateName, StripeMeter, StripeMeterId, StripePrice,
- StripePriceId, StripePriceRecurring, StripeSubscription, StripeSubscriptionId,
- StripeSubscriptionItem, StripeSubscriptionItemId, StripeSubscriptionTrialSettings,
- StripeSubscriptionTrialSettingsEndBehavior,
- StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateSubscriptionItems,
+ FakeStripeClient, StripeCustomerId, StripeMeter, StripeMeterId, StripePrice, StripePriceId,
+ StripePriceRecurring, StripeSubscription, StripeSubscriptionId, StripeSubscriptionItem,
+ StripeSubscriptionItemId, UpdateSubscriptionItems,
};
fn make_stripe_billing() -> (StripeBilling, Arc<FakeStripeClient>) {
@@ -364,240 +358,3 @@ async fn test_bill_model_request_usage() {
);
assert_eq!(create_meter_event_calls[0].value, 73);
}
-
-#[gpui::test]
-async fn test_checkout_with_zed_pro() {
- let (stripe_billing, stripe_client) = make_stripe_billing();
-
- let customer_id = StripeCustomerId("cus_test".into());
- let github_login = "zeduser1";
- let success_url = "https://example.com/success";
-
- // It returns an error when the Zed Pro price doesn't exist.
- {
- let result = stripe_billing
- .checkout_with_zed_pro(&customer_id, github_login, success_url)
- .await;
-
- assert!(result.is_err());
- assert_eq!(
- result.err().unwrap().to_string(),
- r#"no price ID found for "zed-pro""#
- );
- }
-
- // Successful checkout.
- {
- let price = StripePrice {
- id: StripePriceId("price_1".into()),
- unit_amount: Some(2000),
- lookup_key: Some("zed-pro".to_string()),
- recurring: None,
- };
- stripe_client
- .prices
- .lock()
- .insert(price.id.clone(), price.clone());
-
- stripe_billing.initialize().await.unwrap();
-
- let checkout_url = stripe_billing
- .checkout_with_zed_pro(&customer_id, github_login, success_url)
- .await
- .unwrap();
-
- assert!(checkout_url.starts_with("https://checkout.stripe.com/c/pay"));
-
- let create_checkout_session_calls = stripe_client
- .create_checkout_session_calls
- .lock()
- .drain(..)
- .collect::<Vec<_>>();
- assert_eq!(create_checkout_session_calls.len(), 1);
- let call = create_checkout_session_calls.into_iter().next().unwrap();
- assert_eq!(call.customer, Some(customer_id));
- assert_eq!(call.client_reference_id.as_deref(), Some(github_login));
- assert_eq!(call.mode, Some(StripeCheckoutSessionMode::Subscription));
- assert_eq!(
- call.line_items,
- Some(vec![StripeCreateCheckoutSessionLineItems {
- price: Some(price.id.to_string()),
- quantity: Some(1)
- }])
- );
- assert_eq!(call.payment_method_collection, None);
- assert_eq!(call.subscription_data, None);
- assert_eq!(call.success_url.as_deref(), Some(success_url));
- assert_eq!(
- call.billing_address_collection,
- Some(StripeBillingAddressCollection::Required)
- );
- assert_eq!(
- call.customer_update,
- Some(StripeCustomerUpdate {
- address: Some(StripeCustomerUpdateAddress::Auto),
- name: Some(StripeCustomerUpdateName::Auto),
- shipping: None,
- })
- );
- }
-}
-
-#[gpui::test]
-async fn test_checkout_with_zed_pro_trial() {
- let (stripe_billing, stripe_client) = make_stripe_billing();
-
- let customer_id = StripeCustomerId("cus_test".into());
- let github_login = "zeduser1";
- let success_url = "https://example.com/success";
-
- // It returns an error when the Zed Pro price doesn't exist.
- {
- let result = stripe_billing
- .checkout_with_zed_pro_trial(&customer_id, github_login, Vec::new(), success_url)
- .await;
-
- assert!(result.is_err());
- assert_eq!(
- result.err().unwrap().to_string(),
- r#"no price ID found for "zed-pro""#
- );
- }
-
- let price = StripePrice {
- id: StripePriceId("price_1".into()),
- unit_amount: Some(2000),
- lookup_key: Some("zed-pro".to_string()),
- recurring: None,
- };
- stripe_client
- .prices
- .lock()
- .insert(price.id.clone(), price.clone());
-
- stripe_billing.initialize().await.unwrap();
-
- // Successful checkout.
- {
- let checkout_url = stripe_billing
- .checkout_with_zed_pro_trial(&customer_id, github_login, Vec::new(), success_url)
- .await
- .unwrap();
-
- assert!(checkout_url.starts_with("https://checkout.stripe.com/c/pay"));
-
- let create_checkout_session_calls = stripe_client
- .create_checkout_session_calls
- .lock()
- .drain(..)
- .collect::<Vec<_>>();
- assert_eq!(create_checkout_session_calls.len(), 1);
- let call = create_checkout_session_calls.into_iter().next().unwrap();
- assert_eq!(call.customer.as_ref(), Some(&customer_id));
- assert_eq!(call.client_reference_id.as_deref(), Some(github_login));
- assert_eq!(call.mode, Some(StripeCheckoutSessionMode::Subscription));
- assert_eq!(
- call.line_items,
- Some(vec![StripeCreateCheckoutSessionLineItems {
- price: Some(price.id.to_string()),
- quantity: Some(1)
- }])
- );
- assert_eq!(
- call.payment_method_collection,
- Some(StripeCheckoutSessionPaymentMethodCollection::IfRequired)
- );
- assert_eq!(
- call.subscription_data,
- Some(StripeCreateCheckoutSessionSubscriptionData {
- trial_period_days: Some(14),
- trial_settings: Some(StripeSubscriptionTrialSettings {
- end_behavior: StripeSubscriptionTrialSettingsEndBehavior {
- missing_payment_method:
- StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod::Cancel,
- },
- }),
- metadata: None,
- })
- );
- assert_eq!(call.success_url.as_deref(), Some(success_url));
- assert_eq!(
- call.billing_address_collection,
- Some(StripeBillingAddressCollection::Required)
- );
- assert_eq!(
- call.customer_update,
- Some(StripeCustomerUpdate {
- address: Some(StripeCustomerUpdateAddress::Auto),
- name: Some(StripeCustomerUpdateName::Auto),
- shipping: None,
- })
- );
- }
-
- // Successful checkout with extended trial.
- {
- let checkout_url = stripe_billing
- .checkout_with_zed_pro_trial(
- &customer_id,
- github_login,
- vec![AGENT_EXTENDED_TRIAL_FEATURE_FLAG.to_string()],
- success_url,
- )
- .await
- .unwrap();
-
- assert!(checkout_url.starts_with("https://checkout.stripe.com/c/pay"));
-
- let create_checkout_session_calls = stripe_client
- .create_checkout_session_calls
- .lock()
- .drain(..)
- .collect::<Vec<_>>();
- assert_eq!(create_checkout_session_calls.len(), 1);
- let call = create_checkout_session_calls.into_iter().next().unwrap();
- assert_eq!(call.customer, Some(customer_id));
- assert_eq!(call.client_reference_id.as_deref(), Some(github_login));
- assert_eq!(call.mode, Some(StripeCheckoutSessionMode::Subscription));
- assert_eq!(
- call.line_items,
- Some(vec![StripeCreateCheckoutSessionLineItems {
- price: Some(price.id.to_string()),
- quantity: Some(1)
- }])
- );
- assert_eq!(
- call.payment_method_collection,
- Some(StripeCheckoutSessionPaymentMethodCollection::IfRequired)
- );
- assert_eq!(
- call.subscription_data,
- Some(StripeCreateCheckoutSessionSubscriptionData {
- trial_period_days: Some(60),
- trial_settings: Some(StripeSubscriptionTrialSettings {
- end_behavior: StripeSubscriptionTrialSettingsEndBehavior {
- missing_payment_method:
- StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod::Cancel,
- },
- }),
- metadata: Some(std::collections::HashMap::from_iter([(
- "promo_feature_flag".into(),
- AGENT_EXTENDED_TRIAL_FEATURE_FLAG.into()
- )])),
- })
- );
- assert_eq!(call.success_url.as_deref(), Some(success_url));
- assert_eq!(
- call.billing_address_collection,
- Some(StripeBillingAddressCollection::Required)
- );
- assert_eq!(
- call.customer_update,
- Some(StripeCustomerUpdate {
- address: Some(StripeCustomerUpdateAddress::Auto),
- name: Some(StripeCustomerUpdateName::Auto),
- shipping: None,
- })
- );
- }
-}
@@ -6,6 +6,7 @@ mod sign_in;
use crate::sign_in::initiate_sign_in_within_workspace;
use ::fs::Fs;
use anyhow::{Context as _, Result, anyhow};
+use client::DisableAiSettings;
use collections::{HashMap, HashSet};
use command_palette_hooks::CommandPaletteFilter;
use futures::{Future, FutureExt, TryFutureExt, channel::oneshot, future::Shared};
@@ -25,6 +26,7 @@ use node_runtime::NodeRuntime;
use parking_lot::Mutex;
use request::StatusNotification;
use serde_json::json;
+use settings::Settings;
use settings::SettingsStore;
use sign_in::{reinstall_and_sign_in_within_workspace, sign_out_within_workspace};
use std::collections::hash_map::Entry;
@@ -93,26 +95,34 @@ pub fn init(
let copilot_auth_action_types = [TypeId::of::<SignOut>()];
let copilot_no_auth_action_types = [TypeId::of::<SignIn>()];
let status = handle.read(cx).status();
+
+ let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
let filter = CommandPaletteFilter::global_mut(cx);
- match status {
- Status::Disabled => {
- filter.hide_action_types(&copilot_action_types);
- filter.hide_action_types(&copilot_auth_action_types);
- filter.hide_action_types(&copilot_no_auth_action_types);
- }
- Status::Authorized => {
- filter.hide_action_types(&copilot_no_auth_action_types);
- filter.show_action_types(
- copilot_action_types
- .iter()
- .chain(&copilot_auth_action_types),
- );
- }
- _ => {
- filter.hide_action_types(&copilot_action_types);
- filter.hide_action_types(&copilot_auth_action_types);
- filter.show_action_types(copilot_no_auth_action_types.iter());
+ if is_ai_disabled {
+ filter.hide_action_types(&copilot_action_types);
+ filter.hide_action_types(&copilot_auth_action_types);
+ filter.hide_action_types(&copilot_no_auth_action_types);
+ } else {
+ match status {
+ Status::Disabled => {
+ filter.hide_action_types(&copilot_action_types);
+ filter.hide_action_types(&copilot_auth_action_types);
+ filter.hide_action_types(&copilot_no_auth_action_types);
+ }
+ Status::Authorized => {
+ filter.hide_action_types(&copilot_no_auth_action_types);
+ filter.show_action_types(
+ copilot_action_types
+ .iter()
+ .chain(&copilot_auth_action_types),
+ );
+ }
+ _ => {
+ filter.hide_action_types(&copilot_action_types);
+ filter.hide_action_types(&copilot_auth_action_types);
+ filter.show_action_types(copilot_no_auth_action_types.iter());
+ }
}
}
})
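
The copilot hunk layers the new `disable_ai` setting on top of the existing status-based filtering: when AI is disabled, all copilot actions are hidden from the command palette regardless of sign-in status; otherwise the previous per-status rules still apply. A small standalone sketch of that decision table (hypothetical types, not the real `CommandPaletteFilter` or copilot `Status`):

```rust
// copilot:  the general copilot action types
// sign_out: the actions shown only when authorized (SignOut)
// sign_in:  the actions shown only when not authorized (SignIn)

#[derive(Clone, Copy)]
enum Status {
    Disabled,
    Authorized,
    SignedOut,
}

#[derive(Debug, PartialEq)]
struct Visible {
    copilot: bool,
    sign_out: bool,
    sign_in: bool,
}

fn copilot_visibility(disable_ai: bool, status: Status) -> Visible {
    if disable_ai {
        // disable_ai hides everything, no matter the status.
        return Visible { copilot: false, sign_out: false, sign_in: false };
    }
    match status {
        Status::Disabled => Visible { copilot: false, sign_out: false, sign_in: false },
        Status::Authorized => Visible { copilot: true, sign_out: true, sign_in: false },
        _ => Visible { copilot: false, sign_out: false, sign_in: true },
    }
}

fn main() {
    assert_eq!(
        copilot_visibility(true, Status::Authorized),
        Visible { copilot: false, sign_out: false, sign_in: false }
    );
    assert_eq!(
        copilot_visibility(false, Status::Authorized),
        Visible { copilot: true, sign_out: true, sign_in: false }
    );
    assert_eq!(
        copilot_visibility(false, Status::SignedOut),
        Visible { copilot: false, sign_out: false, sign_in: true }
    );
    assert_eq!(
        copilot_visibility(false, Status::Disabled),
        Visible { copilot: false, sign_out: false, sign_in: false }
    );
}
```
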
@@ -259,6 +259,13 @@ pub struct SpawnNearestTask {
pub reveal: task::RevealStrategy,
}
+#[derive(Clone, PartialEq, Action)]
+#[action(no_json, no_register)]
+pub struct DiffClipboardWithSelectionData {
+ pub clipboard_text: String,
+ pub editor: Entity<Editor>,
+}
+
#[derive(Debug, PartialEq, Eq, Clone, Copy, Deserialize, Default)]
pub enum UuidVersion {
#[default]
@@ -398,6 +405,8 @@ actions!(
DeleteToNextSubwordEnd,
/// Deletes to the start of the previous subword.
DeleteToPreviousSubwordStart,
+ /// Diffs the text stored in the clipboard against the current selection.
+ DiffClipboardWithSelection,
/// Displays names of all active cursors.
DisplayCursorNames,
/// Duplicates the current line below.
@@ -524,10 +524,10 @@ impl BlockMap {
// * Isomorphic transforms that end *at* the start of the edit
// * Below blocks that end at the start of the edit
// However, if we hit a replace block that ends at the start of the edit we want to reconstruct it.
- new_transforms.append(cursor.slice(&old_start, Bias::Left, &()), &());
+ new_transforms.append(cursor.slice(&old_start, Bias::Left), &());
if let Some(transform) = cursor.item() {
if transform.summary.input_rows > 0
- && cursor.end(&()) == old_start
+ && cursor.end() == old_start
&& transform
.block
.as_ref()
@@ -535,13 +535,13 @@ impl BlockMap {
{
// Preserve the transform (push and next)
new_transforms.push(transform.clone(), &());
- cursor.next(&());
+ cursor.next();
// Preserve below blocks at end of edit
while let Some(transform) = cursor.item() {
if transform.block.as_ref().map_or(false, |b| b.place_below()) {
new_transforms.push(transform.clone(), &());
- cursor.next(&());
+ cursor.next();
} else {
break;
}
@@ -579,8 +579,8 @@ impl BlockMap {
let mut new_end = WrapRow(edit.new.end);
loop {
// Seek to the transform starting at or after the end of the edit
- cursor.seek(&old_end, Bias::Left, &());
- cursor.next(&());
+ cursor.seek(&old_end, Bias::Left);
+ cursor.next();
// Extend edit to the end of the discarded transform so it is reconstructed in full
let transform_rows_after_edit = cursor.start().0 - old_end.0;
@@ -592,8 +592,8 @@ impl BlockMap {
if next_edit.old.start <= cursor.start().0 {
old_end = WrapRow(next_edit.old.end);
new_end = WrapRow(next_edit.new.end);
- cursor.seek(&old_end, Bias::Left, &());
- cursor.next(&());
+ cursor.seek(&old_end, Bias::Left);
+ cursor.next();
edits.next();
} else {
break;
@@ -608,7 +608,7 @@ impl BlockMap {
// Discard below blocks at the end of the edit. They'll be reconstructed.
while let Some(transform) = cursor.item() {
if transform.block.as_ref().map_or(false, |b| b.place_below()) {
- cursor.next(&());
+ cursor.next();
} else {
break;
}
@@ -720,7 +720,7 @@ impl BlockMap {
push_isomorphic(&mut new_transforms, rows_after_last_block, wrap_snapshot);
}
- new_transforms.append(cursor.suffix(&()), &());
+ new_transforms.append(cursor.suffix(), &());
debug_assert_eq!(
new_transforms.summary().input_rows,
wrap_snapshot.max_point().row() + 1
@@ -971,7 +971,7 @@ impl BlockMapReader<'_> {
);
let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&());
- cursor.seek(&start_wrap_row, Bias::Left, &());
+ cursor.seek(&start_wrap_row, Bias::Left);
while let Some(transform) = cursor.item() {
if cursor.start().0 > end_wrap_row {
break;
@@ -982,7 +982,7 @@ impl BlockMapReader<'_> {
return Some(cursor.start().1);
}
}
- cursor.next(&());
+ cursor.next();
}
None
@@ -1293,7 +1293,7 @@ impl BlockSnapshot {
let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows);
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
- cursor.seek(&BlockRow(rows.start), Bias::Right, &());
+ cursor.seek(&BlockRow(rows.start), Bias::Right);
let transform_output_start = cursor.start().0.0;
let transform_input_start = cursor.start().1.0;
@@ -1325,7 +1325,7 @@ impl BlockSnapshot {
pub(super) fn row_infos(&self, start_row: BlockRow) -> BlockRows<'_> {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
- cursor.seek(&start_row, Bias::Right, &());
+ cursor.seek(&start_row, Bias::Right);
let (output_start, input_start) = cursor.start();
let overshoot = if cursor
.item()
@@ -1346,9 +1346,9 @@ impl BlockSnapshot {
pub fn blocks_in_range(&self, rows: Range<u32>) -> impl Iterator<Item = (u32, &Block)> {
let mut cursor = self.transforms.cursor::<BlockRow>(&());
- cursor.seek(&BlockRow(rows.start), Bias::Left, &());
- while cursor.start().0 < rows.start && cursor.end(&()).0 <= rows.start {
- cursor.next(&());
+ cursor.seek(&BlockRow(rows.start), Bias::Left);
+ while cursor.start().0 < rows.start && cursor.end().0 <= rows.start {
+ cursor.next();
}
std::iter::from_fn(move || {
@@ -1364,10 +1364,10 @@ impl BlockSnapshot {
break;
}
if let Some(block) = &transform.block {
- cursor.next(&());
+ cursor.next();
return Some((start_row, block));
} else {
- cursor.next(&());
+ cursor.next();
}
}
None
@@ -1377,7 +1377,7 @@ impl BlockSnapshot {
pub fn sticky_header_excerpt(&self, position: f32) -> Option<StickyHeaderExcerpt<'_>> {
let top_row = position as u32;
let mut cursor = self.transforms.cursor::<BlockRow>(&());
- cursor.seek(&BlockRow(top_row), Bias::Right, &());
+ cursor.seek(&BlockRow(top_row), Bias::Right);
while let Some(transform) = cursor.item() {
match &transform.block {
@@ -1386,7 +1386,7 @@ impl BlockSnapshot {
}
Some(block) if block.is_buffer_header() => return None,
_ => {
- cursor.prev(&());
+ cursor.prev();
continue;
}
}
@@ -1414,7 +1414,7 @@ impl BlockSnapshot {
let wrap_row = WrapRow(wrap_point.row());
let mut cursor = self.transforms.cursor::<WrapRow>(&());
- cursor.seek(&wrap_row, Bias::Left, &());
+ cursor.seek(&wrap_row, Bias::Left);
while let Some(transform) = cursor.item() {
if let Some(block) = transform.block.as_ref() {
@@ -1425,7 +1425,7 @@ impl BlockSnapshot {
break;
}
- cursor.next(&());
+ cursor.next();
}
None
@@ -1442,7 +1442,7 @@ impl BlockSnapshot {
pub fn longest_row_in_range(&self, range: Range<BlockRow>) -> BlockRow {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
- cursor.seek(&range.start, Bias::Right, &());
+ cursor.seek(&range.start, Bias::Right);
let mut longest_row = range.start;
let mut longest_row_chars = 0;
@@ -1453,7 +1453,7 @@ impl BlockSnapshot {
let wrap_start_row = input_start.0 + overshoot;
let wrap_end_row = cmp::min(
input_start.0 + (range.end.0 - output_start.0),
- cursor.end(&()).1.0,
+ cursor.end().1.0,
);
let summary = self
.wrap_snapshot
@@ -1461,12 +1461,12 @@ impl BlockSnapshot {
longest_row = BlockRow(range.start.0 + summary.longest_row);
longest_row_chars = summary.longest_row_chars;
}
- cursor.next(&());
+ cursor.next();
}
let cursor_start_row = cursor.start().0;
if range.end > cursor_start_row {
- let summary = cursor.summary::<_, TransformSummary>(&range.end, Bias::Right, &());
+ let summary = cursor.summary::<_, TransformSummary>(&range.end, Bias::Right);
if summary.longest_row_chars > longest_row_chars {
longest_row = BlockRow(cursor_start_row.0 + summary.longest_row);
longest_row_chars = summary.longest_row_chars;
@@ -1493,7 +1493,7 @@ impl BlockSnapshot {
pub(super) fn line_len(&self, row: BlockRow) -> u32 {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
- cursor.seek(&BlockRow(row.0), Bias::Right, &());
+ cursor.seek(&BlockRow(row.0), Bias::Right);
if let Some(transform) = cursor.item() {
let (output_start, input_start) = cursor.start();
let overshoot = row.0 - output_start.0;
@@ -1511,13 +1511,13 @@ impl BlockSnapshot {
pub(super) fn is_block_line(&self, row: BlockRow) -> bool {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
- cursor.seek(&row, Bias::Right, &());
+ cursor.seek(&row, Bias::Right);
cursor.item().map_or(false, |t| t.block.is_some())
}
pub(super) fn is_folded_buffer_header(&self, row: BlockRow) -> bool {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
- cursor.seek(&row, Bias::Right, &());
+ cursor.seek(&row, Bias::Right);
let Some(transform) = cursor.item() else {
return false;
};
@@ -1529,7 +1529,7 @@ impl BlockSnapshot {
.wrap_snapshot
.make_wrap_point(Point::new(row.0, 0), Bias::Left);
let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&());
- cursor.seek(&WrapRow(wrap_point.row()), Bias::Right, &());
+ cursor.seek(&WrapRow(wrap_point.row()), Bias::Right);
cursor.item().map_or(false, |transform| {
transform
.block
@@ -1540,17 +1540,17 @@ impl BlockSnapshot {
pub fn clip_point(&self, point: BlockPoint, bias: Bias) -> BlockPoint {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
- cursor.seek(&BlockRow(point.row), Bias::Right, &());
+ cursor.seek(&BlockRow(point.row), Bias::Right);
let max_input_row = WrapRow(self.transforms.summary().input_rows);
let mut search_left =
- (bias == Bias::Left && cursor.start().1.0 > 0) || cursor.end(&()).1 == max_input_row;
+ (bias == Bias::Left && cursor.start().1.0 > 0) || cursor.end().1 == max_input_row;
let mut reversed = false;
loop {
if let Some(transform) = cursor.item() {
let (output_start_row, input_start_row) = cursor.start();
- let (output_end_row, input_end_row) = cursor.end(&());
+ let (output_end_row, input_end_row) = cursor.end();
let output_start = Point::new(output_start_row.0, 0);
let input_start = Point::new(input_start_row.0, 0);
let input_end = Point::new(input_end_row.0, 0);
@@ -1584,23 +1584,23 @@ impl BlockSnapshot {
}
if search_left {
- cursor.prev(&());
+ cursor.prev();
} else {
- cursor.next(&());
+ cursor.next();
}
} else if reversed {
return self.max_point();
} else {
reversed = true;
search_left = !search_left;
- cursor.seek(&BlockRow(point.row), Bias::Right, &());
+ cursor.seek(&BlockRow(point.row), Bias::Right);
}
}
}
pub fn to_block_point(&self, wrap_point: WrapPoint) -> BlockPoint {
let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&());
- cursor.seek(&WrapRow(wrap_point.row()), Bias::Right, &());
+ cursor.seek(&WrapRow(wrap_point.row()), Bias::Right);
if let Some(transform) = cursor.item() {
if transform.block.is_some() {
BlockPoint::new(cursor.start().1.0, 0)
@@ -1618,7 +1618,7 @@ impl BlockSnapshot {
pub fn to_wrap_point(&self, block_point: BlockPoint, bias: Bias) -> WrapPoint {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
- cursor.seek(&BlockRow(block_point.row), Bias::Right, &());
+ cursor.seek(&BlockRow(block_point.row), Bias::Right);
if let Some(transform) = cursor.item() {
match transform.block.as_ref() {
Some(block) => {
@@ -1630,7 +1630,7 @@ impl BlockSnapshot {
} else if bias == Bias::Left {
WrapPoint::new(cursor.start().1.0, 0)
} else {
- let wrap_row = cursor.end(&()).1.0 - 1;
+ let wrap_row = cursor.end().1.0 - 1;
WrapPoint::new(wrap_row, self.wrap_snapshot.line_len(wrap_row))
}
}
@@ -1650,14 +1650,14 @@ impl BlockChunks<'_> {
/// Go to the next transform
fn advance(&mut self) {
self.input_chunk = Chunk::default();
- self.transforms.next(&());
+ self.transforms.next();
while let Some(transform) = self.transforms.item() {
if transform
.block
.as_ref()
.map_or(false, |block| block.height() == 0)
{
- self.transforms.next(&());
+ self.transforms.next();
} else {
break;
}
@@ -1672,7 +1672,7 @@ impl BlockChunks<'_> {
let start_output_row = self.transforms.start().0.0;
if start_output_row < self.max_output_row {
let end_input_row = cmp::min(
- self.transforms.end(&()).1.0,
+ self.transforms.end().1.0,
start_input_row + (self.max_output_row - start_output_row),
);
self.input_chunks.seek(start_input_row..end_input_row);
@@ -1696,7 +1696,7 @@ impl<'a> Iterator for BlockChunks<'a> {
let transform = self.transforms.item()?;
if transform.block.is_some() {
let block_start = self.transforms.start().0.0;
- let mut block_end = self.transforms.end(&()).0.0;
+ let mut block_end = self.transforms.end().0.0;
self.advance();
if self.transforms.item().is_none() {
block_end -= 1;
@@ -1731,7 +1731,7 @@ impl<'a> Iterator for BlockChunks<'a> {
}
}
- let transform_end = self.transforms.end(&()).0.0;
+ let transform_end = self.transforms.end().0.0;
let (prefix_rows, prefix_bytes) =
offset_for_row(self.input_chunk.text, transform_end - self.output_row);
self.output_row += prefix_rows;
@@ -1770,15 +1770,15 @@ impl Iterator for BlockRows<'_> {
self.started = true;
}
- if self.output_row.0 >= self.transforms.end(&()).0.0 {
- self.transforms.next(&());
+ if self.output_row.0 >= self.transforms.end().0.0 {
+ self.transforms.next();
while let Some(transform) = self.transforms.item() {
if transform
.block
.as_ref()
.map_or(false, |block| block.height() == 0)
{
- self.transforms.next(&());
+ self.transforms.next();
} else {
break;
}
@@ -52,15 +52,15 @@ impl CreaseSnapshot {
) -> Option<&'a Crease<Anchor>> {
let start = snapshot.anchor_before(Point::new(row.0, 0));
let mut cursor = self.creases.cursor::<ItemSummary>(snapshot);
- cursor.seek(&start, Bias::Left, snapshot);
+ cursor.seek(&start, Bias::Left);
while let Some(item) = cursor.item() {
match Ord::cmp(&item.crease.range().start.to_point(snapshot).row, &row.0) {
- Ordering::Less => cursor.next(snapshot),
+ Ordering::Less => cursor.next(),
Ordering::Equal => {
if item.crease.range().start.is_valid(snapshot) {
return Some(&item.crease);
} else {
- cursor.next(snapshot);
+ cursor.next();
}
}
Ordering::Greater => break,
@@ -76,11 +76,11 @@ impl CreaseSnapshot {
) -> impl 'a + Iterator<Item = &'a Crease<Anchor>> {
let start = snapshot.anchor_before(Point::new(range.start.0, 0));
let mut cursor = self.creases.cursor::<ItemSummary>(snapshot);
- cursor.seek(&start, Bias::Left, snapshot);
+ cursor.seek(&start, Bias::Left);
std::iter::from_fn(move || {
while let Some(item) = cursor.item() {
- cursor.next(snapshot);
+ cursor.next();
let crease_range = item.crease.range();
let crease_start = crease_range.start.to_point(snapshot);
let crease_end = crease_range.end.to_point(snapshot);
@@ -102,13 +102,13 @@ impl CreaseSnapshot {
let mut cursor = self.creases.cursor::<ItemSummary>(snapshot);
let mut results = Vec::new();
- cursor.next(snapshot);
+ cursor.next();
while let Some(item) = cursor.item() {
let crease_range = item.crease.range();
let start_point = crease_range.start.to_point(snapshot);
let end_point = crease_range.end.to_point(snapshot);
results.push((item.id, start_point..end_point));
- cursor.next(snapshot);
+ cursor.next();
}
results
@@ -298,7 +298,7 @@ impl CreaseMap {
let mut cursor = self.snapshot.creases.cursor::<ItemSummary>(snapshot);
for crease in creases {
let crease_range = crease.range().clone();
- new_creases.append(cursor.slice(&crease_range, Bias::Left, snapshot), snapshot);
+ new_creases.append(cursor.slice(&crease_range, Bias::Left), snapshot);
let id = self.next_id;
self.next_id.0 += 1;
@@ -306,7 +306,7 @@ impl CreaseMap {
new_creases.push(CreaseItem { crease, id }, snapshot);
new_ids.push(id);
}
- new_creases.append(cursor.suffix(snapshot), snapshot);
+ new_creases.append(cursor.suffix(), snapshot);
new_creases
};
new_ids
@@ -332,9 +332,9 @@ impl CreaseMap {
let mut cursor = self.snapshot.creases.cursor::<ItemSummary>(snapshot);
for (id, range) in &removals {
- new_creases.append(cursor.slice(range, Bias::Left, snapshot), snapshot);
+ new_creases.append(cursor.slice(range, Bias::Left), snapshot);
while let Some(item) = cursor.item() {
- cursor.next(snapshot);
+ cursor.next();
if item.id == *id {
break;
} else {
@@ -343,7 +343,7 @@ impl CreaseMap {
}
}
- new_creases.append(cursor.suffix(snapshot), snapshot);
+ new_creases.append(cursor.suffix(), snapshot);
new_creases
};
@@ -99,7 +99,7 @@ impl FoldPoint {
pub fn to_inlay_point(self, snapshot: &FoldSnapshot) -> InlayPoint {
let mut cursor = snapshot.transforms.cursor::<(FoldPoint, InlayPoint)>(&());
- cursor.seek(&self, Bias::Right, &());
+ cursor.seek(&self, Bias::Right);
let overshoot = self.0 - cursor.start().0.0;
InlayPoint(cursor.start().1.0 + overshoot)
}
@@ -108,7 +108,7 @@ impl FoldPoint {
let mut cursor = snapshot
.transforms
.cursor::<(FoldPoint, TransformSummary)>(&());
- cursor.seek(&self, Bias::Right, &());
+ cursor.seek(&self, Bias::Right);
let overshoot = self.0 - cursor.start().1.output.lines;
let mut offset = cursor.start().1.output.len;
if !overshoot.is_zero() {
@@ -187,10 +187,10 @@ impl FoldMapWriter<'_> {
width: None,
},
);
- new_tree.append(cursor.slice(&fold.range, Bias::Right, buffer), buffer);
+ new_tree.append(cursor.slice(&fold.range, Bias::Right), buffer);
new_tree.push(fold, buffer);
}
- new_tree.append(cursor.suffix(buffer), buffer);
+ new_tree.append(cursor.suffix(), buffer);
new_tree
};
@@ -252,7 +252,7 @@ impl FoldMapWriter<'_> {
fold_ixs_to_delete.push(*folds_cursor.start());
self.0.snapshot.fold_metadata_by_id.remove(&fold.id);
}
- folds_cursor.next(buffer);
+ folds_cursor.next();
}
}
@@ -263,10 +263,10 @@ impl FoldMapWriter<'_> {
let mut cursor = self.0.snapshot.folds.cursor::<usize>(buffer);
let mut folds = SumTree::new(buffer);
for fold_ix in fold_ixs_to_delete {
- folds.append(cursor.slice(&fold_ix, Bias::Right, buffer), buffer);
- cursor.next(buffer);
+ folds.append(cursor.slice(&fold_ix, Bias::Right), buffer);
+ cursor.next();
}
- folds.append(cursor.suffix(buffer), buffer);
+ folds.append(cursor.suffix(), buffer);
folds
};
@@ -412,7 +412,7 @@ impl FoldMap {
let mut new_transforms = SumTree::<Transform>::default();
let mut cursor = self.snapshot.transforms.cursor::<InlayOffset>(&());
- cursor.seek(&InlayOffset(0), Bias::Right, &());
+ cursor.seek(&InlayOffset(0), Bias::Right);
while let Some(mut edit) = inlay_edits_iter.next() {
if let Some(item) = cursor.item() {
@@ -421,19 +421,19 @@ impl FoldMap {
|transform| {
if !transform.is_fold() {
transform.summary.add_summary(&item.summary, &());
- cursor.next(&());
+ cursor.next();
}
},
&(),
);
}
}
- new_transforms.append(cursor.slice(&edit.old.start, Bias::Left, &()), &());
+ new_transforms.append(cursor.slice(&edit.old.start, Bias::Left), &());
edit.new.start -= edit.old.start - *cursor.start();
edit.old.start = *cursor.start();
- cursor.seek(&edit.old.end, Bias::Right, &());
- cursor.next(&());
+ cursor.seek(&edit.old.end, Bias::Right);
+ cursor.next();
let mut delta = edit.new_len().0 as isize - edit.old_len().0 as isize;
loop {
@@ -449,8 +449,8 @@ impl FoldMap {
if next_edit.old.end >= edit.old.end {
edit.old.end = next_edit.old.end;
- cursor.seek(&edit.old.end, Bias::Right, &());
- cursor.next(&());
+ cursor.seek(&edit.old.end, Bias::Right);
+ cursor.next();
}
} else {
break;
@@ -467,11 +467,7 @@ impl FoldMap {
.snapshot
.folds
.cursor::<FoldRange>(&inlay_snapshot.buffer);
- folds_cursor.seek(
- &FoldRange(anchor..Anchor::max()),
- Bias::Left,
- &inlay_snapshot.buffer,
- );
+ folds_cursor.seek(&FoldRange(anchor..Anchor::max()), Bias::Left);
let mut folds = iter::from_fn({
let inlay_snapshot = &inlay_snapshot;
@@ -485,7 +481,7 @@ impl FoldMap {
..inlay_snapshot.to_inlay_offset(buffer_end),
)
});
- folds_cursor.next(&inlay_snapshot.buffer);
+ folds_cursor.next();
item
}
})
@@ -558,7 +554,7 @@ impl FoldMap {
}
}
- new_transforms.append(cursor.suffix(&()), &());
+ new_transforms.append(cursor.suffix(), &());
if new_transforms.is_empty() {
let text_summary = inlay_snapshot.text_summary();
push_isomorphic(&mut new_transforms, text_summary);
@@ -575,31 +571,31 @@ impl FoldMap {
let mut new_transforms = new_transforms.cursor::<(InlayOffset, FoldOffset)>(&());
for mut edit in inlay_edits {
- old_transforms.seek(&edit.old.start, Bias::Left, &());
+ old_transforms.seek(&edit.old.start, Bias::Left);
if old_transforms.item().map_or(false, |t| t.is_fold()) {
edit.old.start = old_transforms.start().0;
}
let old_start =
old_transforms.start().1.0 + (edit.old.start - old_transforms.start().0).0;
- old_transforms.seek_forward(&edit.old.end, Bias::Right, &());
+ old_transforms.seek_forward(&edit.old.end, Bias::Right);
if old_transforms.item().map_or(false, |t| t.is_fold()) {
- old_transforms.next(&());
+ old_transforms.next();
edit.old.end = old_transforms.start().0;
}
let old_end =
old_transforms.start().1.0 + (edit.old.end - old_transforms.start().0).0;
- new_transforms.seek(&edit.new.start, Bias::Left, &());
+ new_transforms.seek(&edit.new.start, Bias::Left);
if new_transforms.item().map_or(false, |t| t.is_fold()) {
edit.new.start = new_transforms.start().0;
}
let new_start =
new_transforms.start().1.0 + (edit.new.start - new_transforms.start().0).0;
- new_transforms.seek_forward(&edit.new.end, Bias::Right, &());
+ new_transforms.seek_forward(&edit.new.end, Bias::Right);
if new_transforms.item().map_or(false, |t| t.is_fold()) {
- new_transforms.next(&());
+ new_transforms.next();
edit.new.end = new_transforms.start().0;
}
let new_end =
@@ -656,10 +652,10 @@ impl FoldSnapshot {
let mut summary = TextSummary::default();
let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&());
- cursor.seek(&range.start, Bias::Right, &());
+ cursor.seek(&range.start, Bias::Right);
if let Some(transform) = cursor.item() {
let start_in_transform = range.start.0 - cursor.start().0.0;
- let end_in_transform = cmp::min(range.end, cursor.end(&()).0).0 - cursor.start().0.0;
+ let end_in_transform = cmp::min(range.end, cursor.end().0).0 - cursor.start().0.0;
if let Some(placeholder) = transform.placeholder.as_ref() {
summary = TextSummary::from(
&placeholder.text
@@ -678,10 +674,10 @@ impl FoldSnapshot {
}
}
- if range.end > cursor.end(&()).0 {
- cursor.next(&());
+ if range.end > cursor.end().0 {
+ cursor.next();
summary += &cursor
- .summary::<_, TransformSummary>(&range.end, Bias::Right, &())
+ .summary::<_, TransformSummary>(&range.end, Bias::Right)
.output;
if let Some(transform) = cursor.item() {
let end_in_transform = range.end.0 - cursor.start().0.0;
@@ -705,19 +701,16 @@ impl FoldSnapshot {
pub fn to_fold_point(&self, point: InlayPoint, bias: Bias) -> FoldPoint {
let mut cursor = self.transforms.cursor::<(InlayPoint, FoldPoint)>(&());
- cursor.seek(&point, Bias::Right, &());
+ cursor.seek(&point, Bias::Right);
if cursor.item().map_or(false, |t| t.is_fold()) {
if bias == Bias::Left || point == cursor.start().0 {
cursor.start().1
} else {
- cursor.end(&()).1
+ cursor.end().1
}
} else {
let overshoot = point.0 - cursor.start().0.0;
- FoldPoint(cmp::min(
- cursor.start().1.0 + overshoot,
- cursor.end(&()).1.0,
- ))
+ FoldPoint(cmp::min(cursor.start().1.0 + overshoot, cursor.end().1.0))
}
}
@@ -742,7 +735,7 @@ impl FoldSnapshot {
let fold_point = FoldPoint::new(start_row, 0);
let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&());
- cursor.seek(&fold_point, Bias::Left, &());
+ cursor.seek(&fold_point, Bias::Left);
let overshoot = fold_point.0 - cursor.start().0.0;
let inlay_point = InlayPoint(cursor.start().1.0 + overshoot);
@@ -773,7 +766,7 @@ impl FoldSnapshot {
let mut folds = intersecting_folds(&self.inlay_snapshot, &self.folds, range, false);
iter::from_fn(move || {
let item = folds.item();
- folds.next(&self.inlay_snapshot.buffer);
+ folds.next();
item
})
}
@@ -785,7 +778,7 @@ impl FoldSnapshot {
let buffer_offset = offset.to_offset(&self.inlay_snapshot.buffer);
let inlay_offset = self.inlay_snapshot.to_inlay_offset(buffer_offset);
let mut cursor = self.transforms.cursor::<InlayOffset>(&());
- cursor.seek(&inlay_offset, Bias::Right, &());
+ cursor.seek(&inlay_offset, Bias::Right);
cursor.item().map_or(false, |t| t.placeholder.is_some())
}
@@ -794,7 +787,7 @@ impl FoldSnapshot {
.inlay_snapshot
.to_inlay_point(Point::new(buffer_row.0, 0));
let mut cursor = self.transforms.cursor::<InlayPoint>(&());
- cursor.seek(&inlay_point, Bias::Right, &());
+ cursor.seek(&inlay_point, Bias::Right);
loop {
match cursor.item() {
Some(transform) => {
@@ -808,11 +801,11 @@ impl FoldSnapshot {
None => return false,
}
- if cursor.end(&()).row() == inlay_point.row() {
- cursor.next(&());
+ if cursor.end().row() == inlay_point.row() {
+ cursor.next();
} else {
inlay_point.0 += Point::new(1, 0);
- cursor.seek(&inlay_point, Bias::Right, &());
+ cursor.seek(&inlay_point, Bias::Right);
}
}
}
@@ -824,14 +817,14 @@ impl FoldSnapshot {
highlights: Highlights<'a>,
) -> FoldChunks<'a> {
let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>(&());
- transform_cursor.seek(&range.start, Bias::Right, &());
+ transform_cursor.seek(&range.start, Bias::Right);
let inlay_start = {
let overshoot = range.start.0 - transform_cursor.start().0.0;
transform_cursor.start().1 + InlayOffset(overshoot)
};
- let transform_end = transform_cursor.end(&());
+ let transform_end = transform_cursor.end();
let inlay_end = if transform_cursor
.item()
@@ -879,14 +872,14 @@ impl FoldSnapshot {
pub fn clip_point(&self, point: FoldPoint, bias: Bias) -> FoldPoint {
let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&());
- cursor.seek(&point, Bias::Right, &());
+ cursor.seek(&point, Bias::Right);
if let Some(transform) = cursor.item() {
let transform_start = cursor.start().0.0;
if transform.placeholder.is_some() {
if point.0 == transform_start || matches!(bias, Bias::Left) {
FoldPoint(transform_start)
} else {
- FoldPoint(cursor.end(&()).0.0)
+ FoldPoint(cursor.end().0.0)
}
} else {
let overshoot = InlayPoint(point.0 - transform_start);
@@ -945,7 +938,7 @@ fn intersecting_folds<'a>(
start_cmp == Ordering::Less && end_cmp == Ordering::Greater
}
});
- cursor.next(buffer);
+ cursor.next();
cursor
}
@@ -1211,7 +1204,7 @@ pub struct FoldRows<'a> {
impl FoldRows<'_> {
pub(crate) fn seek(&mut self, row: u32) {
let fold_point = FoldPoint::new(row, 0);
- self.cursor.seek(&fold_point, Bias::Left, &());
+ self.cursor.seek(&fold_point, Bias::Left);
let overshoot = fold_point.0 - self.cursor.start().0.0;
let inlay_point = InlayPoint(self.cursor.start().1.0 + overshoot);
self.input_rows.seek(inlay_point.row());
@@ -1224,8 +1217,8 @@ impl Iterator for FoldRows<'_> {
fn next(&mut self) -> Option<Self::Item> {
let mut traversed_fold = false;
- while self.fold_point > self.cursor.end(&()).0 {
- self.cursor.next(&());
+ while self.fold_point > self.cursor.end().0 {
+ self.cursor.next();
traversed_fold = true;
if self.cursor.item().is_none() {
break;
@@ -1330,14 +1323,14 @@ pub struct FoldChunks<'a> {
impl FoldChunks<'_> {
pub(crate) fn seek(&mut self, range: Range<FoldOffset>) {
- self.transform_cursor.seek(&range.start, Bias::Right, &());
+ self.transform_cursor.seek(&range.start, Bias::Right);
let inlay_start = {
let overshoot = range.start.0 - self.transform_cursor.start().0.0;
self.transform_cursor.start().1 + InlayOffset(overshoot)
};
- let transform_end = self.transform_cursor.end(&());
+ let transform_end = self.transform_cursor.end();
let inlay_end = if self
.transform_cursor
@@ -1376,10 +1369,10 @@ impl<'a> Iterator for FoldChunks<'a> {
self.inlay_chunk.take();
self.inlay_offset += InlayOffset(transform.summary.input.len);
- while self.inlay_offset >= self.transform_cursor.end(&()).1
+ while self.inlay_offset >= self.transform_cursor.end().1
&& self.transform_cursor.item().is_some()
{
- self.transform_cursor.next(&());
+ self.transform_cursor.next();
}
self.output_offset.0 += placeholder.text.len();
@@ -1396,7 +1389,7 @@ impl<'a> Iterator for FoldChunks<'a> {
&& self.inlay_chunks.offset() != self.inlay_offset
{
let transform_start = self.transform_cursor.start();
- let transform_end = self.transform_cursor.end(&());
+ let transform_end = self.transform_cursor.end();
let inlay_end = if self.max_output_offset < transform_end.0 {
let overshoot = self.max_output_offset.0 - transform_start.0.0;
transform_start.1 + InlayOffset(overshoot)
@@ -1417,14 +1410,14 @@ impl<'a> Iterator for FoldChunks<'a> {
if let Some((buffer_chunk_start, mut inlay_chunk)) = self.inlay_chunk.clone() {
let chunk = &mut inlay_chunk.chunk;
let buffer_chunk_end = buffer_chunk_start + InlayOffset(chunk.text.len());
- let transform_end = self.transform_cursor.end(&()).1;
+ let transform_end = self.transform_cursor.end().1;
let chunk_end = buffer_chunk_end.min(transform_end);
chunk.text = &chunk.text
[(self.inlay_offset - buffer_chunk_start).0..(chunk_end - buffer_chunk_start).0];
if chunk_end == transform_end {
- self.transform_cursor.next(&());
+ self.transform_cursor.next();
} else if chunk_end == buffer_chunk_end {
self.inlay_chunk.take();
}
@@ -1456,7 +1449,7 @@ impl FoldOffset {
let mut cursor = snapshot
.transforms
.cursor::<(FoldOffset, TransformSummary)>(&());
- cursor.seek(&self, Bias::Right, &());
+ cursor.seek(&self, Bias::Right);
let overshoot = if cursor.item().map_or(true, |t| t.is_fold()) {
Point::new(0, (self.0 - cursor.start().0.0) as u32)
} else {
@@ -1470,7 +1463,7 @@ impl FoldOffset {
#[cfg(test)]
pub fn to_inlay_offset(self, snapshot: &FoldSnapshot) -> InlayOffset {
let mut cursor = snapshot.transforms.cursor::<(FoldOffset, InlayOffset)>(&());
- cursor.seek(&self, Bias::Right, &());
+ cursor.seek(&self, Bias::Right);
let overshoot = self.0 - cursor.start().0.0;
InlayOffset(cursor.start().1.0 + overshoot)
}
@@ -263,7 +263,7 @@ pub struct InlayChunk<'a> {
impl InlayChunks<'_> {
pub fn seek(&mut self, new_range: Range<InlayOffset>) {
- self.transforms.seek(&new_range.start, Bias::Right, &());
+ self.transforms.seek(&new_range.start, Bias::Right);
let buffer_range = self.snapshot.to_buffer_offset(new_range.start)
..self.snapshot.to_buffer_offset(new_range.end);
@@ -296,12 +296,12 @@ impl<'a> Iterator for InlayChunks<'a> {
*chunk = self.buffer_chunks.next().unwrap();
}
- let desired_bytes = self.transforms.end(&()).0.0 - self.output_offset.0;
+ let desired_bytes = self.transforms.end().0.0 - self.output_offset.0;
// If we're already at the transform boundary, skip to the next transform
if desired_bytes == 0 {
self.inlay_chunks = None;
- self.transforms.next(&());
+ self.transforms.next();
return self.next();
}
@@ -397,7 +397,7 @@ impl<'a> Iterator for InlayChunks<'a> {
let inlay_chunks = self.inlay_chunks.get_or_insert_with(|| {
let start = offset_in_inlay;
- let end = cmp::min(self.max_output_offset, self.transforms.end(&()).0)
+ let end = cmp::min(self.max_output_offset, self.transforms.end().0)
- self.transforms.start().0;
inlay.text.chunks_in_range(start.0..end.0)
});
@@ -441,9 +441,9 @@ impl<'a> Iterator for InlayChunks<'a> {
}
};
- if self.output_offset >= self.transforms.end(&()).0 {
+ if self.output_offset >= self.transforms.end().0 {
self.inlay_chunks = None;
- self.transforms.next(&());
+ self.transforms.next();
}
Some(chunk)
@@ -453,7 +453,7 @@ impl<'a> Iterator for InlayChunks<'a> {
impl InlayBufferRows<'_> {
pub fn seek(&mut self, row: u32) {
let inlay_point = InlayPoint::new(row, 0);
- self.transforms.seek(&inlay_point, Bias::Left, &());
+ self.transforms.seek(&inlay_point, Bias::Left);
let mut buffer_point = self.transforms.start().1;
let buffer_row = MultiBufferRow(if row == 0 {
@@ -487,7 +487,7 @@ impl Iterator for InlayBufferRows<'_> {
self.inlay_row += 1;
self.transforms
- .seek_forward(&InlayPoint::new(self.inlay_row, 0), Bias::Left, &());
+ .seek_forward(&InlayPoint::new(self.inlay_row, 0), Bias::Left);
Some(buffer_row)
}
@@ -556,18 +556,18 @@ impl InlayMap {
let mut cursor = snapshot.transforms.cursor::<(usize, InlayOffset)>(&());
let mut buffer_edits_iter = buffer_edits.iter().peekable();
while let Some(buffer_edit) = buffer_edits_iter.next() {
- new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left, &()), &());
+ new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left), &());
if let Some(Transform::Isomorphic(transform)) = cursor.item() {
- if cursor.end(&()).0 == buffer_edit.old.start {
+ if cursor.end().0 == buffer_edit.old.start {
push_isomorphic(&mut new_transforms, *transform);
- cursor.next(&());
+ cursor.next();
}
}
// Remove all the inlays and transforms contained by the edit.
let old_start =
cursor.start().1 + InlayOffset(buffer_edit.old.start - cursor.start().0);
- cursor.seek(&buffer_edit.old.end, Bias::Right, &());
+ cursor.seek(&buffer_edit.old.end, Bias::Right);
let old_end =
cursor.start().1 + InlayOffset(buffer_edit.old.end - cursor.start().0);
@@ -625,20 +625,20 @@ impl InlayMap {
// we can push its remainder.
if buffer_edits_iter
.peek()
- .map_or(true, |edit| edit.old.start >= cursor.end(&()).0)
+ .map_or(true, |edit| edit.old.start >= cursor.end().0)
{
let transform_start = new_transforms.summary().input.len;
let transform_end =
- buffer_edit.new.end + (cursor.end(&()).0 - buffer_edit.old.end);
+ buffer_edit.new.end + (cursor.end().0 - buffer_edit.old.end);
push_isomorphic(
&mut new_transforms,
buffer_snapshot.text_summary_for_range(transform_start..transform_end),
);
- cursor.next(&());
+ cursor.next();
}
}
- new_transforms.append(cursor.suffix(&()), &());
+ new_transforms.append(cursor.suffix(), &());
if new_transforms.is_empty() {
new_transforms.push(Transform::Isomorphic(Default::default()), &());
}
@@ -773,7 +773,7 @@ impl InlaySnapshot {
let mut cursor = self
.transforms
.cursor::<(InlayOffset, (InlayPoint, usize))>(&());
- cursor.seek(&offset, Bias::Right, &());
+ cursor.seek(&offset, Bias::Right);
let overshoot = offset.0 - cursor.start().0.0;
match cursor.item() {
Some(Transform::Isomorphic(_)) => {
@@ -803,7 +803,7 @@ impl InlaySnapshot {
let mut cursor = self
.transforms
.cursor::<(InlayPoint, (InlayOffset, Point))>(&());
- cursor.seek(&point, Bias::Right, &());
+ cursor.seek(&point, Bias::Right);
let overshoot = point.0 - cursor.start().0.0;
match cursor.item() {
Some(Transform::Isomorphic(_)) => {
@@ -822,7 +822,7 @@ impl InlaySnapshot {
}
pub fn to_buffer_point(&self, point: InlayPoint) -> Point {
let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&());
- cursor.seek(&point, Bias::Right, &());
+ cursor.seek(&point, Bias::Right);
match cursor.item() {
Some(Transform::Isomorphic(_)) => {
let overshoot = point.0 - cursor.start().0.0;
@@ -834,7 +834,7 @@ impl InlaySnapshot {
}
pub fn to_buffer_offset(&self, offset: InlayOffset) -> usize {
let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&());
- cursor.seek(&offset, Bias::Right, &());
+ cursor.seek(&offset, Bias::Right);
match cursor.item() {
Some(Transform::Isomorphic(_)) => {
let overshoot = offset - cursor.start().0;
@@ -847,19 +847,19 @@ impl InlaySnapshot {
pub fn to_inlay_offset(&self, offset: usize) -> InlayOffset {
let mut cursor = self.transforms.cursor::<(usize, InlayOffset)>(&());
- cursor.seek(&offset, Bias::Left, &());
+ cursor.seek(&offset, Bias::Left);
loop {
match cursor.item() {
Some(Transform::Isomorphic(_)) => {
- if offset == cursor.end(&()).0 {
+ if offset == cursor.end().0 {
while let Some(Transform::Inlay(inlay)) = cursor.next_item() {
if inlay.position.bias() == Bias::Right {
break;
} else {
- cursor.next(&());
+ cursor.next();
}
}
- return cursor.end(&()).1;
+ return cursor.end().1;
} else {
let overshoot = offset - cursor.start().0;
return InlayOffset(cursor.start().1.0 + overshoot);
@@ -867,7 +867,7 @@ impl InlaySnapshot {
}
Some(Transform::Inlay(inlay)) => {
if inlay.position.bias() == Bias::Left {
- cursor.next(&());
+ cursor.next();
} else {
return cursor.start().1;
}
@@ -880,19 +880,19 @@ impl InlaySnapshot {
}
pub fn to_inlay_point(&self, point: Point) -> InlayPoint {
let mut cursor = self.transforms.cursor::<(Point, InlayPoint)>(&());
- cursor.seek(&point, Bias::Left, &());
+ cursor.seek(&point, Bias::Left);
loop {
match cursor.item() {
Some(Transform::Isomorphic(_)) => {
- if point == cursor.end(&()).0 {
+ if point == cursor.end().0 {
while let Some(Transform::Inlay(inlay)) = cursor.next_item() {
if inlay.position.bias() == Bias::Right {
break;
} else {
- cursor.next(&());
+ cursor.next();
}
}
- return cursor.end(&()).1;
+ return cursor.end().1;
} else {
let overshoot = point - cursor.start().0;
return InlayPoint(cursor.start().1.0 + overshoot);
@@ -900,7 +900,7 @@ impl InlaySnapshot {
}
Some(Transform::Inlay(inlay)) => {
if inlay.position.bias() == Bias::Left {
- cursor.next(&());
+ cursor.next();
} else {
return cursor.start().1;
}
@@ -914,7 +914,7 @@ impl InlaySnapshot {
pub fn clip_point(&self, mut point: InlayPoint, mut bias: Bias) -> InlayPoint {
let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&());
- cursor.seek(&point, Bias::Left, &());
+ cursor.seek(&point, Bias::Left);
loop {
match cursor.item() {
Some(Transform::Isomorphic(transform)) => {
@@ -923,7 +923,7 @@ impl InlaySnapshot {
if inlay.position.bias() == Bias::Left {
return point;
} else if bias == Bias::Left {
- cursor.prev(&());
+ cursor.prev();
} else if transform.first_line_chars == 0 {
point.0 += Point::new(1, 0);
} else {
@@ -932,12 +932,12 @@ impl InlaySnapshot {
} else {
return point;
}
- } else if cursor.end(&()).0 == point {
+ } else if cursor.end().0 == point {
if let Some(Transform::Inlay(inlay)) = cursor.next_item() {
if inlay.position.bias() == Bias::Right {
return point;
} else if bias == Bias::Right {
- cursor.next(&());
+ cursor.next();
} else if point.0.column == 0 {
point.0.row -= 1;
point.0.column = self.line_len(point.0.row);
@@ -970,7 +970,7 @@ impl InlaySnapshot {
}
_ => return point,
}
- } else if point == cursor.end(&()).0 && inlay.position.bias() == Bias::Left {
+ } else if point == cursor.end().0 && inlay.position.bias() == Bias::Left {
match cursor.next_item() {
Some(Transform::Inlay(inlay)) => {
if inlay.position.bias() == Bias::Right {
@@ -983,9 +983,9 @@ impl InlaySnapshot {
if bias == Bias::Left {
point = cursor.start().0;
- cursor.prev(&());
+ cursor.prev();
} else {
- cursor.next(&());
+ cursor.next();
point = cursor.start().0;
}
}
@@ -993,9 +993,9 @@ impl InlaySnapshot {
bias = bias.invert();
if bias == Bias::Left {
point = cursor.start().0;
- cursor.prev(&());
+ cursor.prev();
} else {
- cursor.next(&());
+ cursor.next();
point = cursor.start().0;
}
}
@@ -1011,7 +1011,7 @@ impl InlaySnapshot {
let mut summary = TextSummary::default();
let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&());
- cursor.seek(&range.start, Bias::Right, &());
+ cursor.seek(&range.start, Bias::Right);
let overshoot = range.start.0 - cursor.start().0.0;
match cursor.item() {
@@ -1019,22 +1019,22 @@ impl InlaySnapshot {
let buffer_start = cursor.start().1;
let suffix_start = buffer_start + overshoot;
let suffix_end =
- buffer_start + (cmp::min(cursor.end(&()).0, range.end).0 - cursor.start().0.0);
+ buffer_start + (cmp::min(cursor.end().0, range.end).0 - cursor.start().0.0);
summary = self.buffer.text_summary_for_range(suffix_start..suffix_end);
- cursor.next(&());
+ cursor.next();
}
Some(Transform::Inlay(inlay)) => {
let suffix_start = overshoot;
- let suffix_end = cmp::min(cursor.end(&()).0, range.end).0 - cursor.start().0.0;
+ let suffix_end = cmp::min(cursor.end().0, range.end).0 - cursor.start().0.0;
summary = inlay.text.cursor(suffix_start).summary(suffix_end);
- cursor.next(&());
+ cursor.next();
}
None => {}
}
if range.end > cursor.start().0 {
summary += cursor
- .summary::<_, TransformSummary>(&range.end, Bias::Right, &())
+ .summary::<_, TransformSummary>(&range.end, Bias::Right)
.output;
let overshoot = range.end.0 - cursor.start().0.0;
@@ -1060,7 +1060,7 @@ impl InlaySnapshot {
pub fn row_infos(&self, row: u32) -> InlayBufferRows<'_> {
let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&());
let inlay_point = InlayPoint::new(row, 0);
- cursor.seek(&inlay_point, Bias::Left, &());
+ cursor.seek(&inlay_point, Bias::Left);
let max_buffer_row = self.buffer.max_row();
let mut buffer_point = cursor.start().1;
@@ -1101,7 +1101,7 @@ impl InlaySnapshot {
highlights: Highlights<'a>,
) -> InlayChunks<'a> {
let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&());
- cursor.seek(&range.start, Bias::Right, &());
+ cursor.seek(&range.start, Bias::Right);
let buffer_range = self.to_buffer_offset(range.start)..self.to_buffer_offset(range.end);
let buffer_chunks = CustomHighlightsChunks::new(
@@ -72,7 +72,7 @@ pub struct WrapRows<'a> {
impl WrapRows<'_> {
pub(crate) fn seek(&mut self, start_row: u32) {
self.transforms
- .seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
+ .seek(&WrapPoint::new(start_row, 0), Bias::Left);
let mut input_row = self.transforms.start().1.row();
if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_row += start_row - self.transforms.start().0.row();
@@ -340,7 +340,7 @@ impl WrapSnapshot {
let mut tab_edits_iter = tab_edits.iter().peekable();
new_transforms =
- old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right, &());
+ old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right);
while let Some(edit) = tab_edits_iter.next() {
if edit.new.start > TabPoint::from(new_transforms.summary().input.lines) {
@@ -356,31 +356,29 @@ impl WrapSnapshot {
));
}
- old_cursor.seek_forward(&edit.old.end, Bias::Right, &());
+ old_cursor.seek_forward(&edit.old.end, Bias::Right);
if let Some(next_edit) = tab_edits_iter.peek() {
- if next_edit.old.start > old_cursor.end(&()) {
- if old_cursor.end(&()) > edit.old.end {
+ if next_edit.old.start > old_cursor.end() {
+ if old_cursor.end() > edit.old.end {
let summary = self
.tab_snapshot
- .text_summary_for_range(edit.old.end..old_cursor.end(&()));
+ .text_summary_for_range(edit.old.end..old_cursor.end());
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
- old_cursor.next(&());
- new_transforms.append(
- old_cursor.slice(&next_edit.old.start, Bias::Right, &()),
- &(),
- );
+ old_cursor.next();
+ new_transforms
+ .append(old_cursor.slice(&next_edit.old.start, Bias::Right), &());
}
} else {
- if old_cursor.end(&()) > edit.old.end {
+ if old_cursor.end() > edit.old.end {
let summary = self
.tab_snapshot
- .text_summary_for_range(edit.old.end..old_cursor.end(&()));
+ .text_summary_for_range(edit.old.end..old_cursor.end());
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
- old_cursor.next(&());
- new_transforms.append(old_cursor.suffix(&()), &());
+ old_cursor.next();
+ new_transforms.append(old_cursor.suffix(), &());
}
}
}
@@ -441,7 +439,6 @@ impl WrapSnapshot {
new_transforms = old_cursor.slice(
&TabPoint::new(row_edits.peek().unwrap().old_rows.start, 0),
Bias::Right,
- &(),
);
while let Some(edit) = row_edits.next() {
@@ -516,34 +513,31 @@ impl WrapSnapshot {
}
new_transforms.extend(edit_transforms, &());
- old_cursor.seek_forward(&TabPoint::new(edit.old_rows.end, 0), Bias::Right, &());
+ old_cursor.seek_forward(&TabPoint::new(edit.old_rows.end, 0), Bias::Right);
if let Some(next_edit) = row_edits.peek() {
- if next_edit.old_rows.start > old_cursor.end(&()).row() {
- if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) {
+ if next_edit.old_rows.start > old_cursor.end().row() {
+ if old_cursor.end() > TabPoint::new(edit.old_rows.end, 0) {
let summary = self.tab_snapshot.text_summary_for_range(
- TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
+ TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(),
);
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
- old_cursor.next(&());
+ old_cursor.next();
new_transforms.append(
- old_cursor.slice(
- &TabPoint::new(next_edit.old_rows.start, 0),
- Bias::Right,
- &(),
- ),
+ old_cursor
+ .slice(&TabPoint::new(next_edit.old_rows.start, 0), Bias::Right),
&(),
);
}
} else {
- if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) {
+ if old_cursor.end() > TabPoint::new(edit.old_rows.end, 0) {
let summary = self.tab_snapshot.text_summary_for_range(
- TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()),
+ TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(),
);
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
- old_cursor.next(&());
- new_transforms.append(old_cursor.suffix(&()), &());
+ old_cursor.next();
+ new_transforms.append(old_cursor.suffix(), &());
}
}
}
@@ -570,19 +564,19 @@ impl WrapSnapshot {
tab_edit.new.start.0.column = 0;
tab_edit.new.end.0 += Point::new(1, 0);
- old_cursor.seek(&tab_edit.old.start, Bias::Right, &());
+ old_cursor.seek(&tab_edit.old.start, Bias::Right);
let mut old_start = old_cursor.start().output.lines;
old_start += tab_edit.old.start.0 - old_cursor.start().input.lines;
- old_cursor.seek(&tab_edit.old.end, Bias::Right, &());
+ old_cursor.seek(&tab_edit.old.end, Bias::Right);
let mut old_end = old_cursor.start().output.lines;
old_end += tab_edit.old.end.0 - old_cursor.start().input.lines;
- new_cursor.seek(&tab_edit.new.start, Bias::Right, &());
+ new_cursor.seek(&tab_edit.new.start, Bias::Right);
let mut new_start = new_cursor.start().output.lines;
new_start += tab_edit.new.start.0 - new_cursor.start().input.lines;
- new_cursor.seek(&tab_edit.new.end, Bias::Right, &());
+ new_cursor.seek(&tab_edit.new.end, Bias::Right);
let mut new_end = new_cursor.start().output.lines;
new_end += tab_edit.new.end.0 - new_cursor.start().input.lines;
@@ -605,7 +599,7 @@ impl WrapSnapshot {
let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0);
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
- transforms.seek(&output_start, Bias::Right, &());
+ transforms.seek(&output_start, Bias::Right);
let mut input_start = TabPoint(transforms.start().1.0);
if transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_start.0 += output_start.0 - transforms.start().0.0;
@@ -633,7 +627,7 @@ impl WrapSnapshot {
pub fn line_len(&self, row: u32) -> u32 {
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
- cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left, &());
+ cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left);
if cursor
.item()
.map_or(false, |transform| transform.is_isomorphic())
@@ -658,10 +652,10 @@ impl WrapSnapshot {
let end = WrapPoint::new(rows.end, 0);
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
- cursor.seek(&start, Bias::Right, &());
+ cursor.seek(&start, Bias::Right);
if let Some(transform) = cursor.item() {
let start_in_transform = start.0 - cursor.start().0.0;
- let end_in_transform = cmp::min(end, cursor.end(&()).0).0 - cursor.start().0.0;
+ let end_in_transform = cmp::min(end, cursor.end().0).0 - cursor.start().0.0;
if transform.is_isomorphic() {
let tab_start = TabPoint(cursor.start().1.0 + start_in_transform);
let tab_end = TabPoint(cursor.start().1.0 + end_in_transform);
@@ -678,12 +672,12 @@ impl WrapSnapshot {
};
}
- cursor.next(&());
+ cursor.next();
}
if rows.end > cursor.start().0.row() {
summary += &cursor
- .summary::<_, TransformSummary>(&WrapPoint::new(rows.end, 0), Bias::Right, &())
+ .summary::<_, TransformSummary>(&WrapPoint::new(rows.end, 0), Bias::Right)
.output;
if let Some(transform) = cursor.item() {
@@ -712,7 +706,7 @@ impl WrapSnapshot {
pub fn soft_wrap_indent(&self, row: u32) -> Option<u32> {
let mut cursor = self.transforms.cursor::<WrapPoint>(&());
- cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right, &());
+ cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right);
cursor.item().and_then(|transform| {
if transform.is_isomorphic() {
None
@@ -728,7 +722,7 @@ impl WrapSnapshot {
pub fn row_infos(&self, start_row: u32) -> WrapRows<'_> {
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
- transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
+ transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left);
let mut input_row = transforms.start().1.row();
if transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_row += start_row - transforms.start().0.row();
@@ -748,7 +742,7 @@ impl WrapSnapshot {
pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint {
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
- cursor.seek(&point, Bias::Right, &());
+ cursor.seek(&point, Bias::Right);
let mut tab_point = cursor.start().1.0;
if cursor.item().map_or(false, |t| t.is_isomorphic()) {
tab_point += point.0 - cursor.start().0.0;
@@ -766,14 +760,14 @@ impl WrapSnapshot {
pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> WrapPoint {
let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(&());
- cursor.seek(&point, Bias::Right, &());
+ cursor.seek(&point, Bias::Right);
WrapPoint(cursor.start().1.0 + (point.0 - cursor.start().0.0))
}
pub fn clip_point(&self, mut point: WrapPoint, bias: Bias) -> WrapPoint {
if bias == Bias::Left {
let mut cursor = self.transforms.cursor::<WrapPoint>(&());
- cursor.seek(&point, Bias::Right, &());
+ cursor.seek(&point, Bias::Right);
if cursor.item().map_or(false, |t| !t.is_isomorphic()) {
point = *cursor.start();
*point.column_mut() -= 1;
@@ -791,16 +785,16 @@ impl WrapSnapshot {
*point.column_mut() = 0;
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
- cursor.seek(&point, Bias::Right, &());
+ cursor.seek(&point, Bias::Right);
if cursor.item().is_none() {
- cursor.prev(&());
+ cursor.prev();
}
while let Some(transform) = cursor.item() {
if transform.is_isomorphic() && cursor.start().1.column() == 0 {
- return cmp::min(cursor.end(&()).0.row(), point.row());
+ return cmp::min(cursor.end().0.row(), point.row());
} else {
- cursor.prev(&());
+ cursor.prev();
}
}
@@ -811,12 +805,12 @@ impl WrapSnapshot {
point.0 += Point::new(1, 0);
let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
- cursor.seek(&point, Bias::Right, &());
+ cursor.seek(&point, Bias::Right);
while let Some(transform) = cursor.item() {
if transform.is_isomorphic() && cursor.start().1.column() == 0 {
return Some(cmp::max(cursor.start().0.row(), point.row()));
} else {
- cursor.next(&());
+ cursor.next();
}
}
@@ -889,7 +883,7 @@ impl WrapChunks<'_> {
pub(crate) fn seek(&mut self, rows: Range<u32>) {
let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0);
- self.transforms.seek(&output_start, Bias::Right, &());
+ self.transforms.seek(&output_start, Bias::Right);
let mut input_start = TabPoint(self.transforms.start().1.0);
if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
input_start.0 += output_start.0 - self.transforms.start().0.0;
@@ -930,7 +924,7 @@ impl<'a> Iterator for WrapChunks<'a> {
}
self.output_position.0 += summary;
- self.transforms.next(&());
+ self.transforms.next();
return Some(Chunk {
text: &display_text[start_ix..end_ix],
..Default::default()
@@ -942,7 +936,7 @@ impl<'a> Iterator for WrapChunks<'a> {
}
let mut input_len = 0;
- let transform_end = self.transforms.end(&()).0;
+ let transform_end = self.transforms.end().0;
for c in self.input_chunk.text.chars() {
let char_len = c.len_utf8();
input_len += char_len;
@@ -954,7 +948,7 @@ impl<'a> Iterator for WrapChunks<'a> {
}
if self.output_position >= transform_end {
- self.transforms.next(&());
+ self.transforms.next();
break;
}
}
@@ -982,7 +976,7 @@ impl Iterator for WrapRows<'_> {
self.output_row += 1;
self.transforms
- .seek_forward(&WrapPoint::new(self.output_row, 0), Bias::Left, &());
+ .seek_forward(&WrapPoint::new(self.output_row, 0), Bias::Left);
if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
self.input_buffer_row = self.input_buffer_rows.next().unwrap();
self.soft_wrapped = false;
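The common thread in the inlay-map and wrap-map hunks above (and the git-blame and gpui list hunks further down) is a single API change: the sum-tree cursor methods — `seek`, `seek_forward`, `next`, `prev`, `end`, `slice`, `suffix`, `summary` — no longer take an explicit `&()` context argument at every call site. As a minimal standalone sketch of what that kind of signature change looks like (illustrative stand-in types, not Zed's actual `sum_tree` cursor):

```rust
// Illustrative stand-in for a cursor whose methods used to thread a unit
// context (`&()`) through every call; not Zed's real sum_tree API.
struct Cursor {
    position: usize,
    len: usize,
}

impl Cursor {
    // Before: `fn next(&mut self, _cx: &()) { ... }`, called as `cursor.next(&())`.
    // After: the unused context parameter is dropped entirely.
    fn next(&mut self) {
        self.position = (self.position + 1).min(self.len);
    }

    fn end(&self) -> usize {
        (self.position + 1).min(self.len)
    }
}

fn main() {
    let mut cursor = Cursor { position: 0, len: 3 };
    // Call sites shrink from `cursor.next(&())` / `cursor.end(&())` to:
    cursor.next();
    assert_eq!(cursor.end(), 2);
}
```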
@@ -213,6 +213,7 @@ use workspace::{
notifications::{DetachAndPromptErr, NotificationId, NotifyTaskExt},
searchable::SearchEvent,
};
+use zed_actions;
use crate::{
code_context_menus::CompletionsMenuSource,
@@ -12154,6 +12155,41 @@ impl Editor {
});
}
+ pub fn diff_clipboard_with_selection(
+ &mut self,
+ _: &DiffClipboardWithSelection,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let selections = self.selections.all::<usize>(cx);
+
+ if selections.is_empty() {
+ log::warn!("There should always be at least one selection in Zed. This is a bug.");
+ return;
+ };
+
+ let clipboard_text = match cx.read_from_clipboard() {
+ Some(item) => match item.entries().first() {
+ Some(ClipboardEntry::String(text)) => Some(text.text().to_string()),
+ _ => None,
+ },
+ None => None,
+ };
+
+ let Some(clipboard_text) = clipboard_text else {
+ log::warn!("Clipboard doesn't contain text.");
+ return;
+ };
+
+ window.dispatch_action(
+ Box::new(DiffClipboardWithSelectionData {
+ clipboard_text,
+ editor: cx.entity(),
+ }),
+ cx,
+ );
+ }
+
pub fn paste(&mut self, _: &Paste, window: &mut Window, cx: &mut Context<Self>) {
self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
if let Some(item) = cx.read_from_clipboard() {
@@ -9570,6 +9570,74 @@ async fn test_document_format_during_save(cx: &mut TestAppContext) {
}
}
+#[gpui::test]
+async fn test_redo_after_noop_format(cx: &mut TestAppContext) {
+ init_test(cx, |settings| {
+ settings.defaults.ensure_final_newline_on_save = Some(false);
+ });
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_file(path!("/file.txt"), "foo".into()).await;
+
+ let project = Project::test(fs, [path!("/file.txt").as_ref()], cx).await;
+
+ let buffer = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/file.txt"), cx)
+ })
+ .await
+ .unwrap();
+
+ let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
+ let (editor, cx) = cx.add_window_view(|window, cx| {
+ build_editor_with_project(project.clone(), buffer, window, cx)
+ });
+ editor.update_in(cx, |editor, window, cx| {
+ editor.change_selections(SelectionEffects::default(), window, cx, |s| {
+ s.select_ranges([0..0])
+ });
+ });
+ assert!(!cx.read(|cx| editor.is_dirty(cx)));
+
+ editor.update_in(cx, |editor, window, cx| {
+ editor.handle_input("\n", window, cx)
+ });
+ cx.run_until_parked();
+ save(&editor, &project, cx).await;
+ assert_eq!("\nfoo", editor.read_with(cx, |editor, cx| editor.text(cx)));
+
+ editor.update_in(cx, |editor, window, cx| {
+ editor.undo(&Default::default(), window, cx);
+ });
+ save(&editor, &project, cx).await;
+ assert_eq!("foo", editor.read_with(cx, |editor, cx| editor.text(cx)));
+
+ editor.update_in(cx, |editor, window, cx| {
+ editor.redo(&Default::default(), window, cx);
+ });
+ cx.run_until_parked();
+ assert_eq!("\nfoo", editor.read_with(cx, |editor, cx| editor.text(cx)));
+
+ async fn save(editor: &Entity<Editor>, project: &Entity<Project>, cx: &mut VisualTestContext) {
+ let save = editor
+ .update_in(cx, |editor, window, cx| {
+ editor.save(
+ SaveOptions {
+ format: true,
+ autosave: false,
+ },
+ project.clone(),
+ window,
+ cx,
+ )
+ })
+ .unwrap();
+ cx.executor().start_waiting();
+ save.await;
+ assert!(!cx.read(|cx| editor.is_dirty(cx)));
+ }
+}
+
#[gpui::test]
async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -22708,7 +22776,7 @@ pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsC
workspace::init_settings(cx);
crate::init(cx);
});
-
+ zlog::init_test();
update_test_language_settings(cx, f);
}
@@ -262,6 +262,7 @@ impl EditorElement {
register_action(editor, window, Editor::kill_ring_yank);
register_action(editor, window, Editor::copy);
register_action(editor, window, Editor::copy_and_trim);
+ register_action(editor, window, Editor::diff_clipboard_with_selection);
register_action(editor, window, Editor::paste);
register_action(editor, window, Editor::undo);
register_action(editor, window, Editor::redo);
@@ -296,7 +296,7 @@ impl GitBlame {
let row = info
.buffer_row
.filter(|_| info.buffer_id == Some(buffer_id))?;
- cursor.seek_forward(&row, Bias::Right, &());
+ cursor.seek_forward(&row, Bias::Right);
cursor.item()?.blame.clone()
})
}
@@ -389,7 +389,7 @@ impl GitBlame {
}
}
- new_entries.append(cursor.slice(&edit.old.start, Bias::Right, &()), &());
+ new_entries.append(cursor.slice(&edit.old.start, Bias::Right), &());
if edit.new.start > new_entries.summary().rows {
new_entries.push(
@@ -401,7 +401,7 @@ impl GitBlame {
);
}
- cursor.seek(&edit.old.end, Bias::Right, &());
+ cursor.seek(&edit.old.end, Bias::Right);
if !edit.new.is_empty() {
new_entries.push(
GitBlameEntry {
@@ -412,7 +412,7 @@ impl GitBlame {
);
}
- let old_end = cursor.end(&());
+ let old_end = cursor.end();
if row_edits
.peek()
.map_or(true, |next_edit| next_edit.old.start >= old_end)
@@ -421,18 +421,18 @@ impl GitBlame {
if old_end > edit.old.end {
new_entries.push(
GitBlameEntry {
- rows: cursor.end(&()) - edit.old.end,
+ rows: cursor.end() - edit.old.end,
blame: entry.blame.clone(),
},
&(),
);
}
- cursor.next(&());
+ cursor.next();
}
}
}
- new_entries.append(cursor.suffix(&()), &());
+ new_entries.append(cursor.suffix(), &());
drop(cursor);
self.buffer_snapshot = new_snapshot;
@@ -23,6 +23,7 @@ askpass.workspace = true
buffer_diff.workspace = true
call.workspace = true
chrono.workspace = true
+client.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
component.workspace = true
@@ -1,8 +1,10 @@
use crate::branch_picker::{self, BranchList};
use crate::git_panel::{GitPanel, commit_message_editor};
+use client::DisableAiSettings;
use git::repository::CommitOptions;
use git::{Amend, Commit, GenerateCommitMessage, Signoff};
use panel::{panel_button, panel_editor_style};
+use settings::Settings;
use ui::{
ContextMenu, KeybindingHint, PopoverMenu, PopoverMenuHandle, SplitButton, Tooltip, prelude::*,
};
@@ -569,11 +571,13 @@ impl Render for CommitModal {
.on_action(cx.listener(Self::dismiss))
.on_action(cx.listener(Self::commit))
.on_action(cx.listener(Self::amend))
- .on_action(cx.listener(|this, _: &GenerateCommitMessage, _, cx| {
- this.git_panel.update(cx, |panel, cx| {
- panel.generate_commit_message(cx);
- })
- }))
+ .when(!DisableAiSettings::get_global(cx).disable_ai, |this| {
+ this.on_action(cx.listener(|this, _: &GenerateCommitMessage, _, cx| {
+ this.git_panel.update(cx, |panel, cx| {
+ panel.generate_commit_message(cx);
+ })
+ }))
+ })
.on_action(
cx.listener(|this, _: &zed_actions::git::Branch, window, cx| {
this.toggle_branch_selector(window, cx);
@@ -1,4 +1,4 @@
-//! DiffView provides a UI for displaying differences between two buffers.
+//! FileDiffView provides a UI for displaying differences between two buffers.
use anyhow::Result;
use buffer_diff::{BufferDiff, BufferDiffSnapshot};
@@ -25,7 +25,7 @@ use workspace::{
searchable::SearchableItemHandle,
};
-pub struct DiffView {
+pub struct FileDiffView {
editor: Entity<Editor>,
old_buffer: Entity<Buffer>,
new_buffer: Entity<Buffer>,
@@ -35,7 +35,7 @@ pub struct DiffView {
const RECALCULATE_DIFF_DEBOUNCE: Duration = Duration::from_millis(250);
-impl DiffView {
+impl FileDiffView {
pub fn open(
old_path: PathBuf,
new_path: PathBuf,
@@ -57,7 +57,7 @@ impl DiffView {
workspace.update_in(cx, |workspace, window, cx| {
let diff_view = cx.new(|cx| {
- DiffView::new(
+ FileDiffView::new(
old_buffer,
new_buffer,
buffer_diff,
@@ -190,15 +190,15 @@ async fn build_buffer_diff(
})
}
-impl EventEmitter<EditorEvent> for DiffView {}
+impl EventEmitter<EditorEvent> for FileDiffView {}
-impl Focusable for DiffView {
+impl Focusable for FileDiffView {
fn focus_handle(&self, cx: &App) -> FocusHandle {
self.editor.focus_handle(cx)
}
}
-impl Item for DiffView {
+impl Item for FileDiffView {
type Event = EditorEvent;
fn tab_icon(&self, _window: &Window, _cx: &App) -> Option<Icon> {
@@ -216,48 +216,37 @@ impl Item for DiffView {
}
fn tab_content_text(&self, _detail: usize, cx: &App) -> SharedString {
- let old_filename = self
- .old_buffer
- .read(cx)
- .file()
- .and_then(|file| {
- Some(
- file.full_path(cx)
- .file_name()?
- .to_string_lossy()
- .to_string(),
- )
- })
- .unwrap_or_else(|| "untitled".into());
- let new_filename = self
- .new_buffer
- .read(cx)
- .file()
- .and_then(|file| {
- Some(
- file.full_path(cx)
- .file_name()?
- .to_string_lossy()
- .to_string(),
- )
- })
- .unwrap_or_else(|| "untitled".into());
+ let title_text = |buffer: &Entity<Buffer>| {
+ buffer
+ .read(cx)
+ .file()
+ .and_then(|file| {
+ Some(
+ file.full_path(cx)
+ .file_name()?
+ .to_string_lossy()
+ .to_string(),
+ )
+ })
+ .unwrap_or_else(|| "untitled".into())
+ };
+ let old_filename = title_text(&self.old_buffer);
+ let new_filename = title_text(&self.new_buffer);
+
format!("{old_filename} ↔ {new_filename}").into()
}
fn tab_tooltip_text(&self, cx: &App) -> Option<ui::SharedString> {
- let old_path = self
- .old_buffer
- .read(cx)
- .file()
- .map(|file| file.full_path(cx).compact().to_string_lossy().to_string())
- .unwrap_or_else(|| "untitled".into());
- let new_path = self
- .new_buffer
- .read(cx)
- .file()
- .map(|file| file.full_path(cx).compact().to_string_lossy().to_string())
- .unwrap_or_else(|| "untitled".into());
+ let path = |buffer: &Entity<Buffer>| {
+ buffer
+ .read(cx)
+ .file()
+ .map(|file| file.full_path(cx).compact().to_string_lossy().to_string())
+ .unwrap_or_else(|| "untitled".into())
+ };
+ let old_path = path(&self.old_buffer);
+ let new_path = path(&self.new_buffer);
+
Some(format!("{old_path} ↔ {new_path}").into())
}
@@ -363,7 +352,7 @@ impl Item for DiffView {
}
}
-impl Render for DiffView {
+impl Render for FileDiffView {
fn render(&mut self, _: &mut Window, _: &mut Context<Self>) -> impl IntoElement {
self.editor.clone()
}
@@ -407,16 +396,16 @@ mod tests {
)
.await;
- let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
let (workspace, mut cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
let diff_view = workspace
.update_in(cx, |workspace, window, cx| {
- DiffView::open(
- PathBuf::from(path!("/test/old_file.txt")),
- PathBuf::from(path!("/test/new_file.txt")),
+ FileDiffView::open(
+ path!("/test/old_file.txt").into(),
+ path!("/test/new_file.txt").into(),
workspace,
window,
cx,
@@ -510,6 +499,21 @@ mod tests {
",
),
);
+
+ diff_view.read_with(cx, |diff_view, cx| {
+ assert_eq!(
+ diff_view.tab_content_text(0, cx),
+ "old_file.txt ↔ new_file.txt"
+ );
+ assert_eq!(
+ diff_view.tab_tooltip_text(cx).unwrap(),
+ format!(
+ "{} ↔ {}",
+ path!("test/old_file.txt"),
+ path!("test/new_file.txt")
+ )
+ );
+ })
}
#[gpui::test]
@@ -533,7 +537,7 @@ mod tests {
let diff_view = workspace
.update_in(cx, |workspace, window, cx| {
- DiffView::open(
+ FileDiffView::open(
PathBuf::from(path!("/test/old_file.txt")),
PathBuf::from(path!("/test/new_file.txt")),
workspace,
@@ -12,6 +12,7 @@ use crate::{
use agent_settings::AgentSettings;
use anyhow::Context as _;
use askpass::AskPassDelegate;
+use client::DisableAiSettings;
use db::kvp::KEY_VALUE_STORE;
use editor::{
Editor, EditorElement, EditorMode, EditorSettings, MultiBuffer, ShowScrollbar,
@@ -53,7 +54,7 @@ use project::{
git_store::{GitStoreEvent, Repository},
};
use serde::{Deserialize, Serialize};
-use settings::{Settings as _, SettingsStore};
+use settings::{Settings, SettingsStore};
use std::future::Future;
use std::ops::Range;
use std::path::{Path, PathBuf};
@@ -464,9 +465,14 @@ impl GitPanel {
};
let mut assistant_enabled = AgentSettings::get_global(cx).enabled;
+ let mut was_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
let _settings_subscription = cx.observe_global::<SettingsStore>(move |_, cx| {
- if assistant_enabled != AgentSettings::get_global(cx).enabled {
+ let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
+ if assistant_enabled != AgentSettings::get_global(cx).enabled
+ || was_ai_disabled != is_ai_disabled
+ {
assistant_enabled = AgentSettings::get_global(cx).enabled;
+ was_ai_disabled = is_ai_disabled;
cx.notify();
}
});
@@ -1806,7 +1812,7 @@ impl GitPanel {
/// Generates a commit message using an LLM.
pub fn generate_commit_message(&mut self, cx: &mut Context<Self>) {
- if !self.can_commit() {
+ if !self.can_commit() || DisableAiSettings::get_global(cx).disable_ai {
return;
}
@@ -4305,8 +4311,10 @@ impl GitPanel {
}
fn current_language_model(cx: &Context<'_, GitPanel>) -> Option<Arc<dyn LanguageModel>> {
- agent_settings::AgentSettings::get_global(cx)
- .enabled
+ let is_enabled = agent_settings::AgentSettings::get_global(cx).enabled
+ && !DisableAiSettings::get_global(cx).disable_ai;
+
+ is_enabled
.then(|| {
let ConfiguredModel { provider, model } =
LanguageModelRegistry::read_global(cx).commit_message_model()?;
@@ -5037,6 +5045,7 @@ mod tests {
language::init(cx);
editor::init(cx);
Project::init_settings(cx);
+ client::DisableAiSettings::register(cx);
crate::init(cx);
});
}
@@ -3,7 +3,7 @@ use std::any::Any;
use ::settings::Settings;
use command_palette_hooks::CommandPaletteFilter;
use commit_modal::CommitModal;
-use editor::Editor;
+use editor::{Editor, actions::DiffClipboardWithSelectionData};
mod blame_ui;
use git::{
repository::{Branch, Upstream, UpstreamTracking, UpstreamTrackingStatus},
@@ -15,6 +15,9 @@ use onboarding::GitOnboardingModal;
use project_diff::ProjectDiff;
use ui::prelude::*;
use workspace::Workspace;
+use zed_actions;
+
+use crate::text_diff_view::TextDiffView;
mod askpass_modal;
pub mod branch_picker;
@@ -22,7 +25,7 @@ mod commit_modal;
pub mod commit_tooltip;
mod commit_view;
mod conflict_view;
-pub mod diff_view;
+pub mod file_diff_view;
pub mod git_panel;
mod git_panel_settings;
pub mod onboarding;
@@ -30,6 +33,7 @@ pub mod picker_prompt;
pub mod project_diff;
pub(crate) mod remote_output;
pub mod repository_selector;
+pub mod text_diff_view;
actions!(
git,
@@ -152,6 +156,13 @@ pub fn init(cx: &mut App) {
workspace.register_action(|workspace, _: &git::OpenModifiedFiles, window, cx| {
open_modified_files(workspace, window, cx);
});
+ workspace.register_action(
+ |workspace, action: &DiffClipboardWithSelectionData, window, cx| {
+ if let Some(task) = TextDiffView::open(action, workspace, window, cx) {
+ task.detach();
+ };
+ },
+ );
})
.detach();
}
@@ -501,7 +512,7 @@ mod remote_button {
)
.into_any_element();
- SplitButton { left, right }
+ SplitButton::new(left, right)
}
}
@@ -0,0 +1,554 @@
+//! TextDiffView currently provides a UI for displaying differences between the clipboard and selected text.
+
+use anyhow::Result;
+use buffer_diff::{BufferDiff, BufferDiffSnapshot};
+use editor::{Editor, EditorEvent, MultiBuffer, ToPoint, actions::DiffClipboardWithSelectionData};
+use futures::{FutureExt, select_biased};
+use gpui::{
+ AnyElement, AnyView, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter,
+ FocusHandle, Focusable, IntoElement, Render, Task, Window,
+};
+use language::{self, Buffer, Point};
+use project::Project;
+use std::{
+ any::{Any, TypeId},
+ ops::Range,
+ pin::pin,
+ sync::Arc,
+ time::Duration,
+};
+use ui::{Color, Icon, IconName, Label, LabelCommon as _, SharedString};
+use util::paths::PathExt;
+
+use workspace::{
+ Item, ItemHandle as _, ItemNavHistory, ToolbarItemLocation, Workspace,
+ item::{BreadcrumbText, ItemEvent, SaveOptions, TabContentParams},
+ searchable::SearchableItemHandle,
+};
+
+pub struct TextDiffView {
+ diff_editor: Entity<Editor>,
+ title: SharedString,
+ path: Option<SharedString>,
+ buffer_changes_tx: watch::Sender<()>,
+ _recalculate_diff_task: Task<Result<()>>,
+}
+
+const RECALCULATE_DIFF_DEBOUNCE: Duration = Duration::from_millis(250);
+
+impl TextDiffView {
+ pub fn open(
+ diff_data: &DiffClipboardWithSelectionData,
+ workspace: &Workspace,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Option<Task<Result<Entity<Self>>>> {
+ let source_editor = diff_data.editor.clone();
+
+ let source_editor_buffer_and_range = source_editor.update(cx, |editor, cx| {
+ let multibuffer = editor.buffer().read(cx);
+ let source_buffer = multibuffer.as_singleton()?.clone();
+ let selections = editor.selections.all::<Point>(cx);
+ let buffer_snapshot = source_buffer.read(cx);
+ let first_selection = selections.first()?;
+ let selection_range = if first_selection.is_empty() {
+ Point::new(0, 0)..buffer_snapshot.max_point()
+ } else {
+ first_selection.start..first_selection.end
+ };
+
+ Some((source_buffer, selection_range))
+ });
+
+ let Some((source_buffer, selected_range)) = source_editor_buffer_and_range else {
+ log::warn!("There should always be at least one selection in Zed. This is a bug.");
+ return None;
+ };
+
+ let clipboard_text = diff_data.clipboard_text.clone();
+
+ let workspace = workspace.weak_handle();
+
+ let diff_buffer = cx.new(|cx| {
+ let source_buffer_snapshot = source_buffer.read(cx).snapshot();
+ let diff = BufferDiff::new(&source_buffer_snapshot.text, cx);
+ diff
+ });
+
+ let clipboard_buffer =
+ build_clipboard_buffer(clipboard_text, &source_buffer, selected_range.clone(), cx);
+
+ let task = window.spawn(cx, async move |cx| {
+ let project = workspace.update(cx, |workspace, _| workspace.project().clone())?;
+
+ update_diff_buffer(&diff_buffer, &source_buffer, &clipboard_buffer, cx).await?;
+
+ workspace.update_in(cx, |workspace, window, cx| {
+ let diff_view = cx.new(|cx| {
+ TextDiffView::new(
+ clipboard_buffer,
+ source_editor,
+ source_buffer,
+ selected_range,
+ diff_buffer,
+ project,
+ window,
+ cx,
+ )
+ });
+
+ let pane = workspace.active_pane();
+ pane.update(cx, |pane, cx| {
+ pane.add_item(Box::new(diff_view.clone()), true, true, None, window, cx);
+ });
+
+ diff_view
+ })
+ });
+
+ Some(task)
+ }
+
+ pub fn new(
+ clipboard_buffer: Entity<Buffer>,
+ source_editor: Entity<Editor>,
+ source_buffer: Entity<Buffer>,
+ source_range: Range<Point>,
+ diff_buffer: Entity<BufferDiff>,
+ project: Entity<Project>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Self {
+ let multibuffer = cx.new(|cx| {
+ let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite);
+
+ multibuffer.push_excerpts(
+ source_buffer.clone(),
+ [editor::ExcerptRange::new(source_range)],
+ cx,
+ );
+
+ multibuffer.add_diff(diff_buffer.clone(), cx);
+ multibuffer
+ });
+ let diff_editor = cx.new(|cx| {
+ let mut editor = Editor::for_multibuffer(multibuffer, Some(project), window, cx);
+ editor.start_temporary_diff_override();
+ editor.disable_diagnostics(cx);
+ editor.set_expand_all_diff_hunks(cx);
+ editor.set_render_diff_hunk_controls(
+ Arc::new(|_, _, _, _, _, _, _, _| gpui::Empty.into_any_element()),
+ cx,
+ );
+ editor
+ });
+
+ let (buffer_changes_tx, mut buffer_changes_rx) = watch::channel(());
+
+ cx.subscribe(&source_buffer, move |this, _, event, _| match event {
+ language::BufferEvent::Edited
+ | language::BufferEvent::LanguageChanged
+ | language::BufferEvent::Reparsed => {
+ this.buffer_changes_tx.send(()).ok();
+ }
+ _ => {}
+ })
+ .detach();
+
+ let editor = source_editor.read(cx);
+ let title = editor.buffer().read(cx).title(cx).to_string();
+ let selection_location_text = selection_location_text(editor, cx);
+ let selection_location_title = selection_location_text
+ .as_ref()
+ .map(|text| format!("{} @ {}", title, text))
+ .unwrap_or(title);
+
+ let path = editor
+ .buffer()
+ .read(cx)
+ .as_singleton()
+ .and_then(|b| {
+ b.read(cx)
+ .file()
+ .map(|f| f.full_path(cx).compact().to_string_lossy().to_string())
+ })
+ .unwrap_or("untitled".into());
+
+ let selection_location_path = selection_location_text
+ .map(|text| format!("{} @ {}", path, text))
+ .unwrap_or(path);
+
+ Self {
+ diff_editor,
+ title: format!("Clipboard ↔ {selection_location_title}").into(),
+ path: Some(format!("Clipboard ↔ {selection_location_path}").into()),
+ buffer_changes_tx,
+ _recalculate_diff_task: cx.spawn(async move |_, cx| {
+ while let Ok(_) = buffer_changes_rx.recv().await {
+ loop {
+ let mut timer = cx
+ .background_executor()
+ .timer(RECALCULATE_DIFF_DEBOUNCE)
+ .fuse();
+ let mut recv = pin!(buffer_changes_rx.recv().fuse());
+ select_biased! {
+ _ = timer => break,
+ _ = recv => continue,
+ }
+ }
+
+ log::trace!("start recalculating");
+ update_diff_buffer(&diff_buffer, &source_buffer, &clipboard_buffer, cx).await?;
+ log::trace!("finish recalculating");
+ }
+ Ok(())
+ }),
+ }
+ }
+}
+
+fn build_clipboard_buffer(
+ clipboard_text: String,
+ source_buffer: &Entity<Buffer>,
+ selected_range: Range<Point>,
+ cx: &mut App,
+) -> Entity<Buffer> {
+ let source_buffer_snapshot = source_buffer.read(cx).snapshot();
+ cx.new(|cx| {
+ let mut buffer = language::Buffer::local(source_buffer_snapshot.text(), cx);
+ let language = source_buffer.read(cx).language().cloned();
+ buffer.set_language(language, cx);
+
+ let range_start = source_buffer_snapshot.point_to_offset(selected_range.start);
+ let range_end = source_buffer_snapshot.point_to_offset(selected_range.end);
+ buffer.edit([(range_start..range_end, clipboard_text)], None, cx);
+
+ buffer
+ })
+}
+
+async fn update_diff_buffer(
+ diff: &Entity<BufferDiff>,
+ source_buffer: &Entity<Buffer>,
+ clipboard_buffer: &Entity<Buffer>,
+ cx: &mut AsyncApp,
+) -> Result<()> {
+ let source_buffer_snapshot = source_buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
+
+ let base_buffer_snapshot = clipboard_buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
+ let base_text = base_buffer_snapshot.text().to_string();
+
+ let diff_snapshot = cx
+ .update(|cx| {
+ BufferDiffSnapshot::new_with_base_buffer(
+ source_buffer_snapshot.text.clone(),
+ Some(Arc::new(base_text)),
+ base_buffer_snapshot,
+ cx,
+ )
+ })?
+ .await;
+
+ diff.update(cx, |diff, cx| {
+ diff.set_snapshot(diff_snapshot, &source_buffer_snapshot.text, cx);
+ })?;
+ Ok(())
+}
+
+impl EventEmitter<EditorEvent> for TextDiffView {}
+
+impl Focusable for TextDiffView {
+ fn focus_handle(&self, cx: &App) -> FocusHandle {
+ self.diff_editor.focus_handle(cx)
+ }
+}
+
+impl Item for TextDiffView {
+ type Event = EditorEvent;
+
+ fn tab_icon(&self, _window: &Window, _cx: &App) -> Option<Icon> {
+ Some(Icon::new(IconName::Diff).color(Color::Muted))
+ }
+
+ fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement {
+ Label::new(self.tab_content_text(params.detail.unwrap_or_default(), cx))
+ .color(if params.selected {
+ Color::Default
+ } else {
+ Color::Muted
+ })
+ .into_any_element()
+ }
+
+ fn tab_content_text(&self, _detail: usize, _: &App) -> SharedString {
+ self.title.clone()
+ }
+
+ fn tab_tooltip_text(&self, _: &App) -> Option<SharedString> {
+ self.path.clone()
+ }
+
+ fn to_item_events(event: &EditorEvent, f: impl FnMut(ItemEvent)) {
+ Editor::to_item_events(event, f)
+ }
+
+ fn telemetry_event_text(&self) -> Option<&'static str> {
+ Some("Diff View Opened")
+ }
+
+ fn deactivated(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ self.diff_editor
+ .update(cx, |editor, cx| editor.deactivated(window, cx));
+ }
+
+ fn is_singleton(&self, _: &App) -> bool {
+ false
+ }
+
+ fn act_as_type<'a>(
+ &'a self,
+ type_id: TypeId,
+ self_handle: &'a Entity<Self>,
+ _: &'a App,
+ ) -> Option<AnyView> {
+ if type_id == TypeId::of::<Self>() {
+ Some(self_handle.to_any())
+ } else if type_id == TypeId::of::<Editor>() {
+ Some(self.diff_editor.to_any())
+ } else {
+ None
+ }
+ }
+
+ fn as_searchable(&self, _: &Entity<Self>) -> Option<Box<dyn SearchableItemHandle>> {
+ Some(Box::new(self.diff_editor.clone()))
+ }
+
+ fn for_each_project_item(
+ &self,
+ cx: &App,
+ f: &mut dyn FnMut(gpui::EntityId, &dyn project::ProjectItem),
+ ) {
+ self.diff_editor.for_each_project_item(cx, f)
+ }
+
+ fn set_nav_history(
+ &mut self,
+ nav_history: ItemNavHistory,
+ _: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.diff_editor.update(cx, |editor, _| {
+ editor.set_nav_history(Some(nav_history));
+ });
+ }
+
+ fn navigate(
+ &mut self,
+ data: Box<dyn Any>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> bool {
+ self.diff_editor
+ .update(cx, |editor, cx| editor.navigate(data, window, cx))
+ }
+
+ fn breadcrumb_location(&self, _: &App) -> ToolbarItemLocation {
+ ToolbarItemLocation::PrimaryLeft
+ }
+
+ fn breadcrumbs(&self, theme: &theme::Theme, cx: &App) -> Option<Vec<BreadcrumbText>> {
+ self.diff_editor.breadcrumbs(theme, cx)
+ }
+
+ fn added_to_workspace(
+ &mut self,
+ workspace: &mut Workspace,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.diff_editor.update(cx, |editor, cx| {
+ editor.added_to_workspace(workspace, window, cx)
+ });
+ }
+
+ fn can_save(&self, cx: &App) -> bool {
+ // The editor handles the new buffer, so delegate to it
+ self.diff_editor.read(cx).can_save(cx)
+ }
+
+ fn save(
+ &mut self,
+ options: SaveOptions,
+ project: Entity<Project>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<()>> {
+ // Delegate saving to the editor, which manages the new buffer
+ self.diff_editor
+ .update(cx, |editor, cx| editor.save(options, project, window, cx))
+ }
+}
+
+pub fn selection_location_text(editor: &Editor, cx: &App) -> Option<String> {
+ let buffer = editor.buffer().read(cx);
+ let buffer_snapshot = buffer.snapshot(cx);
+ let first_selection = editor.selections.disjoint.first()?;
+
+ let (start_row, start_column, end_row, end_column) =
+ if first_selection.start == first_selection.end {
+ let max_point = buffer_snapshot.max_point();
+ (0, 0, max_point.row, max_point.column)
+ } else {
+ let selection_start = first_selection.start.to_point(&buffer_snapshot);
+ let selection_end = first_selection.end.to_point(&buffer_snapshot);
+
+ (
+ selection_start.row,
+ selection_start.column,
+ selection_end.row,
+ selection_end.column,
+ )
+ };
+
+ let range_text = if start_row == end_row {
+ format!("L{}:{}-{}", start_row + 1, start_column + 1, end_column + 1)
+ } else {
+ format!(
+ "L{}:{}-L{}:{}",
+ start_row + 1,
+ start_column + 1,
+ end_row + 1,
+ end_column + 1
+ )
+ };
+
+ Some(range_text)
+}
+
+impl Render for TextDiffView {
+ fn render(&mut self, _: &mut Window, _: &mut Context<Self>) -> impl IntoElement {
+ self.diff_editor.clone()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use editor::{actions, test::editor_test_context::assert_state_with_diff};
+ use gpui::{TestAppContext, VisualContext};
+ use project::{FakeFs, Project};
+ use serde_json::json;
+ use settings::{Settings, SettingsStore};
+ use unindent::unindent;
+ use util::path;
+
+ fn init_test(cx: &mut TestAppContext) {
+ cx.update(|cx| {
+ let settings_store = SettingsStore::test(cx);
+ cx.set_global(settings_store);
+ language::init(cx);
+ Project::init_settings(cx);
+ workspace::init_settings(cx);
+ editor::init_settings(cx);
+ theme::ThemeSettings::register(cx)
+ });
+ }
+
+ #[gpui::test]
+ async fn test_diffing_clipboard_against_specific_selection(cx: &mut TestAppContext) {
+ base_test(true, cx).await;
+ }
+
+ #[gpui::test]
+ async fn test_diffing_clipboard_against_empty_selection_uses_full_buffer(
+ cx: &mut TestAppContext,
+ ) {
+ base_test(false, cx).await;
+ }
+
+ async fn base_test(select_all_text: bool, cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/test"),
+ json!({
+ "a": {
+ "b": {
+ "text.txt": "new line 1\nline 2\nnew line 3\nline 4"
+ }
+ }
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, [path!("/test").as_ref()], cx).await;
+
+ let (workspace, mut cx) =
+ cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
+
+ let buffer = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/test/a/b/text.txt"), cx)
+ })
+ .await
+ .unwrap();
+
+ let editor = cx.new_window_entity(|window, cx| {
+ let mut editor = Editor::for_buffer(buffer, None, window, cx);
+ editor.set_text("new line 1\nline 2\nnew line 3\nline 4\n", window, cx);
+
+ if select_all_text {
+ editor.select_all(&actions::SelectAll, window, cx);
+ }
+
+ editor
+ });
+
+ let diff_view = workspace
+ .update_in(cx, |workspace, window, cx| {
+ TextDiffView::open(
+ &DiffClipboardWithSelectionData {
+ clipboard_text: "old line 1\nline 2\nold line 3\nline 4\n".to_string(),
+ editor,
+ },
+ workspace,
+ window,
+ cx,
+ )
+ })
+ .unwrap()
+ .await
+ .unwrap();
+
+ cx.executor().run_until_parked();
+
+ assert_state_with_diff(
+ &diff_view.read_with(cx, |diff_view, _| diff_view.diff_editor.clone()),
+ &mut cx,
+ &unindent(
+ "
+ - old line 1
+ + ˇnew line 1
+ line 2
+ - old line 3
+ + new line 3
+ line 4
+ ",
+ ),
+ );
+
+ diff_view.read_with(cx, |diff_view, cx| {
+ assert_eq!(
+ diff_view.tab_content_text(0, cx),
+ "Clipboard ↔ text.txt @ L1:1-L5:1"
+ );
+ assert_eq!(
+ diff_view.tab_tooltip_text(cx).unwrap(),
+ format!("Clipboard ↔ {}", path!("test/a/b/text.txt @ L1:1-L5:1"))
+ );
+ });
+ }
+}
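Taken together with the earlier editor and git_ui hunks, the flow for the new view is: `Editor::diff_clipboard_with_selection` reads the clipboard and dispatches `DiffClipboardWithSelectionData`, carrying the clipboard text and the editor entity; the workspace action registered in `git_ui::init` picks that action up and calls `TextDiffView::open`, which builds a clipboard-backed base buffer, computes a `BufferDiff` against the current selection (or the whole buffer when the selection is empty), and adds the resulting view to the active pane.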
@@ -295,6 +295,10 @@ path = "examples/text.rs"
name = "text_wrapper"
path = "examples/text_wrapper.rs"
+[[example]]
+name = "tree"
+path = "examples/tree.rs"
+
[[example]]
name = "uniform_list"
path = "examples/uniform_list.rs"
@@ -0,0 +1,46 @@
+//! Renders a div with a deeply nested hierarchy of children. This example is useful for
+//! exercising how Zed handles deep hierarchies (even though it cannot handle them just yet!).
+use std::sync::LazyLock;
+
+use gpui::{
+ App, Application, Bounds, Context, Window, WindowBounds, WindowOptions, div, prelude::*, px,
+ size,
+};
+
+struct Tree {}
+
+static DEPTH: LazyLock<u64> = LazyLock::new(|| {
+ std::env::var("GPUI_TREE_DEPTH")
+ .ok()
+ .and_then(|depth| depth.parse().ok())
+ .unwrap_or_else(|| 50)
+});
+
+impl Render for Tree {
+ fn render(&mut self, _: &mut Window, _: &mut Context<Self>) -> impl IntoElement {
+ let mut depth = *DEPTH;
+ static COLORS: [gpui::Hsla; 4] = [gpui::red(), gpui::blue(), gpui::green(), gpui::yellow()];
+ let mut colors = COLORS.iter().cycle().copied();
+ let mut next_div = || div().p_0p5().bg(colors.next().unwrap());
+ let mut innermost_node = next_div();
+ while depth > 0 {
+ innermost_node = next_div().child(innermost_node);
+ depth -= 1;
+ }
+ innermost_node
+ }
+}
+
+fn main() {
+ Application::new().run(|cx: &mut App| {
+ let bounds = Bounds::centered(None, size(px(300.0), px(300.0)), cx);
+ cx.open_window(
+ WindowOptions {
+ window_bounds: Some(WindowBounds::Windowed(bounds)),
+ ..Default::default()
+ },
+ |_, cx| cx.new(|_| Tree {}),
+ )
+ .unwrap();
+ });
+}
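Assuming the example is run through Cargo's example runner for the gpui crate (something like `cargo run -p gpui --example tree`, with the exact package flag depending on the workspace layout), the nesting depth defaults to 50 and can be overridden via the `GPUI_TREE_DEPTH` environment variable read above.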
@@ -370,7 +370,7 @@ impl std::fmt::Debug for AnyEntity {
}
}
-/// A strong, well typed reference to a struct which is managed
+/// A strong, well-typed reference to a struct which is managed
/// by GPUI
#[derive(Deref, DerefMut)]
pub struct Entity<T> {
@@ -1,4 +1,7 @@
-use std::time::{Duration, Instant};
+use std::{
+ rc::Rc,
+ time::{Duration, Instant},
+};
use crate::{
AnyElement, App, Element, ElementId, GlobalElementId, InspectorElementId, IntoElement, Window,
@@ -8,6 +11,7 @@ pub use easing::*;
use smallvec::SmallVec;
/// An animation that can be applied to an element.
+#[derive(Clone)]
pub struct Animation {
/// The amount of time for which this animation should run
pub duration: Duration,
@@ -15,7 +19,7 @@ pub struct Animation {
pub oneshot: bool,
/// A function that takes a delta between 0 and 1 and returns a new delta
/// between 0 and 1 based on the given easing function.
- pub easing: Box<dyn Fn(f32) -> f32>,
+ pub easing: Rc<dyn Fn(f32) -> f32>,
}
impl Animation {
@@ -25,7 +29,7 @@ impl Animation {
Self {
duration,
oneshot: true,
- easing: Box::new(linear),
+ easing: Rc::new(linear),
}
}
@@ -39,7 +43,7 @@ impl Animation {
/// The easing function will take a time delta between 0 and 1 and return a new delta
/// between 0 and 1
pub fn with_easing(mut self, easing: impl Fn(f32) -> f32 + 'static) -> Self {
- self.easing = Box::new(easing);
+ self.easing = Rc::new(easing);
self
}
}
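The switch from `Box<dyn Fn(f32) -> f32>` to `Rc<dyn Fn(f32) -> f32>` is what allows the new `#[derive(Clone)]` on `Animation`: an `Rc` trait object is cloned by bumping a reference count, while a boxed closure cannot be duplicated without knowing its concrete type. A minimal standalone sketch of the pattern (a reduced stand-in, not gpui's actual struct):

```rust
use std::rc::Rc;
use std::time::Duration;

// Reduced stand-in for gpui's Animation, keeping only the fields relevant
// to the Box -> Rc change.
#[derive(Clone)]
struct Animation {
    duration: Duration,
    // Rc<dyn Fn> is Clone regardless of the closure's concrete type, so the
    // whole struct can derive Clone and share one easing function.
    easing: Rc<dyn Fn(f32) -> f32>,
}

fn main() {
    let ease = Animation {
        duration: Duration::from_millis(200),
        easing: Rc::new(|t: f32| t * t),
    };
    // Cloning bumps the reference count; both handles share the same closure.
    let shared = ease.clone();
    assert_eq!(shared.duration, ease.duration);
    assert_eq!((shared.easing)(0.5), 0.25);
    assert_eq!(Rc::strong_count(&ease.easing), 2);
}
```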
@@ -249,8 +249,8 @@ impl ListState {
let state = &mut *self.0.borrow_mut();
let mut old_items = state.items.cursor::<Count>(&());
- let mut new_items = old_items.slice(&Count(old_range.start), Bias::Right, &());
- old_items.seek_forward(&Count(old_range.end), Bias::Right, &());
+ let mut new_items = old_items.slice(&Count(old_range.start), Bias::Right);
+ old_items.seek_forward(&Count(old_range.end), Bias::Right);
let mut spliced_count = 0;
new_items.extend(
@@ -260,7 +260,7 @@ impl ListState {
}),
&(),
);
- new_items.append(old_items.suffix(&()), &());
+ new_items.append(old_items.suffix(), &());
drop(old_items);
state.items = new_items;
@@ -300,14 +300,14 @@ impl ListState {
let current_offset = self.logical_scroll_top();
let state = &mut *self.0.borrow_mut();
let mut cursor = state.items.cursor::<ListItemSummary>(&());
- cursor.seek(&Count(current_offset.item_ix), Bias::Right, &());
+ cursor.seek(&Count(current_offset.item_ix), Bias::Right);
let start_pixel_offset = cursor.start().height + current_offset.offset_in_item;
let new_pixel_offset = (start_pixel_offset + distance).max(px(0.));
if new_pixel_offset > start_pixel_offset {
- cursor.seek_forward(&Height(new_pixel_offset), Bias::Right, &());
+ cursor.seek_forward(&Height(new_pixel_offset), Bias::Right);
} else {
- cursor.seek(&Height(new_pixel_offset), Bias::Right, &());
+ cursor.seek(&Height(new_pixel_offset), Bias::Right);
}
state.logical_scroll_top = Some(ListOffset {
@@ -343,11 +343,11 @@ impl ListState {
scroll_top.offset_in_item = px(0.);
} else {
let mut cursor = state.items.cursor::<ListItemSummary>(&());
- cursor.seek(&Count(ix + 1), Bias::Right, &());
+ cursor.seek(&Count(ix + 1), Bias::Right);
let bottom = cursor.start().height + padding.top;
let goal_top = px(0.).max(bottom - height + padding.bottom);
- cursor.seek(&Height(goal_top), Bias::Left, &());
+ cursor.seek(&Height(goal_top), Bias::Left);
let start_ix = cursor.start().count;
let start_item_top = cursor.start().height;
@@ -372,11 +372,11 @@ impl ListState {
}
let mut cursor = state.items.cursor::<(Count, Height)>(&());
- cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &());
+ cursor.seek(&Count(scroll_top.item_ix), Bias::Right);
let scroll_top = cursor.start().1.0 + scroll_top.offset_in_item;
- cursor.seek_forward(&Count(ix), Bias::Right, &());
+ cursor.seek_forward(&Count(ix), Bias::Right);
if let Some(&ListItem::Measured { size, .. }) = cursor.item() {
let &(Count(count), Height(top)) = cursor.start();
if count == ix {
@@ -431,7 +431,7 @@ impl ListState {
let mut cursor = state.items.cursor::<ListItemSummary>(&());
let summary: ListItemSummary =
- cursor.summary(&Count(logical_scroll_top.item_ix), Bias::Right, &());
+ cursor.summary(&Count(logical_scroll_top.item_ix), Bias::Right);
let content_height = state.items.summary().height;
let drag_offset =
// if dragging the scrollbar, we want to offset the point if the height changed
@@ -450,9 +450,9 @@ impl ListState {
impl StateInner {
fn visible_range(&self, height: Pixels, scroll_top: &ListOffset) -> Range<usize> {
let mut cursor = self.items.cursor::<ListItemSummary>(&());
- cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &());
+ cursor.seek(&Count(scroll_top.item_ix), Bias::Right);
let start_y = cursor.start().height + scroll_top.offset_in_item;
- cursor.seek_forward(&Height(start_y + height), Bias::Left, &());
+ cursor.seek_forward(&Height(start_y + height), Bias::Left);
scroll_top.item_ix..cursor.start().count + 1
}
@@ -482,7 +482,7 @@ impl StateInner {
self.logical_scroll_top = None;
} else {
let mut cursor = self.items.cursor::<ListItemSummary>(&());
- cursor.seek(&Height(new_scroll_top), Bias::Right, &());
+ cursor.seek(&Height(new_scroll_top), Bias::Right);
let item_ix = cursor.start().count;
let offset_in_item = new_scroll_top - cursor.start().height;
self.logical_scroll_top = Some(ListOffset {
@@ -523,7 +523,7 @@ impl StateInner {
fn scroll_top(&self, logical_scroll_top: &ListOffset) -> Pixels {
let mut cursor = self.items.cursor::<ListItemSummary>(&());
- cursor.seek(&Count(logical_scroll_top.item_ix), Bias::Right, &());
+ cursor.seek(&Count(logical_scroll_top.item_ix), Bias::Right);
cursor.start().height + logical_scroll_top.offset_in_item
}
@@ -553,7 +553,7 @@ impl StateInner {
let mut cursor = old_items.cursor::<Count>(&());
// Render items after the scroll top, including those in the trailing overdraw
- cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &());
+ cursor.seek(&Count(scroll_top.item_ix), Bias::Right);
for (ix, item) in cursor.by_ref().enumerate() {
let visible_height = rendered_height - scroll_top.offset_in_item;
if visible_height >= available_height + self.overdraw {
@@ -592,13 +592,13 @@ impl StateInner {
rendered_height += padding.bottom;
// Prepare to start walking upward from the item at the scroll top.
- cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &());
+ cursor.seek(&Count(scroll_top.item_ix), Bias::Right);
// If the rendered items do not fill the visible region, then adjust
// the scroll top upward.
if rendered_height - scroll_top.offset_in_item < available_height {
while rendered_height < available_height {
- cursor.prev(&());
+ cursor.prev();
if let Some(item) = cursor.item() {
let item_index = cursor.start().0;
let mut element = (self.render_item)(item_index, window, cx);
@@ -645,7 +645,7 @@ impl StateInner {
// Measure items in the leading overdraw
let mut leading_overdraw = scroll_top.offset_in_item;
while leading_overdraw < self.overdraw {
- cursor.prev(&());
+ cursor.prev();
if let Some(item) = cursor.item() {
let size = if let ListItem::Measured { size, .. } = item {
*size
@@ -666,10 +666,10 @@ impl StateInner {
let measured_range = cursor.start().0..(cursor.start().0 + measured_items.len());
let mut cursor = old_items.cursor::<Count>(&());
- let mut new_items = cursor.slice(&Count(measured_range.start), Bias::Right, &());
+ let mut new_items = cursor.slice(&Count(measured_range.start), Bias::Right);
new_items.extend(measured_items, &());
- cursor.seek(&Count(measured_range.end), Bias::Right, &());
- new_items.append(cursor.suffix(&()), &());
+ cursor.seek(&Count(measured_range.end), Bias::Right);
+ new_items.append(cursor.suffix(), &());
self.items = new_items;
// If none of the visible items are focused, check if an off-screen item is focused
@@ -679,7 +679,7 @@ impl StateInner {
let mut cursor = self
.items
.filter::<_, Count>(&(), |summary| summary.has_focus_handles);
- cursor.next(&());
+ cursor.next();
while let Some(item) = cursor.item() {
if item.contains_focused(window, cx) {
let item_index = cursor.start().0;
@@ -692,7 +692,7 @@ impl StateInner {
});
break;
}
- cursor.next(&());
+ cursor.next();
}
}
@@ -741,7 +741,7 @@ impl StateInner {
});
} else if autoscroll_bounds.bottom() > bounds.bottom() {
let mut cursor = self.items.cursor::<Count>(&());
- cursor.seek(&Count(item.index), Bias::Right, &());
+ cursor.seek(&Count(item.index), Bias::Right);
let mut height = bounds.size.height - padding.top - padding.bottom;
// Account for the height of the element down until the autoscroll bottom.
@@ -749,7 +749,7 @@ impl StateInner {
// Keep decreasing the scroll top until we fill all the available space.
while height > Pixels::ZERO {
- cursor.prev(&());
+ cursor.prev();
let Some(item) = cursor.item() else { break };
let size = item.size().unwrap_or_else(|| {
@@ -806,7 +806,7 @@ impl StateInner {
self.logical_scroll_top = None;
} else {
let mut cursor = self.items.cursor::<ListItemSummary>(&());
- cursor.seek(&Height(new_scroll_top), Bias::Right, &());
+ cursor.seek(&Height(new_scroll_top), Bias::Right);
let item_ix = cursor.start().count;
let offset_in_item = new_scroll_top - cursor.start().height;
@@ -50,8 +50,8 @@
/// KeyBinding::new("cmd-k left", pane::SplitLeft, Some("Pane"))
///
use crate::{
- Action, ActionRegistry, App, BindingIndex, DispatchPhase, EntityId, FocusId, KeyBinding,
- KeyContext, Keymap, Keystroke, ModifiersChangedEvent, Window,
+ Action, ActionRegistry, App, DispatchPhase, EntityId, FocusId, KeyBinding, KeyContext, Keymap,
+ Keystroke, ModifiersChangedEvent, Window,
};
use collections::FxHashMap;
use smallvec::SmallVec;
@@ -406,16 +406,11 @@ impl DispatchTree {
// methods, but this can't be done very cleanly since keymap must be borrowed.
let keymap = self.keymap.borrow();
keymap
- .bindings_for_action_with_indices(action)
- .filter(|(binding_index, binding)| {
- Self::binding_matches_predicate_and_not_shadowed(
- &keymap,
- *binding_index,
- &binding.keystrokes,
- context_stack,
- )
+ .bindings_for_action(action)
+ .filter(|binding| {
+ Self::binding_matches_predicate_and_not_shadowed(&keymap, &binding, context_stack)
})
- .map(|(_, binding)| binding.clone())
+ .cloned()
.collect()
}
@@ -428,28 +423,22 @@ impl DispatchTree {
) -> Option<KeyBinding> {
let keymap = self.keymap.borrow();
keymap
- .bindings_for_action_with_indices(action)
+ .bindings_for_action(action)
.rev()
- .find_map(|(binding_index, binding)| {
- let found = Self::binding_matches_predicate_and_not_shadowed(
- &keymap,
- binding_index,
- &binding.keystrokes,
- context_stack,
- );
- if found { Some(binding.clone()) } else { None }
+ .find(|binding| {
+ Self::binding_matches_predicate_and_not_shadowed(&keymap, &binding, context_stack)
})
+ .cloned()
}
fn binding_matches_predicate_and_not_shadowed(
keymap: &Keymap,
- binding_index: BindingIndex,
- keystrokes: &[Keystroke],
+ binding: &KeyBinding,
context_stack: &[KeyContext],
) -> bool {
- let (bindings, _) = keymap.bindings_for_input_with_indices(&keystrokes, context_stack);
- if let Some((highest_precedence_index, _)) = bindings.iter().next() {
- binding_index == *highest_precedence_index
+ let (bindings, _) = keymap.bindings_for_input(&binding.keystrokes, context_stack);
+ if let Some(found) = bindings.iter().next() {
+ found.action.partial_eq(binding.action.as_ref())
} else {
false
}
@@ -5,7 +5,7 @@ pub use binding::*;
pub use context::*;
use crate::{Action, Keystroke, is_no_action};
-use collections::HashMap;
+use collections::{HashMap, HashSet};
use smallvec::SmallVec;
use std::any::TypeId;
@@ -77,15 +77,6 @@ impl Keymap {
&'a self,
action: &'a dyn Action,
) -> impl 'a + DoubleEndedIterator<Item = &'a KeyBinding> {
- self.bindings_for_action_with_indices(action)
- .map(|(_, binding)| binding)
- }
-
- /// Like `bindings_for_action_with_indices`, but also returns the binding indices.
- pub fn bindings_for_action_with_indices<'a>(
- &'a self,
- action: &'a dyn Action,
- ) -> impl 'a + DoubleEndedIterator<Item = (BindingIndex, &'a KeyBinding)> {
let action_id = action.type_id();
let binding_indices = self
.binding_indices_by_action_id
@@ -118,7 +109,7 @@ impl Keymap {
}
}
- Some((BindingIndex(*ix), binding))
+ Some(binding)
})
}
@@ -153,90 +144,53 @@ impl Keymap {
input: &[Keystroke],
context_stack: &[KeyContext],
) -> (SmallVec<[KeyBinding; 1]>, bool) {
- let (bindings, pending) = self.bindings_for_input_with_indices(input, context_stack);
- let bindings = bindings
- .into_iter()
- .map(|(_, binding)| binding)
- .collect::<SmallVec<[KeyBinding; 1]>>();
- (bindings, pending)
- }
+ let mut matched_bindings = SmallVec::<[(usize, BindingIndex, &KeyBinding); 1]>::new();
+ let mut pending_bindings = SmallVec::<[(BindingIndex, &KeyBinding); 1]>::new();
- /// Like `bindings_for_input`, but also returns the binding indices.
- pub fn bindings_for_input_with_indices(
- &self,
- input: &[Keystroke],
- context_stack: &[KeyContext],
- ) -> (SmallVec<[(BindingIndex, KeyBinding); 1]>, bool) {
- let mut possibilities = self
- .bindings()
- .enumerate()
- .rev()
- .filter_map(|(ix, binding)| {
- let depth = self.binding_enabled(binding, &context_stack)?;
- let pending = binding.match_keystrokes(input)?;
- Some((depth, BindingIndex(ix), binding, pending))
- })
- .collect::<Vec<_>>();
- possibilities.sort_by(|(depth_a, ix_a, _, _), (depth_b, ix_b, _, _)| {
+ for (ix, binding) in self.bindings().enumerate().rev() {
+ let Some(depth) = self.binding_enabled(binding, &context_stack) else {
+ continue;
+ };
+ let Some(pending) = binding.match_keystrokes(input) else {
+ continue;
+ };
+
+ if !pending {
+ matched_bindings.push((depth, BindingIndex(ix), binding));
+ } else {
+ pending_bindings.push((BindingIndex(ix), binding));
+ }
+ }
+
+ matched_bindings.sort_by(|(depth_a, ix_a, _), (depth_b, ix_b, _)| {
depth_b.cmp(depth_a).then(ix_b.cmp(ix_a))
});
- let mut bindings: SmallVec<[(BindingIndex, KeyBinding, usize); 1]> = SmallVec::new();
-
- // (pending, is_no_action, depth, keystrokes)
- let mut pending_info_opt: Option<(bool, bool, usize, &[Keystroke])> = None;
-
- 'outer: for (depth, binding_index, binding, pending) in possibilities {
- let is_no_action = is_no_action(&*binding.action);
- // We only want to consider a binding pending if it has an action
- // This, however, means that if we have both a NoAction binding and a binding
- // with an action at the same depth, we should still set is_pending to true.
- if let Some(pending_info) = pending_info_opt.as_mut() {
- let (already_pending, pending_is_no_action, pending_depth, pending_keystrokes) =
- *pending_info;
-
- // We only want to change the pending status if it's not already pending AND if
- // the existing pending status was set by a NoAction binding. This avoids a NoAction
- // binding erroneously setting the pending status to true when a binding with an action
- // already set it to false
- //
- // We also want to change the pending status if the keystrokes don't match,
- // meaning it's different keystrokes than the NoAction that set pending to false
- if pending
- && !already_pending
- && pending_is_no_action
- && (pending_depth == depth || pending_keystrokes != binding.keystrokes())
- {
- pending_info.0 = !is_no_action;
- }
- } else {
- pending_info_opt = Some((
- pending && !is_no_action,
- is_no_action,
- depth,
- binding.keystrokes(),
- ));
+ let mut bindings: SmallVec<[_; 1]> = SmallVec::new();
+ let mut first_binding_index = None;
+ for (_, ix, binding) in matched_bindings {
+ if is_no_action(&*binding.action) {
+ break;
}
+ bindings.push(binding.clone());
+ first_binding_index.get_or_insert(ix);
+ }
- if !pending {
- bindings.push((binding_index, binding.clone(), depth));
- continue 'outer;
+ let mut pending = HashSet::default();
+ for (ix, binding) in pending_bindings.into_iter().rev() {
+ if let Some(binding_ix) = first_binding_index
+ && binding_ix > ix
+ {
+ continue;
}
+ if is_no_action(&*binding.action) {
+ pending.remove(&&binding.keystrokes);
+ continue;
+ }
+ pending.insert(&binding.keystrokes);
}
- // sort by descending depth
- bindings.sort_by(|a, b| a.2.cmp(&b.2).reverse());
- let bindings = bindings
- .into_iter()
- .map_while(|(binding_index, binding, _)| {
- if is_no_action(&*binding.action) {
- None
- } else {
- Some((binding_index, binding))
- }
- })
- .collect();
- (bindings, pending_info_opt.unwrap_or_default().0)
+ (bindings, !pending.is_empty())
}
/// Check if the given binding is enabled, given a certain key context.
@@ -302,6 +256,30 @@ mod tests {
);
}
+ #[test]
+ fn test_depth_precedence() {
+ let bindings = [
+ KeyBinding::new("ctrl-a", ActionBeta {}, Some("pane")),
+ KeyBinding::new("ctrl-a", ActionGamma {}, Some("editor")),
+ ];
+
+ let mut keymap = Keymap::default();
+ keymap.add_bindings(bindings.clone());
+
+ let (result, pending) = keymap.bindings_for_input(
+ &[Keystroke::parse("ctrl-a").unwrap()],
+ &[
+ KeyContext::parse("pane").unwrap(),
+ KeyContext::parse("editor").unwrap(),
+ ],
+ );
+
+ assert!(!pending);
+ assert_eq!(result.len(), 2);
+ assert!(result[0].action.partial_eq(&ActionGamma {}));
+ assert!(result[1].action.partial_eq(&ActionBeta {}));
+ }
+
#[test]
fn test_keymap_disabled() {
let bindings = [
@@ -453,6 +431,193 @@ mod tests {
assert_eq!(space_editor.1, true);
}
+ #[test]
+ fn test_override_multikey() {
+ let bindings = [
+ KeyBinding::new("ctrl-w left", ActionAlpha {}, Some("editor")),
+ KeyBinding::new("ctrl-w", NoAction {}, Some("editor")),
+ ];
+
+ let mut keymap = Keymap::default();
+ keymap.add_bindings(bindings.clone());
+
+ // `ctrl-w` alone is disabled by NoAction, but stays pending because `ctrl-w left` is still reachable
+ let (result, pending) = keymap.bindings_for_input(
+ &[Keystroke::parse("ctrl-w").unwrap()],
+ &[KeyContext::parse("editor").unwrap()],
+ );
+ assert!(result.is_empty());
+ assert_eq!(pending, true);
+
+ let bindings = [
+ KeyBinding::new("ctrl-w left", ActionAlpha {}, Some("editor")),
+ KeyBinding::new("ctrl-w", ActionBeta {}, Some("editor")),
+ ];
+
+ let mut keymap = Keymap::default();
+ keymap.add_bindings(bindings.clone());
+
+ // With `ctrl-w` bound to a real action, it matches immediately and nothing is left pending
+ let (result, pending) = keymap.bindings_for_input(
+ &[Keystroke::parse("ctrl-w").unwrap()],
+ &[KeyContext::parse("editor").unwrap()],
+ );
+ assert_eq!(result.len(), 1);
+ assert_eq!(pending, false);
+ }
+
+ #[test]
+ fn test_simple_disable() {
+ let bindings = [
+ KeyBinding::new("ctrl-x", ActionAlpha {}, Some("editor")),
+ KeyBinding::new("ctrl-x", NoAction {}, Some("editor")),
+ ];
+
+ let mut keymap = Keymap::default();
+ keymap.add_bindings(bindings.clone());
+
+ // A NoAction binding at the same depth disables `ctrl-x` entirely
+ let (result, pending) = keymap.bindings_for_input(
+ &[Keystroke::parse("ctrl-x").unwrap()],
+ &[KeyContext::parse("editor").unwrap()],
+ );
+ assert!(result.is_empty());
+ assert_eq!(pending, false);
+ }
+
+ #[test]
+ fn test_fail_to_disable() {
+ // disabled at the wrong level
+ let bindings = [
+ KeyBinding::new("ctrl-x", ActionAlpha {}, Some("editor")),
+ KeyBinding::new("ctrl-x", NoAction {}, Some("workspace")),
+ ];
+
+ let mut keymap = Keymap::default();
+ keymap.add_bindings(bindings.clone());
+
+ // A NoAction binding at a shallower level (workspace) does not disable the deeper editor binding
+ let (result, pending) = keymap.bindings_for_input(
+ &[Keystroke::parse("ctrl-x").unwrap()],
+ &[
+ KeyContext::parse("workspace").unwrap(),
+ KeyContext::parse("editor").unwrap(),
+ ],
+ );
+ assert_eq!(result.len(), 1);
+ assert_eq!(pending, false);
+ }
+
+ #[test]
+ fn test_disable_deeper() {
+ let bindings = [
+ KeyBinding::new("ctrl-x", ActionAlpha {}, Some("workspace")),
+ KeyBinding::new("ctrl-x", NoAction {}, Some("editor")),
+ ];
+
+ let mut keymap = Keymap::default();
+ keymap.add_bindings(bindings.clone());
+
+ // A NoAction binding at a deeper level (editor) disables the shallower workspace binding
+ let (result, pending) = keymap.bindings_for_input(
+ &[Keystroke::parse("ctrl-x").unwrap()],
+ &[
+ KeyContext::parse("workspace").unwrap(),
+ KeyContext::parse("editor").unwrap(),
+ ],
+ );
+ assert_eq!(result.len(), 0);
+ assert_eq!(pending, false);
+ }
+
+ #[test]
+ fn test_pending_match_enabled() {
+ let bindings = [
+ KeyBinding::new("ctrl-x", ActionBeta, Some("vim_mode == normal")),
+ KeyBinding::new("ctrl-x 0", ActionAlpha, Some("Workspace")),
+ ];
+ let mut keymap = Keymap::default();
+ keymap.add_bindings(bindings.clone());
+
+ let matched = keymap.bindings_for_input(
+ &[Keystroke::parse("ctrl-x")].map(Result::unwrap),
+ &[
+ KeyContext::parse("Workspace"),
+ KeyContext::parse("Pane"),
+ KeyContext::parse("Editor vim_mode=normal"),
+ ]
+ .map(Result::unwrap),
+ );
+ assert_eq!(matched.0.len(), 1);
+ assert!(matched.0[0].action.partial_eq(&ActionBeta));
+ assert!(matched.1);
+ }
+
+ #[test]
+ fn test_pending_match_enabled_extended() {
+ let bindings = [
+ KeyBinding::new("ctrl-x", ActionBeta, Some("vim_mode == normal")),
+ KeyBinding::new("ctrl-x 0", NoAction, Some("Workspace")),
+ ];
+ let mut keymap = Keymap::default();
+ keymap.add_bindings(bindings.clone());
+
+ let matched = keymap.bindings_for_input(
+ &[Keystroke::parse("ctrl-x")].map(Result::unwrap),
+ &[
+ KeyContext::parse("Workspace"),
+ KeyContext::parse("Pane"),
+ KeyContext::parse("Editor vim_mode=normal"),
+ ]
+ .map(Result::unwrap),
+ );
+ assert_eq!(matched.0.len(), 1);
+ assert!(matched.0[0].action.partial_eq(&ActionBeta));
+ assert!(!matched.1);
+ let bindings = [
+ KeyBinding::new("ctrl-x", ActionBeta, Some("Workspace")),
+ KeyBinding::new("ctrl-x 0", NoAction, Some("vim_mode == normal")),
+ ];
+ let mut keymap = Keymap::default();
+ keymap.add_bindings(bindings.clone());
+
+ let matched = keymap.bindings_for_input(
+ &[Keystroke::parse("ctrl-x")].map(Result::unwrap),
+ &[
+ KeyContext::parse("Workspace"),
+ KeyContext::parse("Pane"),
+ KeyContext::parse("Editor vim_mode=normal"),
+ ]
+ .map(Result::unwrap),
+ );
+ assert_eq!(matched.0.len(), 1);
+ assert!(matched.0[0].action.partial_eq(&ActionBeta));
+ assert!(!matched.1);
+ }
+
+ #[test]
+ fn test_overriding_prefix() {
+ let bindings = [
+ KeyBinding::new("ctrl-x 0", ActionAlpha, Some("Workspace")),
+ KeyBinding::new("ctrl-x", ActionBeta, Some("vim_mode == normal")),
+ ];
+ let mut keymap = Keymap::default();
+ keymap.add_bindings(bindings.clone());
+
+ let matched = keymap.bindings_for_input(
+ &[Keystroke::parse("ctrl-x")].map(Result::unwrap),
+ &[
+ KeyContext::parse("Workspace"),
+ KeyContext::parse("Pane"),
+ KeyContext::parse("Editor vim_mode=normal"),
+ ]
+ .map(Result::unwrap),
+ );
+ assert_eq!(matched.0.len(), 1);
+ assert!(matched.0[0].action.partial_eq(&ActionBeta));
+ assert!(!matched.1);
+ }
+
#[test]
fn test_bindings_for_action() {
let bindings = [
@@ -21,6 +21,7 @@ anyhow.workspace = true
derive_more.workspace = true
futures.workspace = true
http.workspace = true
+http-body.workspace = true
log.workspace = true
serde.workspace = true
serde_json.workspace = true
@@ -6,6 +6,7 @@ use std::{
use bytes::Bytes;
use futures::AsyncRead;
+use http_body::{Body, Frame};
/// Based on the implementation of AsyncBody in
/// <https://github.com/sagebind/isahc/blob/5c533f1ef4d6bdf1fd291b5103c22110f41d0bf0/src/body/mod.rs>.
@@ -114,3 +115,24 @@ impl futures::AsyncRead for AsyncBody {
}
}
}
+
+impl Body for AsyncBody {
+ type Data = Bytes;
+ type Error = std::io::Error;
+
+ fn poll_frame(
+ mut self: Pin<&mut Self>,
+ cx: &mut std::task::Context<'_>,
+ ) -> Poll<Option<Result<Frame<Self::Data>, Self::Error>>> {
+ let mut buffer = vec![0; 8192];
+ match AsyncRead::poll_read(self.as_mut(), cx, &mut buffer) {
+ Poll::Ready(Ok(0)) => Poll::Ready(None),
+ Poll::Ready(Ok(n)) => {
+ let data = Bytes::copy_from_slice(&buffer[..n]);
+ Poll::Ready(Some(Ok(Frame::data(data))))
+ }
+ Poll::Ready(Err(e)) => Poll::Ready(Some(Err(e))),
+ Poll::Pending => Poll::Pending,
+ }
+ }
+}
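Note (not part of the diff): with this `Body` impl, an `AsyncBody` can now be handed to anything expecting an `http_body::Body`. A minimal consumption sketch, assuming the `http_body_util` crate is available for its `BodyExt::collect` helper:

use http_body_util::BodyExt;
use http_client::AsyncBody;

// Drains the body by polling `poll_frame` (defined above) until it returns
// `None`, then flattens the collected frames into a single byte buffer.
async fn read_all(body: AsyncBody) -> std::io::Result<Vec<u8>> {
    let collected = body.collect().await?;
    Ok(collected.to_bytes().to_vec())
}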
@@ -20,6 +20,7 @@ pub enum IconName {
AiMistral,
AiOllama,
AiOpenAi,
+ AiOpenAiCompat,
AiOpenRouter,
AiVZero,
AiXAi,
@@ -1,5 +1,5 @@
use anyhow::Result;
-use client::{UserStore, zed_urls};
+use client::{DisableAiSettings, UserStore, zed_urls};
use copilot::{Copilot, Status};
use editor::{
Editor, SelectionEffects,
@@ -72,6 +72,11 @@ enum SupermavenButtonStatus {
impl Render for InlineCompletionButton {
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ // Return empty div if AI is disabled
+ if DisableAiSettings::get_global(cx).disable_ai {
+ return div();
+ }
+
let all_language_settings = all_language_settings(None, cx);
match all_language_settings.edit_predictions.provider {
@@ -2072,6 +2072,21 @@ impl Buffer {
self.text.push_transaction(transaction, now);
}
+ /// Differs from `push_transaction` in that it does not clear the redo
+ /// stack. Intended to be used to create a parent transaction to merge
+ /// potential child transactions into.
+ ///
+ /// The caller is responsible for removing it from the undo history using
+ /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
+ /// are merged into this transaction, the caller is responsible for ensuring
+ /// the redo stack is cleared. The easiest way to ensure the redo stack is
+ /// cleared is to create transactions with the usual `start_transaction` and
+ /// `end_transaction` methods and merge the resulting transactions into
+ /// the transaction created by this method.
+ pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
+ self.text.push_empty_transaction(now)
+ }
+
/// Prevent the last transaction from being grouped with any subsequent transactions,
/// even if they occur with the buffer's undo grouping duration.
pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
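A sketch of the intended call pattern for `push_empty_transaction` (not part of the diff). It leans on the `start_transaction`, `end_transaction`, and `forget_transaction` helpers named in the doc comment, and assumes a `merge_transactions(source, destination)` helper exists for folding a child transaction into the parent:

fn edit_under_parent(buffer: &mut language::Buffer, cx: &mut gpui::Context<language::Buffer>) {
    // Record the parent entry without clearing the redo stack.
    let parent = buffer.push_empty_transaction(std::time::Instant::now());

    // Make the actual edits through the usual transaction methods so the redo
    // stack is cleared as the doc comment requires.
    buffer.start_transaction();
    buffer.edit([(0..0, "hello")], None, cx);

    if let Some(child) = buffer.end_transaction(cx) {
        // Fold the child edits into the parent so they undo as a single step.
        buffer.merge_transactions(child, parent);
    } else {
        // Nothing was merged, so drop the placeholder from the undo history.
        buffer.forget_transaction(parent);
    }
}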
@@ -158,17 +158,17 @@ impl DiagnosticSet {
});
if reversed {
- cursor.prev(buffer);
+ cursor.prev();
} else {
- cursor.next(buffer);
+ cursor.next();
}
iter::from_fn({
move || {
if let Some(diagnostic) = cursor.item() {
if reversed {
- cursor.prev(buffer);
+ cursor.prev();
} else {
- cursor.next(buffer);
+ cursor.next();
}
Some(diagnostic.resolve(buffer))
} else {
@@ -297,10 +297,10 @@ impl SyntaxSnapshot {
let mut first_edit_ix_for_depth = 0;
let mut prev_depth = 0;
let mut cursor = self.layers.cursor::<SyntaxLayerSummary>(text);
- cursor.next(text);
+ cursor.next();
'outer: loop {
- let depth = cursor.end(text).max_depth;
+ let depth = cursor.end().max_depth;
if depth > prev_depth {
first_edit_ix_for_depth = 0;
prev_depth = depth;
@@ -313,7 +313,7 @@ impl SyntaxSnapshot {
position: edit_range.start,
};
if target.cmp(cursor.start(), text).is_gt() {
- let slice = cursor.slice(&target, Bias::Left, text);
+ let slice = cursor.slice(&target, Bias::Left);
layers.append(slice, text);
}
}
@@ -327,7 +327,6 @@ impl SyntaxSnapshot {
language: None,
},
Bias::Left,
- text,
);
layers.append(slice, text);
continue;
@@ -394,10 +393,10 @@ impl SyntaxSnapshot {
}
layers.push(layer, text);
- cursor.next(text);
+ cursor.next();
}
- layers.append(cursor.suffix(text), text);
+ layers.append(cursor.suffix(), text);
drop(cursor);
self.layers = layers;
}
@@ -420,7 +419,7 @@ impl SyntaxSnapshot {
let mut cursor = self
.layers
.filter::<_, ()>(text, |summary| summary.contains_unknown_injections);
- cursor.next(text);
+ cursor.next();
while let Some(layer) = cursor.item() {
let SyntaxLayerContent::Pending { language_name } = &layer.content else {
unreachable!()
@@ -436,7 +435,7 @@ impl SyntaxSnapshot {
resolved_injection_ranges.push(range);
}
- cursor.next(text);
+ cursor.next();
}
drop(cursor);
@@ -469,7 +468,7 @@ impl SyntaxSnapshot {
let max_depth = self.layers.summary().max_depth;
let mut cursor = self.layers.cursor::<SyntaxLayerSummary>(text);
- cursor.next(text);
+ cursor.next();
let mut layers = SumTree::new(text);
let mut changed_regions = ChangeRegionSet::default();
@@ -514,7 +513,7 @@ impl SyntaxSnapshot {
};
let mut done = cursor.item().is_none();
- while !done && position.cmp(&cursor.end(text), text).is_gt() {
+ while !done && position.cmp(&cursor.end(), text).is_gt() {
done = true;
let bounded_position = SyntaxLayerPositionBeforeChange {
@@ -522,16 +521,16 @@ impl SyntaxSnapshot {
change: changed_regions.start_position(),
};
if bounded_position.cmp(cursor.start(), text).is_gt() {
- let slice = cursor.slice(&bounded_position, Bias::Left, text);
+ let slice = cursor.slice(&bounded_position, Bias::Left);
if !slice.is_empty() {
layers.append(slice, text);
- if changed_regions.prune(cursor.end(text), text) {
+ if changed_regions.prune(cursor.end(), text) {
done = false;
}
}
}
- while position.cmp(&cursor.end(text), text).is_gt() {
+ while position.cmp(&cursor.end(), text).is_gt() {
let Some(layer) = cursor.item() else { break };
if changed_regions.intersects(layer, text) {
@@ -555,8 +554,8 @@ impl SyntaxSnapshot {
layers.push(layer.clone(), text);
}
- cursor.next(text);
- if changed_regions.prune(cursor.end(text), text) {
+ cursor.next();
+ if changed_regions.prune(cursor.end(), text) {
done = false;
}
}
@@ -572,7 +571,7 @@ impl SyntaxSnapshot {
if layer.range.to_offset(text) == (step_start_byte..step_end_byte)
&& layer.content.language_id() == step.language.id()
{
- cursor.next(text);
+ cursor.next();
} else {
old_layer = None;
}
@@ -918,7 +917,7 @@ impl SyntaxSnapshot {
}
});
- cursor.next(buffer);
+ cursor.next();
iter::from_fn(move || {
while let Some(layer) = cursor.item() {
let mut info = None;
@@ -940,7 +939,7 @@ impl SyntaxSnapshot {
});
}
}
- cursor.next(buffer);
+ cursor.next();
if info.is_some() {
return info;
}
@@ -10,25 +10,21 @@ use http_client::Result;
use parking_lot::Mutex;
use std::sync::Arc;
-pub fn language_model_id() -> LanguageModelId {
- LanguageModelId::from("fake".to_string())
+#[derive(Clone)]
+pub struct FakeLanguageModelProvider {
+ id: LanguageModelProviderId,
+ name: LanguageModelProviderName,
}
-pub fn language_model_name() -> LanguageModelName {
- LanguageModelName::from("Fake".to_string())
-}
-
-pub fn provider_id() -> LanguageModelProviderId {
- LanguageModelProviderId::from("fake".to_string())
-}
-
-pub fn provider_name() -> LanguageModelProviderName {
- LanguageModelProviderName::from("Fake".to_string())
+impl Default for FakeLanguageModelProvider {
+ fn default() -> Self {
+ Self {
+ id: LanguageModelProviderId::from("fake".to_string()),
+ name: LanguageModelProviderName::from("Fake".to_string()),
+ }
+ }
}
-#[derive(Clone, Default)]
-pub struct FakeLanguageModelProvider;
-
impl LanguageModelProviderState for FakeLanguageModelProvider {
type ObservableEntity = ();
@@ -39,11 +35,11 @@ impl LanguageModelProviderState for FakeLanguageModelProvider {
impl LanguageModelProvider for FakeLanguageModelProvider {
fn id(&self) -> LanguageModelProviderId {
- provider_id()
+ self.id.clone()
}
fn name(&self) -> LanguageModelProviderName {
- provider_name()
+ self.name.clone()
}
fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
@@ -76,6 +72,10 @@ impl LanguageModelProvider for FakeLanguageModelProvider {
}
impl FakeLanguageModelProvider {
+ pub fn new(id: LanguageModelProviderId, name: LanguageModelProviderName) -> Self {
+ Self { id, name }
+ }
+
pub fn test_model(&self) -> FakeLanguageModel {
FakeLanguageModel::default()
}
@@ -89,11 +89,22 @@ pub struct ToolUseRequest {
pub schema: serde_json::Value,
}
-#[derive(Default)]
pub struct FakeLanguageModel {
+ provider_id: LanguageModelProviderId,
+ provider_name: LanguageModelProviderName,
current_completion_txs: Mutex<Vec<(LanguageModelRequest, mpsc::UnboundedSender<String>)>>,
}
+impl Default for FakeLanguageModel {
+ fn default() -> Self {
+ Self {
+ provider_id: LanguageModelProviderId::from("fake".to_string()),
+ provider_name: LanguageModelProviderName::from("Fake".to_string()),
+ current_completion_txs: Mutex::new(Vec::new()),
+ }
+ }
+}
+
impl FakeLanguageModel {
pub fn pending_completions(&self) -> Vec<LanguageModelRequest> {
self.current_completion_txs
@@ -138,19 +149,19 @@ impl FakeLanguageModel {
impl LanguageModel for FakeLanguageModel {
fn id(&self) -> LanguageModelId {
- language_model_id()
+ LanguageModelId::from("fake".to_string())
}
fn name(&self) -> LanguageModelName {
- language_model_name()
+ LanguageModelName::from("Fake".to_string())
}
fn provider_id(&self) -> LanguageModelProviderId {
- provider_id()
+ self.provider_id.clone()
}
fn provider_name(&self) -> LanguageModelProviderName {
- provider_name()
+ self.provider_name.clone()
}
fn supports_tools(&self) -> bool {
@@ -735,6 +735,18 @@ impl From<String> for LanguageModelProviderName {
}
}
+impl From<Arc<str>> for LanguageModelProviderId {
+ fn from(value: Arc<str>) -> Self {
+ Self(SharedString::from(value))
+ }
+}
+
+impl From<Arc<str>> for LanguageModelProviderName {
+ fn from(value: Arc<str>) -> Self {
+ Self(SharedString::from(value))
+ }
+}
+
#[cfg(test)]
mod tests {
use super::*;
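Illustration only: the new conversions let the `Arc<str>` keys from the `openai_compatible` settings map double as provider identity, as used by `register_openai_compatible_providers` later in this change:

fn provider_identity(
    key: std::sync::Arc<str>,
) -> (
    language_model::LanguageModelProviderId,
    language_model::LanguageModelProviderName,
) {
    // The same settings key serves as both the provider id and its display name.
    (key.clone().into(), key.into())
}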
@@ -125,7 +125,7 @@ impl LanguageModelRegistry {
#[cfg(any(test, feature = "test-support"))]
pub fn test(cx: &mut App) -> crate::fake_provider::FakeLanguageModelProvider {
- let fake_provider = crate::fake_provider::FakeLanguageModelProvider;
+ let fake_provider = crate::fake_provider::FakeLanguageModelProvider::default();
let registry = cx.new(|cx| {
let mut registry = Self::default();
registry.register_provider(fake_provider.clone(), cx);
@@ -403,16 +403,17 @@ mod tests {
fn test_register_providers(cx: &mut App) {
let registry = cx.new(|_| LanguageModelRegistry::default());
+ let provider = FakeLanguageModelProvider::default();
registry.update(cx, |registry, cx| {
- registry.register_provider(FakeLanguageModelProvider, cx);
+ registry.register_provider(provider.clone(), cx);
});
let providers = registry.read(cx).providers();
assert_eq!(providers.len(), 1);
- assert_eq!(providers[0].id(), crate::fake_provider::provider_id());
+ assert_eq!(providers[0].id(), provider.id());
registry.update(cx, |registry, cx| {
- registry.unregister_provider(crate::fake_provider::provider_id(), cx);
+ registry.unregister_provider(provider.id(), cx);
});
let providers = registry.read(cx).providers();
@@ -26,10 +26,10 @@ client.workspace = true
collections.workspace = true
component.workspace = true
credentials_provider.workspace = true
+convert_case.workspace = true
copilot.workspace = true
deepseek = { workspace = true, features = ["schemars"] }
editor.workspace = true
-fs.workspace = true
futures.workspace = true
google_ai = { workspace = true, features = ["schemars"] }
gpui.workspace = true
@@ -1,8 +1,10 @@
use std::sync::Arc;
+use ::settings::{Settings, SettingsStore};
use client::{Client, UserStore};
+use collections::HashSet;
use gpui::{App, Context, Entity};
-use language_model::LanguageModelRegistry;
+use language_model::{LanguageModelProviderId, LanguageModelRegistry};
use provider::deepseek::DeepSeekLanguageModelProvider;
pub mod provider;
@@ -18,17 +20,81 @@ use crate::provider::lmstudio::LmStudioLanguageModelProvider;
use crate::provider::mistral::MistralLanguageModelProvider;
use crate::provider::ollama::OllamaLanguageModelProvider;
use crate::provider::open_ai::OpenAiLanguageModelProvider;
+use crate::provider::open_ai_compatible::OpenAiCompatibleLanguageModelProvider;
use crate::provider::open_router::OpenRouterLanguageModelProvider;
use crate::provider::vercel::VercelLanguageModelProvider;
use crate::provider::x_ai::XAiLanguageModelProvider;
pub use crate::settings::*;
pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) {
- crate::settings::init(cx);
+ crate::settings::init_settings(cx);
let registry = LanguageModelRegistry::global(cx);
registry.update(cx, |registry, cx| {
- register_language_model_providers(registry, user_store, client, cx);
+ register_language_model_providers(registry, user_store, client.clone(), cx);
});
+
+ let mut openai_compatible_providers = AllLanguageModelSettings::get_global(cx)
+ .openai_compatible
+ .keys()
+ .cloned()
+ .collect::<HashSet<_>>();
+
+ registry.update(cx, |registry, cx| {
+ register_openai_compatible_providers(
+ registry,
+ &HashSet::default(),
+ &openai_compatible_providers,
+ client.clone(),
+ cx,
+ );
+ });
+ cx.observe_global::<SettingsStore>(move |cx| {
+ let openai_compatible_providers_new = AllLanguageModelSettings::get_global(cx)
+ .openai_compatible
+ .keys()
+ .cloned()
+ .collect::<HashSet<_>>();
+ if openai_compatible_providers_new != openai_compatible_providers {
+ registry.update(cx, |registry, cx| {
+ register_openai_compatible_providers(
+ registry,
+ &openai_compatible_providers,
+ &openai_compatible_providers_new,
+ client.clone(),
+ cx,
+ );
+ });
+ openai_compatible_providers = openai_compatible_providers_new;
+ }
+ })
+ .detach();
+}
+
+fn register_openai_compatible_providers(
+ registry: &mut LanguageModelRegistry,
+ old: &HashSet<Arc<str>>,
+ new: &HashSet<Arc<str>>,
+ client: Arc<Client>,
+ cx: &mut Context<LanguageModelRegistry>,
+) {
+ for provider_id in old {
+ if !new.contains(provider_id) {
+ registry.unregister_provider(LanguageModelProviderId::from(provider_id.clone()), cx);
+ }
+ }
+
+ for provider_id in new {
+ if !old.contains(provider_id) {
+ registry.register_provider(
+ OpenAiCompatibleLanguageModelProvider::new(
+ provider_id.clone(),
+ client.http_client(),
+ cx,
+ ),
+ cx,
+ );
+ }
+ }
}
fn register_language_model_providers(
@@ -8,6 +8,7 @@ pub mod lmstudio;
pub mod mistral;
pub mod ollama;
pub mod open_ai;
+pub mod open_ai_compatible;
pub mod open_router;
pub mod vercel;
pub mod x_ai;
@@ -243,7 +243,7 @@ impl State {
pub struct BedrockLanguageModelProvider {
http_client: AwsHttpClient,
- handler: tokio::runtime::Handle,
+ handle: tokio::runtime::Handle,
state: gpui::Entity<State>,
}
@@ -258,13 +258,9 @@ impl BedrockLanguageModelProvider {
}),
});
- let tokio_handle = Tokio::handle(cx);
-
- let coerced_client = AwsHttpClient::new(http_client.clone(), tokio_handle.clone());
-
Self {
- http_client: coerced_client,
- handler: tokio_handle.clone(),
+ http_client: AwsHttpClient::new(http_client.clone()),
+ handle: Tokio::handle(cx),
state,
}
}
@@ -274,7 +270,7 @@ impl BedrockLanguageModelProvider {
id: LanguageModelId::from(model.id().to_string()),
model,
http_client: self.http_client.clone(),
- handler: self.handler.clone(),
+ handle: self.handle.clone(),
state: self.state.clone(),
client: OnceCell::new(),
request_limiter: RateLimiter::new(4),
@@ -375,7 +371,7 @@ struct BedrockModel {
id: LanguageModelId,
model: Model,
http_client: AwsHttpClient,
- handler: tokio::runtime::Handle,
+ handle: tokio::runtime::Handle,
client: OnceCell<BedrockClient>,
state: gpui::Entity<State>,
request_limiter: RateLimiter,
@@ -447,7 +443,7 @@ impl BedrockModel {
}
}
- let config = self.handler.block_on(config_builder.load());
+ let config = self.handle.block_on(config_builder.load());
anyhow::Ok(BedrockClient::new(&config))
})
.context("initializing Bedrock client")?;
@@ -2,7 +2,6 @@ use anyhow::{Context as _, Result, anyhow};
use collections::{BTreeMap, HashMap};
use credentials_provider::CredentialsProvider;
-use fs::Fs;
use futures::Stream;
use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window};
@@ -18,7 +17,7 @@ use menu;
use open_ai::{ImageUrl, Model, ResponseStreamEvent, stream_completion};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsStore, update_settings_file};
+use settings::{Settings, SettingsStore};
use std::pin::Pin;
use std::str::FromStr as _;
use std::sync::Arc;
@@ -28,7 +27,6 @@ use ui::{ElevationIndex, List, Tooltip, prelude::*};
use ui_input::SingleLineInput;
use util::ResultExt;
-use crate::OpenAiSettingsContent;
use crate::{AllLanguageModelSettings, ui::InstructionListItem};
const PROVIDER_ID: LanguageModelProviderId = language_model::OPEN_AI_PROVIDER_ID;
@@ -621,26 +619,32 @@ struct RawToolCall {
arguments: String,
}
+pub(crate) fn collect_tiktoken_messages(
+ request: LanguageModelRequest,
+) -> Vec<tiktoken_rs::ChatCompletionRequestMessage> {
+ request
+ .messages
+ .into_iter()
+ .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
+ role: match message.role {
+ Role::User => "user".into(),
+ Role::Assistant => "assistant".into(),
+ Role::System => "system".into(),
+ },
+ content: Some(message.string_contents()),
+ name: None,
+ function_call: None,
+ })
+ .collect::<Vec<_>>()
+}
+
pub fn count_open_ai_tokens(
request: LanguageModelRequest,
model: Model,
cx: &App,
) -> BoxFuture<'static, Result<u64>> {
cx.background_spawn(async move {
- let messages = request
- .messages
- .into_iter()
- .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
- role: match message.role {
- Role::User => "user".into(),
- Role::Assistant => "assistant".into(),
- Role::System => "system".into(),
- },
- content: Some(message.string_contents()),
- name: None,
- function_call: None,
- })
- .collect::<Vec<_>>();
+ let messages = collect_tiktoken_messages(request);
match model {
Model::Custom { max_tokens, .. } => {
@@ -678,7 +682,6 @@ pub fn count_open_ai_tokens(
struct ConfigurationView {
api_key_editor: Entity<SingleLineInput>,
- api_url_editor: Entity<SingleLineInput>,
state: gpui::Entity<State>,
load_credentials_task: Option<Task<()>>,
}
@@ -691,23 +694,6 @@ impl ConfigurationView {
cx,
"sk-000000000000000000000000000000000000000000000000",
)
- .label("API key")
- });
-
- let api_url = AllLanguageModelSettings::get_global(cx)
- .openai
- .api_url
- .clone();
-
- let api_url_editor = cx.new(|cx| {
- let input = SingleLineInput::new(window, cx, open_ai::OPEN_AI_API_URL).label("API URL");
-
- if !api_url.is_empty() {
- input.editor.update(cx, |editor, cx| {
- editor.set_text(&*api_url, window, cx);
- });
- }
- input
});
cx.observe(&state, |_, _, cx| {
@@ -735,7 +721,6 @@ impl ConfigurationView {
Self {
api_key_editor,
- api_url_editor,
state,
load_credentials_task,
}
@@ -783,57 +768,6 @@ impl ConfigurationView {
cx.notify();
}
- fn save_api_url(&mut self, cx: &mut Context<Self>) {
- let api_url = self
- .api_url_editor
- .read(cx)
- .editor()
- .read(cx)
- .text(cx)
- .trim()
- .to_string();
-
- let current_url = AllLanguageModelSettings::get_global(cx)
- .openai
- .api_url
- .clone();
-
- let effective_current_url = if current_url.is_empty() {
- open_ai::OPEN_AI_API_URL
- } else {
- &current_url
- };
-
- if !api_url.is_empty() && api_url != effective_current_url {
- let fs = <dyn Fs>::global(cx);
- update_settings_file::<AllLanguageModelSettings>(fs, cx, move |settings, _| {
- if let Some(settings) = settings.openai.as_mut() {
- settings.api_url = Some(api_url.clone());
- } else {
- settings.openai = Some(OpenAiSettingsContent {
- api_url: Some(api_url.clone()),
- available_models: None,
- });
- }
- });
- }
- }
-
- fn reset_api_url(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- self.api_url_editor.update(cx, |input, cx| {
- input.editor.update(cx, |editor, cx| {
- editor.set_text("", window, cx);
- });
- });
- let fs = <dyn Fs>::global(cx);
- update_settings_file::<AllLanguageModelSettings>(fs, cx, |settings, _cx| {
- if let Some(settings) = settings.openai.as_mut() {
- settings.api_url = None;
- }
- });
- cx.notify();
- }
-
fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
!self.state.read(cx).is_authenticated()
}
@@ -846,7 +780,6 @@ impl Render for ConfigurationView {
let api_key_section = if self.should_render_editor(cx) {
v_flex()
.on_action(cx.listener(Self::save_api_key))
-
.child(Label::new("To use Zed's assistant with OpenAI, you need to add an API key. Follow these steps:"))
.child(
List::new()
@@ -910,59 +843,34 @@ impl Render for ConfigurationView {
.into_any()
};
- let custom_api_url_set =
- AllLanguageModelSettings::get_global(cx).openai.api_url != open_ai::OPEN_AI_API_URL;
-
- let api_url_section = if custom_api_url_set {
- h_flex()
- .mt_1()
- .p_1()
- .justify_between()
- .rounded_md()
- .border_1()
- .border_color(cx.theme().colors().border)
- .bg(cx.theme().colors().background)
- .child(
- h_flex()
- .gap_1()
- .child(Icon::new(IconName::Check).color(Color::Success))
- .child(Label::new("Custom API URL configured.")),
- )
- .child(
- Button::new("reset-api-url", "Reset API URL")
- .label_size(LabelSize::Small)
- .icon(IconName::Undo)
- .icon_size(IconSize::Small)
- .icon_position(IconPosition::Start)
- .layer(ElevationIndex::ModalSurface)
- .on_click(
- cx.listener(|this, _, window, cx| this.reset_api_url(window, cx)),
- ),
- )
- .into_any()
- } else {
- v_flex()
- .on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| {
- this.save_api_url(cx);
- cx.notify();
- }))
- .mt_2()
- .pt_2()
- .border_t_1()
- .border_color(cx.theme().colors().border_variant)
- .gap_1()
- .child(
- List::new()
- .child(InstructionListItem::text_only(
- "Optionally, you can change the base URL for the OpenAI API request.",
- ))
- .child(InstructionListItem::text_only(
- "Paste the new API endpoint below and hit enter",
- )),
- )
- .child(self.api_url_editor.clone())
- .into_any()
- };
+ let compatible_api_section = h_flex()
+ .mt_1p5()
+ .gap_0p5()
+ .flex_wrap()
+ .when(self.should_render_editor(cx), |this| {
+ this.pt_1p5()
+ .border_t_1()
+ .border_color(cx.theme().colors().border_variant)
+ })
+ .child(
+ h_flex()
+ .gap_2()
+ .child(
+ Icon::new(IconName::Info)
+ .size(IconSize::XSmall)
+ .color(Color::Muted),
+ )
+ .child(Label::new("Zed also supports OpenAI-compatible models.")),
+ )
+ .child(
+ Button::new("docs", "Learn More")
+ .icon(IconName::ArrowUpRight)
+ .icon_size(IconSize::XSmall)
+ .icon_color(Color::Muted)
+ .on_click(move |_, _window, cx| {
+ cx.open_url("https://zed.dev/docs/ai/configuration#openai-api-compatible")
+ }),
+ );
if self.load_credentials_task.is_some() {
div().child(Label::new("Loading credentials…")).into_any()
@@ -970,7 +878,7 @@ impl Render for ConfigurationView {
v_flex()
.size_full()
.child(api_key_section)
- .child(api_url_section)
+ .child(compatible_api_section)
.into_any()
}
}
@@ -0,0 +1,522 @@
+use anyhow::{Context as _, Result, anyhow};
+use credentials_provider::CredentialsProvider;
+
+use convert_case::{Case, Casing};
+use futures::{FutureExt, StreamExt, future::BoxFuture};
+use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window};
+use http_client::HttpClient;
+use language_model::{
+ AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
+ LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
+ LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
+ LanguageModelToolChoice, RateLimiter,
+};
+use menu;
+use open_ai::{ResponseStreamEvent, stream_completion};
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+use settings::{Settings, SettingsStore};
+use std::sync::Arc;
+
+use ui::{ElevationIndex, Tooltip, prelude::*};
+use ui_input::SingleLineInput;
+use util::ResultExt;
+
+use crate::AllLanguageModelSettings;
+use crate::provider::open_ai::{OpenAiEventMapper, into_open_ai};
+
+#[derive(Default, Clone, Debug, PartialEq)]
+pub struct OpenAiCompatibleSettings {
+ pub api_url: String,
+ pub available_models: Vec<AvailableModel>,
+}
+
+#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
+pub struct AvailableModel {
+ pub name: String,
+ pub display_name: Option<String>,
+ pub max_tokens: u64,
+ pub max_output_tokens: Option<u64>,
+ pub max_completion_tokens: Option<u64>,
+}
+
+pub struct OpenAiCompatibleLanguageModelProvider {
+ id: LanguageModelProviderId,
+ name: LanguageModelProviderName,
+ http_client: Arc<dyn HttpClient>,
+ state: gpui::Entity<State>,
+}
+
+pub struct State {
+ id: Arc<str>,
+ env_var_name: Arc<str>,
+ api_key: Option<String>,
+ api_key_from_env: bool,
+ settings: OpenAiCompatibleSettings,
+ _subscription: Subscription,
+}
+
+impl State {
+ fn is_authenticated(&self) -> bool {
+ self.api_key.is_some()
+ }
+
+ fn reset_api_key(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
+ let credentials_provider = <dyn CredentialsProvider>::global(cx);
+ let api_url = self.settings.api_url.clone();
+ cx.spawn(async move |this, cx| {
+ credentials_provider
+ .delete_credentials(&api_url, &cx)
+ .await
+ .log_err();
+ this.update(cx, |this, cx| {
+ this.api_key = None;
+ this.api_key_from_env = false;
+ cx.notify();
+ })
+ })
+ }
+
+ fn set_api_key(&mut self, api_key: String, cx: &mut Context<Self>) -> Task<Result<()>> {
+ let credentials_provider = <dyn CredentialsProvider>::global(cx);
+ let api_url = self.settings.api_url.clone();
+ cx.spawn(async move |this, cx| {
+ credentials_provider
+ .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx)
+ .await
+ .log_err();
+ this.update(cx, |this, cx| {
+ this.api_key = Some(api_key);
+ cx.notify();
+ })
+ })
+ }
+
+ fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ if self.is_authenticated() {
+ return Task::ready(Ok(()));
+ }
+
+ let credentials_provider = <dyn CredentialsProvider>::global(cx);
+ let env_var_name = self.env_var_name.clone();
+ let api_url = self.settings.api_url.clone();
+ cx.spawn(async move |this, cx| {
+ let (api_key, from_env) = if let Ok(api_key) = std::env::var(env_var_name.as_ref()) {
+ (api_key, true)
+ } else {
+ let (_, api_key) = credentials_provider
+ .read_credentials(&api_url, &cx)
+ .await?
+ .ok_or(AuthenticateError::CredentialsNotFound)?;
+ (
+ String::from_utf8(api_key).context("invalid API key")?,
+ false,
+ )
+ };
+ this.update(cx, |this, cx| {
+ this.api_key = Some(api_key);
+ this.api_key_from_env = from_env;
+ cx.notify();
+ })?;
+
+ Ok(())
+ })
+ }
+}
+
+impl OpenAiCompatibleLanguageModelProvider {
+ pub fn new(id: Arc<str>, http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
+ fn resolve_settings<'a>(id: &'a str, cx: &'a App) -> Option<&'a OpenAiCompatibleSettings> {
+ AllLanguageModelSettings::get_global(cx)
+ .openai_compatible
+ .get(id)
+ }
+
+ let state = cx.new(|cx| State {
+ id: id.clone(),
+ env_var_name: format!("{}_API_KEY", id).to_case(Case::Constant).into(),
+ settings: resolve_settings(&id, cx).cloned().unwrap_or_default(),
+ api_key: None,
+ api_key_from_env: false,
+ _subscription: cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
+ let Some(settings) = resolve_settings(&this.id, cx) else {
+ return;
+ };
+ if &this.settings != settings {
+ this.settings = settings.clone();
+ cx.notify();
+ }
+ }),
+ });
+
+ Self {
+ id: id.clone().into(),
+ name: id.into(),
+ http_client,
+ state,
+ }
+ }
+
+ fn create_language_model(&self, model: AvailableModel) -> Arc<dyn LanguageModel> {
+ Arc::new(OpenAiCompatibleLanguageModel {
+ id: LanguageModelId::from(model.name.clone()),
+ provider_id: self.id.clone(),
+ provider_name: self.name.clone(),
+ model,
+ state: self.state.clone(),
+ http_client: self.http_client.clone(),
+ request_limiter: RateLimiter::new(4),
+ })
+ }
+}
+
+impl LanguageModelProviderState for OpenAiCompatibleLanguageModelProvider {
+ type ObservableEntity = State;
+
+ fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
+ Some(self.state.clone())
+ }
+}
+
+impl LanguageModelProvider for OpenAiCompatibleLanguageModelProvider {
+ fn id(&self) -> LanguageModelProviderId {
+ self.id.clone()
+ }
+
+ fn name(&self) -> LanguageModelProviderName {
+ self.name.clone()
+ }
+
+ fn icon(&self) -> IconName {
+ IconName::AiOpenAiCompat
+ }
+
+ fn default_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
+ self.state
+ .read(cx)
+ .settings
+ .available_models
+ .first()
+ .map(|model| self.create_language_model(model.clone()))
+ }
+
+ fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
+ None
+ }
+
+ fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
+ self.state
+ .read(cx)
+ .settings
+ .available_models
+ .iter()
+ .map(|model| self.create_language_model(model.clone()))
+ .collect()
+ }
+
+ fn is_authenticated(&self, cx: &App) -> bool {
+ self.state.read(cx).is_authenticated()
+ }
+
+ fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
+ self.state.update(cx, |state, cx| state.authenticate(cx))
+ }
+
+ fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView {
+ cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
+ .into()
+ }
+
+ fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
+ self.state.update(cx, |state, cx| state.reset_api_key(cx))
+ }
+}
+
+pub struct OpenAiCompatibleLanguageModel {
+ id: LanguageModelId,
+ provider_id: LanguageModelProviderId,
+ provider_name: LanguageModelProviderName,
+ model: AvailableModel,
+ state: gpui::Entity<State>,
+ http_client: Arc<dyn HttpClient>,
+ request_limiter: RateLimiter,
+}
+
+impl OpenAiCompatibleLanguageModel {
+ fn stream_completion(
+ &self,
+ request: open_ai::Request,
+ cx: &AsyncApp,
+ ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
+ {
+ let http_client = self.http_client.clone();
+ let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, _| {
+ (state.api_key.clone(), state.settings.api_url.clone())
+ }) else {
+ return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
+ };
+
+ let provider = self.provider_name.clone();
+ let future = self.request_limiter.stream(async move {
+ let Some(api_key) = api_key else {
+ return Err(LanguageModelCompletionError::NoApiKey { provider });
+ };
+ let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request);
+ let response = request.await?;
+ Ok(response)
+ });
+
+ async move { Ok(future.await?.boxed()) }.boxed()
+ }
+}
+
+impl LanguageModel for OpenAiCompatibleLanguageModel {
+ fn id(&self) -> LanguageModelId {
+ self.id.clone()
+ }
+
+ fn name(&self) -> LanguageModelName {
+ LanguageModelName::from(
+ self.model
+ .display_name
+ .clone()
+ .unwrap_or_else(|| self.model.name.clone()),
+ )
+ }
+
+ fn provider_id(&self) -> LanguageModelProviderId {
+ self.provider_id.clone()
+ }
+
+ fn provider_name(&self) -> LanguageModelProviderName {
+ self.provider_name.clone()
+ }
+
+ fn supports_tools(&self) -> bool {
+ true
+ }
+
+ fn supports_images(&self) -> bool {
+ false
+ }
+
+ fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
+ match choice {
+ LanguageModelToolChoice::Auto => true,
+ LanguageModelToolChoice::Any => true,
+ LanguageModelToolChoice::None => true,
+ }
+ }
+
+ fn telemetry_id(&self) -> String {
+ format!("openai/{}", self.model.name)
+ }
+
+ fn max_token_count(&self) -> u64 {
+ self.model.max_tokens
+ }
+
+ fn max_output_tokens(&self) -> Option<u64> {
+ self.model.max_output_tokens
+ }
+
+ fn count_tokens(
+ &self,
+ request: LanguageModelRequest,
+ cx: &App,
+ ) -> BoxFuture<'static, Result<u64>> {
+ let max_token_count = self.max_token_count();
+ cx.background_spawn(async move {
+ let messages = super::open_ai::collect_tiktoken_messages(request);
+ let model = if max_token_count >= 100_000 {
+ // If the max token count is 100k or more, it is likely the o200k_base tokenizer from gpt-4o
+ "gpt-4o"
+ } else {
+ // Otherwise fallback to gpt-4, since only cl100k_base and o200k_base are
+ // supported with this tiktoken method
+ "gpt-4"
+ };
+ tiktoken_rs::num_tokens_from_messages(model, &messages).map(|tokens| tokens as u64)
+ })
+ .boxed()
+ }
+
+ fn stream_completion(
+ &self,
+ request: LanguageModelRequest,
+ cx: &AsyncApp,
+ ) -> BoxFuture<
+ 'static,
+ Result<
+ futures::stream::BoxStream<
+ 'static,
+ Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
+ >,
+ LanguageModelCompletionError,
+ >,
+ > {
+ let request = into_open_ai(request, &self.model.name, true, self.max_output_tokens());
+ let completions = self.stream_completion(request, cx);
+ async move {
+ let mapper = OpenAiEventMapper::new();
+ Ok(mapper.map_stream(completions.await?).boxed())
+ }
+ .boxed()
+ }
+}
+
+struct ConfigurationView {
+ api_key_editor: Entity<SingleLineInput>,
+ state: gpui::Entity<State>,
+ load_credentials_task: Option<Task<()>>,
+}
+
+impl ConfigurationView {
+ fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
+ let api_key_editor = cx.new(|cx| {
+ SingleLineInput::new(
+ window,
+ cx,
+ "000000000000000000000000000000000000000000000000000",
+ )
+ });
+
+ cx.observe(&state, |_, _, cx| {
+ cx.notify();
+ })
+ .detach();
+
+ let load_credentials_task = Some(cx.spawn_in(window, {
+ let state = state.clone();
+ async move |this, cx| {
+ if let Some(task) = state
+ .update(cx, |state, cx| state.authenticate(cx))
+ .log_err()
+ {
+ // We don't log an error, because "not signed in" is also an error.
+ let _ = task.await;
+ }
+ this.update(cx, |this, cx| {
+ this.load_credentials_task = None;
+ cx.notify();
+ })
+ .log_err();
+ }
+ }));
+
+ Self {
+ api_key_editor,
+ state,
+ load_credentials_task,
+ }
+ }
+
+ fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
+ let api_key = self
+ .api_key_editor
+ .read(cx)
+ .editor()
+ .read(cx)
+ .text(cx)
+ .trim()
+ .to_string();
+
+ // Don't proceed if no API key is provided and we're not authenticated
+ if api_key.is_empty() && !self.state.read(cx).is_authenticated() {
+ return;
+ }
+
+ let state = self.state.clone();
+ cx.spawn_in(window, async move |_, cx| {
+ state
+ .update(cx, |state, cx| state.set_api_key(api_key, cx))?
+ .await
+ })
+ .detach_and_log_err(cx);
+
+ cx.notify();
+ }
+
+ fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ self.api_key_editor.update(cx, |input, cx| {
+ input.editor.update(cx, |editor, cx| {
+ editor.set_text("", window, cx);
+ });
+ });
+
+ let state = self.state.clone();
+ cx.spawn_in(window, async move |_, cx| {
+ state.update(cx, |state, cx| state.reset_api_key(cx))?.await
+ })
+ .detach_and_log_err(cx);
+
+ cx.notify();
+ }
+
+ fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
+ !self.state.read(cx).is_authenticated()
+ }
+}
+
+impl Render for ConfigurationView {
+ fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let env_var_set = self.state.read(cx).api_key_from_env;
+ let env_var_name = self.state.read(cx).env_var_name.clone();
+
+ let api_key_section = if self.should_render_editor(cx) {
+ v_flex()
+ .on_action(cx.listener(Self::save_api_key))
+ .child(Label::new("To use Zed's assistant with an OpenAI compatible provider, you need to add an API key."))
+ .child(
+ div()
+ .pt(DynamicSpacing::Base04.rems(cx))
+ .child(self.api_key_editor.clone())
+ )
+ .child(
+ Label::new(
+ format!("You can also assign the {env_var_name} environment variable and restart Zed."),
+ )
+ .size(LabelSize::Small).color(Color::Muted),
+ )
+ .into_any()
+ } else {
+ h_flex()
+ .mt_1()
+ .p_1()
+ .justify_between()
+ .rounded_md()
+ .border_1()
+ .border_color(cx.theme().colors().border)
+ .bg(cx.theme().colors().background)
+ .child(
+ h_flex()
+ .gap_1()
+ .child(Icon::new(IconName::Check).color(Color::Success))
+ .child(Label::new(if env_var_set {
+ format!("API key set in {env_var_name} environment variable.")
+ } else {
+ "API key configured.".to_string()
+ })),
+ )
+ .child(
+ Button::new("reset-api-key", "Reset API Key")
+ .label_size(LabelSize::Small)
+ .icon(IconName::Undo)
+ .icon_size(IconSize::Small)
+ .icon_position(IconPosition::Start)
+ .layer(ElevationIndex::ModalSurface)
+ .when(env_var_set, |this| {
+ this.tooltip(Tooltip::text(format!("To reset your API key, unset the {env_var_name} environment variable.")))
+ })
+ .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
+ )
+ .into_any()
+ };
+
+ if self.load_credentials_task.is_some() {
+ div().child(Label::new("Loading credentials…")).into_any()
+ } else {
+ v_flex().size_full().child(api_key_section).into_any()
+ }
+ }
+}
@@ -1,4 +1,7 @@
+use std::sync::Arc;
+
use anyhow::Result;
+use collections::HashMap;
use gpui::App;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
@@ -15,13 +18,14 @@ use crate::provider::{
mistral::MistralSettings,
ollama::OllamaSettings,
open_ai::OpenAiSettings,
+ open_ai_compatible::OpenAiCompatibleSettings,
open_router::OpenRouterSettings,
vercel::VercelSettings,
x_ai::XAiSettings,
};
/// Initializes the language model settings.
-pub fn init(cx: &mut App) {
+pub fn init_settings(cx: &mut App) {
AllLanguageModelSettings::register(cx);
}
@@ -36,6 +40,7 @@ pub struct AllLanguageModelSettings {
pub ollama: OllamaSettings,
pub open_router: OpenRouterSettings,
pub openai: OpenAiSettings,
+ pub openai_compatible: HashMap<Arc<str>, OpenAiCompatibleSettings>,
pub vercel: VercelSettings,
pub x_ai: XAiSettings,
pub zed_dot_dev: ZedDotDevSettings,
@@ -52,6 +57,7 @@ pub struct AllLanguageModelSettingsContent {
pub ollama: Option<OllamaSettingsContent>,
pub open_router: Option<OpenRouterSettingsContent>,
pub openai: Option<OpenAiSettingsContent>,
+ pub openai_compatible: Option<HashMap<Arc<str>, OpenAiCompatibleSettingsContent>>,
pub vercel: Option<VercelSettingsContent>,
pub x_ai: Option<XAiSettingsContent>,
#[serde(rename = "zed.dev")]
@@ -103,6 +109,12 @@ pub struct OpenAiSettingsContent {
pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
+pub struct OpenAiCompatibleSettingsContent {
+ pub api_url: String,
+ pub available_models: Vec<provider::open_ai_compatible::AvailableModel>,
+}
+
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct VercelSettingsContent {
pub api_url: Option<String>,
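For reference, a hypothetical `settings.json` entry exercising the new `openai_compatible` map, placed alongside the existing `openai` entry; the provider name, URL, and model are placeholders, and the field names come from `OpenAiCompatibleSettingsContent` above:

"openai_compatible": {
  "my-provider": {
    "api_url": "https://api.example.com/v1",
    "available_models": [
      {
        "name": "example-model",
        "display_name": "Example Model",
        "max_tokens": 128000,
        "max_output_tokens": 32000
      }
    ]
  }
}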
@@ -226,6 +238,19 @@ impl settings::Settings for AllLanguageModelSettings {
openai.as_ref().and_then(|s| s.available_models.clone()),
);
+ // OpenAI Compatible
+ if let Some(openai_compatible) = value.openai_compatible.clone() {
+ for (id, openai_compatible_settings) in openai_compatible {
+ settings.openai_compatible.insert(
+ id,
+ OpenAiCompatibleSettings {
+ api_url: openai_compatible_settings.api_url,
+ available_models: openai_compatible_settings.available_models,
+ },
+ );
+ }
+ }
+
// Vercel
let vercel = value.vercel.clone();
merge(
@@ -1211,7 +1211,7 @@ impl MultiBuffer {
let buffer = buffer_state.buffer.read(cx);
for range in buffer.edited_ranges_for_transaction_id::<D>(*buffer_transaction) {
for excerpt_id in &buffer_state.excerpts {
- cursor.seek(excerpt_id, Bias::Left, &());
+ cursor.seek(excerpt_id, Bias::Left);
if let Some(excerpt) = cursor.item() {
if excerpt.locator == *excerpt_id {
let excerpt_buffer_start =
@@ -1322,7 +1322,7 @@ impl MultiBuffer {
let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id);
let end_locator = snapshot.excerpt_locator_for_id(selection.end.excerpt_id);
- cursor.seek(&Some(start_locator), Bias::Left, &());
+ cursor.seek(&Some(start_locator), Bias::Left);
while let Some(excerpt) = cursor.item() {
if excerpt.locator > *end_locator {
break;
@@ -1347,7 +1347,7 @@ impl MultiBuffer {
goal: selection.goal,
});
- cursor.next(&());
+ cursor.next();
}
}
@@ -1769,13 +1769,13 @@ impl MultiBuffer {
let mut next_excerpt_id = move || ExcerptId(post_inc(&mut next_excerpt_id));
let mut excerpts_cursor = snapshot.excerpts.cursor::<Option<&Locator>>(&());
- excerpts_cursor.next(&());
+ excerpts_cursor.next();
loop {
let new = new_iter.peek();
let existing = if let Some(existing_id) = existing_iter.peek() {
let locator = snapshot.excerpt_locator_for_id(*existing_id);
- excerpts_cursor.seek_forward(&Some(locator), Bias::Left, &());
+ excerpts_cursor.seek_forward(&Some(locator), Bias::Left);
if let Some(excerpt) = excerpts_cursor.item() {
if excerpt.buffer_id != buffer_snapshot.remote_id() {
to_remove.push(*existing_id);
@@ -1970,7 +1970,7 @@ impl MultiBuffer {
let mut prev_locator = snapshot.excerpt_locator_for_id(prev_excerpt_id).clone();
let mut new_excerpt_ids = mem::take(&mut snapshot.excerpt_ids);
let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>(&());
- let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right, &());
+ let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right);
prev_locator = cursor.start().unwrap_or(Locator::min_ref()).clone();
let edit_start = ExcerptOffset::new(new_excerpts.summary().text.len);
@@ -2019,7 +2019,7 @@ impl MultiBuffer {
let edit_end = ExcerptOffset::new(new_excerpts.summary().text.len);
- let suffix = cursor.suffix(&());
+ let suffix = cursor.suffix();
let changed_trailing_excerpt = suffix.is_empty();
new_excerpts.append(suffix, &());
drop(cursor);
@@ -2104,7 +2104,7 @@ impl MultiBuffer {
.into_iter()
.flatten()
{
- cursor.seek_forward(&Some(locator), Bias::Left, &());
+ cursor.seek_forward(&Some(locator), Bias::Left);
if let Some(excerpt) = cursor.item() {
if excerpt.locator == *locator {
excerpts.push((excerpt.id, excerpt.range.clone()));
@@ -2124,25 +2124,25 @@ impl MultiBuffer {
let mut diff_transforms = snapshot
.diff_transforms
.cursor::<(ExcerptDimension<Point>, OutputDimension<Point>)>(&());
- diff_transforms.next(&());
+ diff_transforms.next();
let locators = buffers
.get(&buffer_id)
.into_iter()
.flat_map(|state| &state.excerpts);
let mut result = Vec::new();
for locator in locators {
- excerpts.seek_forward(&Some(locator), Bias::Left, &());
+ excerpts.seek_forward(&Some(locator), Bias::Left);
if let Some(excerpt) = excerpts.item() {
if excerpt.locator == *locator {
let excerpt_start = excerpts.start().1.clone();
let excerpt_end =
ExcerptDimension(excerpt_start.0 + excerpt.text_summary.lines);
- diff_transforms.seek_forward(&excerpt_start, Bias::Left, &());
+ diff_transforms.seek_forward(&excerpt_start, Bias::Left);
let overshoot = excerpt_start.0 - diff_transforms.start().0.0;
let start = diff_transforms.start().1.0 + overshoot;
- diff_transforms.seek_forward(&excerpt_end, Bias::Right, &());
+ diff_transforms.seek_forward(&excerpt_end, Bias::Right);
let overshoot = excerpt_end.0 - diff_transforms.start().0.0;
let end = diff_transforms.start().1.0 + overshoot;
@@ -2290,7 +2290,7 @@ impl MultiBuffer {
self.paths_by_excerpt.remove(&excerpt_id);
// Seek to the next excerpt to remove, preserving any preceding excerpts.
let locator = snapshot.excerpt_locator_for_id(excerpt_id);
- new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &());
+ new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), &());
if let Some(mut excerpt) = cursor.item() {
if excerpt.id != excerpt_id {
@@ -2311,7 +2311,7 @@ impl MultiBuffer {
removed_buffer_ids.push(excerpt.buffer_id);
}
}
- cursor.next(&());
+ cursor.next();
// Skip over any subsequent excerpts that are also removed.
if let Some(&next_excerpt_id) = excerpt_ids.peek() {
@@ -2344,7 +2344,7 @@ impl MultiBuffer {
});
}
}
- let suffix = cursor.suffix(&());
+ let suffix = cursor.suffix();
let changed_trailing_excerpt = suffix.is_empty();
new_excerpts.append(suffix, &());
drop(cursor);
@@ -2493,7 +2493,7 @@ impl MultiBuffer {
let mut cursor = snapshot
.excerpts
.cursor::<(Option<&Locator>, ExcerptOffset)>(&());
- cursor.seek_forward(&Some(locator), Bias::Left, &());
+ cursor.seek_forward(&Some(locator), Bias::Left);
if let Some(excerpt) = cursor.item() {
if excerpt.locator == *locator {
let excerpt_buffer_range = excerpt.range.context.to_offset(&excerpt.buffer);
@@ -2724,7 +2724,7 @@ impl MultiBuffer {
let snapshot = self.read(cx);
let mut cursor = snapshot.diff_transforms.cursor::<usize>(&());
let offset_range = range.to_offset(&snapshot);
- cursor.seek(&offset_range.start, Bias::Left, &());
+ cursor.seek(&offset_range.start, Bias::Left);
while let Some(item) = cursor.item() {
if *cursor.start() >= offset_range.end && *cursor.start() > offset_range.start {
break;
@@ -2732,7 +2732,7 @@ impl MultiBuffer {
if item.hunk_info().is_some() {
return true;
}
- cursor.next(&());
+ cursor.next();
}
false
}
@@ -2746,7 +2746,7 @@ impl MultiBuffer {
let end = snapshot.point_to_offset(Point::new(range.end.row + 1, 0));
let start = start.saturating_sub(1);
let end = snapshot.len().min(end + 1);
- cursor.seek(&start, Bias::Right, &());
+ cursor.seek(&start, Bias::Right);
while let Some(item) = cursor.item() {
if *cursor.start() >= end {
break;
@@ -2754,7 +2754,7 @@ impl MultiBuffer {
if item.hunk_info().is_some() {
return true;
}
- cursor.next(&());
+ cursor.next();
}
}
false
@@ -2848,7 +2848,7 @@ impl MultiBuffer {
.cursor::<(Option<&Locator>, ExcerptOffset)>(&());
let mut edits = Vec::<Edit<ExcerptOffset>>::new();
- let prefix = cursor.slice(&Some(locator), Bias::Left, &());
+ let prefix = cursor.slice(&Some(locator), Bias::Left);
new_excerpts.append(prefix, &());
let mut excerpt = cursor.item().unwrap().clone();
@@ -2883,9 +2883,9 @@ impl MultiBuffer {
new_excerpts.push(excerpt, &());
- cursor.next(&());
+ cursor.next();
- new_excerpts.append(cursor.suffix(&()), &());
+ new_excerpts.append(cursor.suffix(), &());
drop(cursor);
snapshot.excerpts = new_excerpts;
@@ -2925,7 +2925,7 @@ impl MultiBuffer {
let mut edits = Vec::<Edit<ExcerptOffset>>::new();
for locator in &locators {
- let prefix = cursor.slice(&Some(locator), Bias::Left, &());
+ let prefix = cursor.slice(&Some(locator), Bias::Left);
new_excerpts.append(prefix, &());
let mut excerpt = cursor.item().unwrap().clone();
@@ -2987,10 +2987,10 @@ impl MultiBuffer {
new_excerpts.push(excerpt, &());
- cursor.next(&());
+ cursor.next();
}
- new_excerpts.append(cursor.suffix(&()), &());
+ new_excerpts.append(cursor.suffix(), &());
drop(cursor);
snapshot.excerpts = new_excerpts;
@@ -3070,7 +3070,7 @@ impl MultiBuffer {
.cursor::<(Option<&Locator>, ExcerptOffset)>(&());
for (locator, buffer, buffer_edited) in excerpts_to_edit {
- new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &());
+ new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), &());
let old_excerpt = cursor.item().unwrap();
let buffer = buffer.read(cx);
let buffer_id = buffer.remote_id();
@@ -3112,9 +3112,9 @@ impl MultiBuffer {
}
new_excerpts.push(new_excerpt, &());
- cursor.next(&());
+ cursor.next();
}
- new_excerpts.append(cursor.suffix(&()), &());
+ new_excerpts.append(cursor.suffix(), &());
drop(cursor);
snapshot.excerpts = new_excerpts;
@@ -3145,23 +3145,22 @@ impl MultiBuffer {
let mut excerpt_edits = excerpt_edits.into_iter().peekable();
while let Some(edit) = excerpt_edits.next() {
- excerpts.seek_forward(&edit.new.start, Bias::Right, &());
+ excerpts.seek_forward(&edit.new.start, Bias::Right);
if excerpts.item().is_none() && *excerpts.start() == edit.new.start {
- excerpts.prev(&());
+ excerpts.prev();
}
// Keep any transforms that are before the edit.
if at_transform_boundary {
at_transform_boundary = false;
- let transforms_before_edit =
- old_diff_transforms.slice(&edit.old.start, Bias::Left, &());
+ let transforms_before_edit = old_diff_transforms.slice(&edit.old.start, Bias::Left);
self.append_diff_transforms(&mut new_diff_transforms, transforms_before_edit);
if let Some(transform) = old_diff_transforms.item() {
- if old_diff_transforms.end(&()).0 == edit.old.start
+ if old_diff_transforms.end().0 == edit.old.start
&& old_diff_transforms.start().0 < edit.old.start
{
self.push_diff_transform(&mut new_diff_transforms, transform.clone());
- old_diff_transforms.next(&());
+ old_diff_transforms.next();
}
}
}
@@ -3203,7 +3202,7 @@ impl MultiBuffer {
// then recreate the content up to the end of this transform, to prepare
// for reusing additional slices of the old transforms.
if excerpt_edits.peek().map_or(true, |next_edit| {
- next_edit.old.start >= old_diff_transforms.end(&()).0
+ next_edit.old.start >= old_diff_transforms.end().0
}) {
let keep_next_old_transform = (old_diff_transforms.start().0 >= edit.old.end)
&& match old_diff_transforms.item() {
@@ -3218,8 +3217,8 @@ impl MultiBuffer {
let mut excerpt_offset = edit.new.end;
if !keep_next_old_transform {
- excerpt_offset += old_diff_transforms.end(&()).0 - edit.old.end;
- old_diff_transforms.next(&());
+ excerpt_offset += old_diff_transforms.end().0 - edit.old.end;
+ old_diff_transforms.next();
}
old_expanded_hunks.clear();
@@ -3234,7 +3233,7 @@ impl MultiBuffer {
}
// Keep any transforms that are after the last edit.
- self.append_diff_transforms(&mut new_diff_transforms, old_diff_transforms.suffix(&()));
+ self.append_diff_transforms(&mut new_diff_transforms, old_diff_transforms.suffix());
// Ensure there's always at least one buffer content transform.
if new_diff_transforms.is_empty() {
@@ -3283,10 +3282,10 @@ impl MultiBuffer {
);
old_expanded_hunks.insert(hunk_info);
}
- if old_diff_transforms.end(&()).0 > edit.old.end {
+ if old_diff_transforms.end().0 > edit.old.end {
break;
}
- old_diff_transforms.next(&());
+ old_diff_transforms.next();
}
// Avoid querying diff hunks if there's no possibility of hunks being expanded.
@@ -3413,8 +3412,8 @@ impl MultiBuffer {
}
}
- if excerpts.end(&()) <= edit.new.end {
- excerpts.next(&());
+ if excerpts.end() <= edit.new.end {
+ excerpts.next();
} else {
break;
}
@@ -3439,9 +3438,9 @@ impl MultiBuffer {
*summary,
) {
let mut cursor = subtree.cursor::<()>(&());
- cursor.next(&());
- cursor.next(&());
- new_transforms.append(cursor.suffix(&()), &());
+ cursor.next();
+ cursor.next();
+ new_transforms.append(cursor.suffix(), &());
return;
}
}
@@ -4715,14 +4714,14 @@ impl MultiBufferSnapshot {
{
let range = range.start.to_offset(self)..range.end.to_offset(self);
let mut cursor = self.diff_transforms.cursor::<(usize, ExcerptOffset)>(&());
- cursor.seek(&range.start, Bias::Right, &());
+ cursor.seek(&range.start, Bias::Right);
let Some(first_transform) = cursor.item() else {
return D::from_text_summary(&TextSummary::default());
};
let diff_transform_start = cursor.start().0;
- let diff_transform_end = cursor.end(&()).0;
+ let diff_transform_end = cursor.end().0;
let diff_start = range.start;
let start_overshoot = diff_start - diff_transform_start;
let end_overshoot = std::cmp::min(range.end, diff_transform_end) - diff_transform_start;
@@ -4765,12 +4764,10 @@ impl MultiBufferSnapshot {
return result;
}
- cursor.next(&());
- result.add_assign(&D::from_text_summary(&cursor.summary(
- &range.end,
- Bias::Right,
- &(),
- )));
+ cursor.next();
+ result.add_assign(&D::from_text_summary(
+ &cursor.summary(&range.end, Bias::Right),
+ ));
let Some(last_transform) = cursor.item() else {
return result;
@@ -4813,9 +4810,9 @@ impl MultiBufferSnapshot {
// let mut range = range.start..range.end;
let mut summary = D::zero(&());
let mut cursor = self.excerpts.cursor::<ExcerptOffset>(&());
- cursor.seek(&range.start, Bias::Right, &());
+ cursor.seek(&range.start, Bias::Right);
if let Some(excerpt) = cursor.item() {
- let mut end_before_newline = cursor.end(&());
+ let mut end_before_newline = cursor.end();
if excerpt.has_trailing_newline {
end_before_newline -= ExcerptOffset::new(1);
}
@@ -4834,13 +4831,13 @@ impl MultiBufferSnapshot {
summary.add_assign(&D::from_text_summary(&TextSummary::from("\n")));
}
- cursor.next(&());
+ cursor.next();
}
if range.end > *cursor.start() {
summary.add_assign(
&cursor
- .summary::<_, ExcerptDimension<D>>(&range.end, Bias::Right, &())
+ .summary::<_, ExcerptDimension<D>>(&range.end, Bias::Right)
.0,
);
if let Some(excerpt) = cursor.item() {
@@ -4876,11 +4873,11 @@ impl MultiBufferSnapshot {
D: TextDimension + Ord + Sub<D, Output = D>,
{
loop {
- let transform_end_position = diff_transforms.end(&()).0.0;
+ let transform_end_position = diff_transforms.end().0.0;
let at_transform_end =
excerpt_position == transform_end_position && diff_transforms.item().is_some();
if at_transform_end && anchor.text_anchor.bias == Bias::Right {
- diff_transforms.next(&());
+ diff_transforms.next();
continue;
}
@@ -4906,7 +4903,7 @@ impl MultiBufferSnapshot {
);
position.add_assign(&position_in_hunk);
} else if at_transform_end {
- diff_transforms.next(&());
+ diff_transforms.next();
continue;
}
}
@@ -4915,7 +4912,7 @@ impl MultiBufferSnapshot {
}
_ => {
if at_transform_end && anchor.diff_base_anchor.is_some() {
- diff_transforms.next(&());
+ diff_transforms.next();
continue;
}
let overshoot = excerpt_position - diff_transforms.start().0.0;
@@ -4933,9 +4930,9 @@ impl MultiBufferSnapshot {
.cursor::<(Option<&Locator>, ExcerptOffset)>(&());
let locator = self.excerpt_locator_for_id(anchor.excerpt_id);
- cursor.seek(&Some(locator), Bias::Left, &());
+ cursor.seek(&Some(locator), Bias::Left);
if cursor.item().is_none() {
- cursor.next(&());
+ cursor.next();
}
let mut position = cursor.start().1;
@@ -4975,7 +4972,7 @@ impl MultiBufferSnapshot {
let mut diff_transforms_cursor = self
.diff_transforms
.cursor::<(ExcerptDimension<D>, OutputDimension<D>)>(&());
- diff_transforms_cursor.next(&());
+ diff_transforms_cursor.next();
let mut summaries = Vec::new();
while let Some(anchor) = anchors.peek() {
@@ -4990,9 +4987,9 @@ impl MultiBufferSnapshot {
});
let locator = self.excerpt_locator_for_id(excerpt_id);
- cursor.seek_forward(locator, Bias::Left, &());
+ cursor.seek_forward(locator, Bias::Left);
if cursor.item().is_none() {
- cursor.next(&());
+ cursor.next();
}
let excerpt_start_position = D::from_text_summary(&cursor.start().text);
@@ -5022,11 +5019,8 @@ impl MultiBufferSnapshot {
}
if position > diff_transforms_cursor.start().0.0 {
- diff_transforms_cursor.seek_forward(
- &ExcerptDimension(position),
- Bias::Left,
- &(),
- );
+ diff_transforms_cursor
+ .seek_forward(&ExcerptDimension(position), Bias::Left);
}
summaries.push(self.resolve_summary_for_anchor(
@@ -5036,11 +5030,8 @@ impl MultiBufferSnapshot {
));
}
} else {
- diff_transforms_cursor.seek_forward(
- &ExcerptDimension(excerpt_start_position),
- Bias::Left,
- &(),
- );
+ diff_transforms_cursor
+ .seek_forward(&ExcerptDimension(excerpt_start_position), Bias::Left);
let position = self.resolve_summary_for_anchor(
&Anchor::max(),
excerpt_start_position,
@@ -5099,7 +5090,7 @@ impl MultiBufferSnapshot {
{
let mut anchors = anchors.into_iter().enumerate().peekable();
let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&());
- cursor.next(&());
+ cursor.next();
let mut result = Vec::new();
@@ -5108,10 +5099,10 @@ impl MultiBufferSnapshot {
// Find the location where this anchor's excerpt should be.
let old_locator = self.excerpt_locator_for_id(old_excerpt_id);
- cursor.seek_forward(&Some(old_locator), Bias::Left, &());
+ cursor.seek_forward(&Some(old_locator), Bias::Left);
if cursor.item().is_none() {
- cursor.next(&());
+ cursor.next();
}
let next_excerpt = cursor.item();
@@ -5211,13 +5202,13 @@ impl MultiBufferSnapshot {
// Find the given position in the diff transforms. Determine the corresponding
// offset in the excerpts, and whether the position is within a deleted hunk.
let mut diff_transforms = self.diff_transforms.cursor::<(usize, ExcerptOffset)>(&());
- diff_transforms.seek(&offset, Bias::Right, &());
+ diff_transforms.seek(&offset, Bias::Right);
if offset == diff_transforms.start().0 && bias == Bias::Left {
if let Some(prev_item) = diff_transforms.prev_item() {
match prev_item {
DiffTransform::DeletedHunk { .. } => {
- diff_transforms.prev(&());
+ diff_transforms.prev();
}
_ => {}
}
@@ -5260,13 +5251,13 @@ impl MultiBufferSnapshot {
let mut excerpts = self
.excerpts
.cursor::<(ExcerptOffset, Option<ExcerptId>)>(&());
- excerpts.seek(&excerpt_offset, Bias::Right, &());
+ excerpts.seek(&excerpt_offset, Bias::Right);
if excerpts.item().is_none() && excerpt_offset == excerpts.start().0 && bias == Bias::Left {
- excerpts.prev(&());
+ excerpts.prev();
}
if let Some(excerpt) = excerpts.item() {
let mut overshoot = excerpt_offset.saturating_sub(excerpts.start().0).value;
- if excerpt.has_trailing_newline && excerpt_offset == excerpts.end(&()).0 {
+ if excerpt.has_trailing_newline && excerpt_offset == excerpts.end().0 {
overshoot -= 1;
bias = Bias::Right;
}
@@ -5297,7 +5288,7 @@ impl MultiBufferSnapshot {
let excerpt_id = self.latest_excerpt_id(excerpt_id);
let locator = self.excerpt_locator_for_id(excerpt_id);
let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&());
- cursor.seek(locator, Bias::Left, &());
+ cursor.seek(locator, Bias::Left);
if let Some(excerpt) = cursor.item() {
if excerpt.id == excerpt_id {
let text_anchor = excerpt.clip_anchor(text_anchor);
@@ -5351,13 +5342,13 @@ impl MultiBufferSnapshot {
let mut excerpts = self
.excerpts
.cursor::<(Option<&Locator>, ExcerptDimension<usize>)>(&());
- excerpts.seek(&Some(start_locator), Bias::Left, &());
- excerpts.prev(&());
+ excerpts.seek(&Some(start_locator), Bias::Left);
+ excerpts.prev();
let mut diff_transforms = self.diff_transforms.cursor::<DiffTransforms<usize>>(&());
- diff_transforms.seek(&excerpts.start().1, Bias::Left, &());
- if diff_transforms.end(&()).excerpt_dimension < excerpts.start().1 {
- diff_transforms.next(&());
+ diff_transforms.seek(&excerpts.start().1, Bias::Left);
+ if diff_transforms.end().excerpt_dimension < excerpts.start().1 {
+ diff_transforms.next();
}
let excerpt = excerpts.item()?;
@@ -6193,7 +6184,7 @@ impl MultiBufferSnapshot {
Locator::max_ref()
} else {
let mut cursor = self.excerpt_ids.cursor::<ExcerptId>(&());
- cursor.seek(&id, Bias::Left, &());
+ cursor.seek(&id, Bias::Left);
if let Some(entry) = cursor.item() {
if entry.id == id {
return &entry.locator;
@@ -6229,7 +6220,7 @@ impl MultiBufferSnapshot {
let mut cursor = self.excerpt_ids.cursor::<ExcerptId>(&());
for id in sorted_ids {
- if cursor.seek_forward(&id, Bias::Left, &()) {
+ if cursor.seek_forward(&id, Bias::Left) {
locators.push(cursor.item().unwrap().locator.clone());
} else {
panic!("invalid excerpt id {:?}", id);
@@ -6253,16 +6244,16 @@ impl MultiBufferSnapshot {
.excerpts
.cursor::<(Option<&Locator>, ExcerptDimension<Point>)>(&());
let locator = self.excerpt_locator_for_id(excerpt_id);
- if cursor.seek(&Some(locator), Bias::Left, &()) {
+ if cursor.seek(&Some(locator), Bias::Left) {
let start = cursor.start().1.clone();
- let end = cursor.end(&()).1;
+ let end = cursor.end().1;
let mut diff_transforms = self
.diff_transforms
.cursor::<(ExcerptDimension<Point>, OutputDimension<Point>)>(&());
- diff_transforms.seek(&start, Bias::Left, &());
+ diff_transforms.seek(&start, Bias::Left);
let overshoot = start.0 - diff_transforms.start().0.0;
let start = diff_transforms.start().1.0 + overshoot;
- diff_transforms.seek(&end, Bias::Right, &());
+ diff_transforms.seek(&end, Bias::Right);
let overshoot = end.0 - diff_transforms.start().0.0;
let end = diff_transforms.start().1.0 + overshoot;
Some(start..end)
@@ -6274,7 +6265,7 @@ impl MultiBufferSnapshot {
pub fn buffer_range_for_excerpt(&self, excerpt_id: ExcerptId) -> Option<Range<text::Anchor>> {
let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&());
let locator = self.excerpt_locator_for_id(excerpt_id);
- if cursor.seek(&Some(locator), Bias::Left, &()) {
+ if cursor.seek(&Some(locator), Bias::Left) {
if let Some(excerpt) = cursor.item() {
return Some(excerpt.range.context.clone());
}
@@ -6285,7 +6276,7 @@ impl MultiBufferSnapshot {
fn excerpt(&self, excerpt_id: ExcerptId) -> Option<&Excerpt> {
let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&());
let locator = self.excerpt_locator_for_id(excerpt_id);
- cursor.seek(&Some(locator), Bias::Left, &());
+ cursor.seek(&Some(locator), Bias::Left);
if let Some(excerpt) = cursor.item() {
if excerpt.id == excerpt_id {
return Some(excerpt);
@@ -6333,7 +6324,7 @@ impl MultiBufferSnapshot {
let mut cursor = self.excerpts.cursor::<ExcerptSummary>(&());
let start_locator = self.excerpt_locator_for_id(range.start.excerpt_id);
let end_locator = self.excerpt_locator_for_id(range.end.excerpt_id);
- cursor.seek(start_locator, Bias::Left, &());
+ cursor.seek(start_locator, Bias::Left);
cursor
.take_while(move |excerpt| excerpt.locator <= *end_locator)
.flat_map(move |excerpt| {
@@ -6472,11 +6463,11 @@ where
fn seek(&mut self, position: &D) {
self.cached_region.take();
self.diff_transforms
- .seek(&OutputDimension(*position), Bias::Right, &());
+ .seek(&OutputDimension(*position), Bias::Right);
if self.diff_transforms.item().is_none()
&& *position == self.diff_transforms.start().output_dimension.0
{
- self.diff_transforms.prev(&());
+ self.diff_transforms.prev();
}
let mut excerpt_position = self.diff_transforms.start().excerpt_dimension.0;
@@ -6486,20 +6477,20 @@ where
}
self.excerpts
- .seek(&ExcerptDimension(excerpt_position), Bias::Right, &());
+ .seek(&ExcerptDimension(excerpt_position), Bias::Right);
if self.excerpts.item().is_none() && excerpt_position == self.excerpts.start().0 {
- self.excerpts.prev(&());
+ self.excerpts.prev();
}
}
fn seek_forward(&mut self, position: &D) {
self.cached_region.take();
self.diff_transforms
- .seek_forward(&OutputDimension(*position), Bias::Right, &());
+ .seek_forward(&OutputDimension(*position), Bias::Right);
if self.diff_transforms.item().is_none()
&& *position == self.diff_transforms.start().output_dimension.0
{
- self.diff_transforms.prev(&());
+ self.diff_transforms.prev();
}
let overshoot = *position - self.diff_transforms.start().output_dimension.0;
@@ -6509,31 +6500,30 @@ where
}
self.excerpts
- .seek_forward(&ExcerptDimension(excerpt_position), Bias::Right, &());
+ .seek_forward(&ExcerptDimension(excerpt_position), Bias::Right);
if self.excerpts.item().is_none() && excerpt_position == self.excerpts.start().0 {
- self.excerpts.prev(&());
+ self.excerpts.prev();
}
}
fn next_excerpt(&mut self) {
- self.excerpts.next(&());
+ self.excerpts.next();
self.seek_to_start_of_current_excerpt();
}
fn prev_excerpt(&mut self) {
- self.excerpts.prev(&());
+ self.excerpts.prev();
self.seek_to_start_of_current_excerpt();
}
fn seek_to_start_of_current_excerpt(&mut self) {
self.cached_region.take();
- self.diff_transforms
- .seek(self.excerpts.start(), Bias::Left, &());
- if self.diff_transforms.end(&()).excerpt_dimension == *self.excerpts.start()
+ self.diff_transforms.seek(self.excerpts.start(), Bias::Left);
+ if self.diff_transforms.end().excerpt_dimension == *self.excerpts.start()
&& self.diff_transforms.start().excerpt_dimension < *self.excerpts.start()
&& self.diff_transforms.next_item().is_some()
{
- self.diff_transforms.next(&());
+ self.diff_transforms.next();
}
}
@@ -6541,18 +6531,18 @@ where
self.cached_region.take();
match self
.diff_transforms
- .end(&())
+ .end()
.excerpt_dimension
- .cmp(&self.excerpts.end(&()))
+ .cmp(&self.excerpts.end())
{
- cmp::Ordering::Less => self.diff_transforms.next(&()),
- cmp::Ordering::Greater => self.excerpts.next(&()),
+ cmp::Ordering::Less => self.diff_transforms.next(),
+ cmp::Ordering::Greater => self.excerpts.next(),
cmp::Ordering::Equal => {
- self.diff_transforms.next(&());
- if self.diff_transforms.end(&()).excerpt_dimension > self.excerpts.end(&())
+ self.diff_transforms.next();
+ if self.diff_transforms.end().excerpt_dimension > self.excerpts.end()
|| self.diff_transforms.item().is_none()
{
- self.excerpts.next(&());
+ self.excerpts.next();
} else if let Some(DiffTransform::DeletedHunk { hunk_info, .. }) =
self.diff_transforms.item()
{
@@ -6561,7 +6551,7 @@ where
.item()
.map_or(false, |excerpt| excerpt.id != hunk_info.excerpt_id)
{
- self.excerpts.next(&());
+ self.excerpts.next();
}
}
}
@@ -6576,14 +6566,14 @@ where
.excerpt_dimension
.cmp(self.excerpts.start())
{
- cmp::Ordering::Less => self.excerpts.prev(&()),
- cmp::Ordering::Greater => self.diff_transforms.prev(&()),
+ cmp::Ordering::Less => self.excerpts.prev(),
+ cmp::Ordering::Greater => self.diff_transforms.prev(),
cmp::Ordering::Equal => {
- self.diff_transforms.prev(&());
+ self.diff_transforms.prev();
if self.diff_transforms.start().excerpt_dimension < *self.excerpts.start()
|| self.diff_transforms.item().is_none()
{
- self.excerpts.prev(&());
+ self.excerpts.prev();
}
}
}
@@ -6603,9 +6593,9 @@ where
return true;
}
- self.diff_transforms.prev(&());
+ self.diff_transforms.prev();
let prev_transform = self.diff_transforms.item();
- self.diff_transforms.next(&());
+ self.diff_transforms.next();
prev_transform.map_or(true, |next_transform| {
matches!(next_transform, DiffTransform::BufferContent { .. })
@@ -6613,9 +6603,9 @@ where
}
fn is_at_end_of_excerpt(&mut self) -> bool {
- if self.diff_transforms.end(&()).excerpt_dimension < self.excerpts.end(&()) {
+ if self.diff_transforms.end().excerpt_dimension < self.excerpts.end() {
return false;
- } else if self.diff_transforms.end(&()).excerpt_dimension > self.excerpts.end(&())
+ } else if self.diff_transforms.end().excerpt_dimension > self.excerpts.end()
|| self.diff_transforms.item().is_none()
{
return true;
@@ -6636,7 +6626,7 @@ where
let buffer = &excerpt.buffer;
let buffer_context_start = excerpt.range.context.start.summary::<D>(buffer);
let mut buffer_start = buffer_context_start;
- let overshoot = self.diff_transforms.end(&()).excerpt_dimension.0 - self.excerpts.start().0;
+ let overshoot = self.diff_transforms.end().excerpt_dimension.0 - self.excerpts.start().0;
buffer_start.add_assign(&overshoot);
Some(buffer_start)
}
@@ -6659,7 +6649,7 @@ where
let mut buffer_end = buffer_start;
buffer_end.add_assign(&buffer_range_len);
let start = self.diff_transforms.start().output_dimension.0;
- let end = self.diff_transforms.end(&()).output_dimension.0;
+ let end = self.diff_transforms.end().output_dimension.0;
return Some(MultiBufferRegion {
buffer,
excerpt,
@@ -6693,16 +6683,16 @@ where
let mut end;
let mut buffer_end;
let has_trailing_newline;
- if self.diff_transforms.end(&()).excerpt_dimension.0 < self.excerpts.end(&()).0 {
+ if self.diff_transforms.end().excerpt_dimension.0 < self.excerpts.end().0 {
let overshoot =
- self.diff_transforms.end(&()).excerpt_dimension.0 - self.excerpts.start().0;
- end = self.diff_transforms.end(&()).output_dimension.0;
+ self.diff_transforms.end().excerpt_dimension.0 - self.excerpts.start().0;
+ end = self.diff_transforms.end().output_dimension.0;
buffer_end = buffer_context_start;
buffer_end.add_assign(&overshoot);
has_trailing_newline = false;
} else {
let overshoot =
- self.excerpts.end(&()).0 - self.diff_transforms.start().excerpt_dimension.0;
+ self.excerpts.end().0 - self.diff_transforms.start().excerpt_dimension.0;
end = self.diff_transforms.start().output_dimension.0;
end.add_assign(&overshoot);
buffer_end = excerpt.range.context.end.summary::<D>(buffer);
@@ -7086,11 +7076,11 @@ impl<'a> MultiBufferExcerpt<'a> {
/// Maps a range within the [`MultiBuffer`] to a range within the [`Buffer`]
pub fn map_range_to_buffer(&mut self, range: Range<usize>) -> Range<usize> {
self.diff_transforms
- .seek(&OutputDimension(range.start), Bias::Right, &());
+ .seek(&OutputDimension(range.start), Bias::Right);
let start = self.map_offset_to_buffer_internal(range.start);
let end = if range.end > range.start {
self.diff_transforms
- .seek_forward(&OutputDimension(range.end), Bias::Right, &());
+ .seek_forward(&OutputDimension(range.end), Bias::Right);
self.map_offset_to_buffer_internal(range.end)
} else {
start
@@ -7123,7 +7113,7 @@ impl<'a> MultiBufferExcerpt<'a> {
}
let overshoot = buffer_range.start - self.buffer_offset;
let excerpt_offset = ExcerptDimension(self.excerpt_offset.0 + overshoot);
- self.diff_transforms.seek(&excerpt_offset, Bias::Right, &());
+ self.diff_transforms.seek(&excerpt_offset, Bias::Right);
if excerpt_offset.0 < self.diff_transforms.start().excerpt_dimension.0 {
log::warn!(
"Attempting to map a range from a buffer offset that starts before the current buffer offset"
@@ -7137,7 +7127,7 @@ impl<'a> MultiBufferExcerpt<'a> {
let overshoot = buffer_range.end - self.buffer_offset;
let excerpt_offset = ExcerptDimension(self.excerpt_offset.0 + overshoot);
self.diff_transforms
- .seek_forward(&excerpt_offset, Bias::Right, &());
+ .seek_forward(&excerpt_offset, Bias::Right);
let overshoot = excerpt_offset.0 - self.diff_transforms.start().excerpt_dimension.0;
self.diff_transforms.start().output_dimension.0 + overshoot
} else {
@@ -7509,7 +7499,7 @@ impl Iterator for MultiBufferRows<'_> {
if let Some(next_region) = self.cursor.region() {
region = next_region;
} else {
- if self.point == self.cursor.diff_transforms.end(&()).output_dimension.0 {
+ if self.point == self.cursor.diff_transforms.end().output_dimension.0 {
let multibuffer_row = MultiBufferRow(self.point.row);
let last_excerpt = self
.cursor
@@ -7615,14 +7605,14 @@ impl<'a> MultiBufferChunks<'a> {
}
pub fn seek(&mut self, range: Range<usize>) {
- self.diff_transforms.seek(&range.end, Bias::Right, &());
+ self.diff_transforms.seek(&range.end, Bias::Right);
let mut excerpt_end = self.diff_transforms.start().1;
if let Some(DiffTransform::BufferContent { .. }) = self.diff_transforms.item() {
let overshoot = range.end - self.diff_transforms.start().0;
excerpt_end.value += overshoot;
}
- self.diff_transforms.seek(&range.start, Bias::Right, &());
+ self.diff_transforms.seek(&range.start, Bias::Right);
let mut excerpt_start = self.diff_transforms.start().1;
if let Some(DiffTransform::BufferContent { .. }) = self.diff_transforms.item() {
let overshoot = range.start - self.diff_transforms.start().0;
@@ -7636,7 +7626,7 @@ impl<'a> MultiBufferChunks<'a> {
fn seek_to_excerpt_offset_range(&mut self, new_range: Range<ExcerptOffset>) {
self.excerpt_offset_range = new_range.clone();
- self.excerpts.seek(&new_range.start, Bias::Right, &());
+ self.excerpts.seek(&new_range.start, Bias::Right);
if let Some(excerpt) = self.excerpts.item() {
let excerpt_start = *self.excerpts.start();
if let Some(excerpt_chunks) = self
@@ -7669,7 +7659,7 @@ impl<'a> MultiBufferChunks<'a> {
self.excerpt_offset_range.start.value += chunk.text.len();
return Some(chunk);
} else {
- self.excerpts.next(&());
+ self.excerpts.next();
let excerpt = self.excerpts.item()?;
self.excerpt_chunks = Some(excerpt.chunks_in_range(
0..(self.excerpt_offset_range.end - *self.excerpts.start()).value,
@@ -7712,12 +7702,12 @@ impl<'a> Iterator for MultiBufferChunks<'a> {
if self.range.start >= self.range.end {
return None;
}
- if self.range.start == self.diff_transforms.end(&()).0 {
- self.diff_transforms.next(&());
+ if self.range.start == self.diff_transforms.end().0 {
+ self.diff_transforms.next();
}
let diff_transform_start = self.diff_transforms.start().0;
- let diff_transform_end = self.diff_transforms.end(&()).0;
+ let diff_transform_end = self.diff_transforms.end().0;
debug_assert!(self.range.start < diff_transform_end);
let diff_transform = self.diff_transforms.item()?;
@@ -132,12 +132,12 @@ impl NotificationStore {
}
let ix = count - 1 - ix;
let mut cursor = self.notifications.cursor::<Count>(&());
- cursor.seek(&Count(ix), Bias::Right, &());
+ cursor.seek(&Count(ix), Bias::Right);
cursor.item()
}
pub fn notification_for_id(&self, id: u64) -> Option<&NotificationEntry> {
let mut cursor = self.notifications.cursor::<NotificationId>(&());
- cursor.seek(&NotificationId(id), Bias::Left, &());
+ cursor.seek(&NotificationId(id), Bias::Left);
if let Some(item) = cursor.item() {
if item.id == id {
return Some(item);
@@ -365,7 +365,7 @@ impl NotificationStore {
let mut old_range = 0..0;
for (i, (id, new_notification)) in notifications.into_iter().enumerate() {
- new_notifications.append(cursor.slice(&NotificationId(id), Bias::Left, &()), &());
+ new_notifications.append(cursor.slice(&NotificationId(id), Bias::Left), &());
if i == 0 {
old_range.start = cursor.start().1.0;
@@ -374,7 +374,7 @@ impl NotificationStore {
let old_notification = cursor.item();
if let Some(old_notification) = old_notification {
if old_notification.id == id {
- cursor.next(&());
+ cursor.next();
if let Some(new_notification) = &new_notification {
if new_notification.is_read {
@@ -403,7 +403,7 @@ impl NotificationStore {
old_range.end = cursor.start().1.0;
let new_count = new_notifications.summary().count - old_range.start;
- new_notifications.append(cursor.suffix(&()), &());
+ new_notifications.append(cursor.suffix(), &());
drop(cursor);
self.notifications = new_notifications;
@@ -1 +1 @@
-../../../LICENSE-GPL
+../../LICENSE-GPL
@@ -4279,7 +4279,7 @@ impl Repository {
for (repo_path, status) in &*statuses.entries {
changed_paths.remove(repo_path);
- if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) {
+ if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left) {
if cursor.item().is_some_and(|entry| entry.status == *status) {
continue;
}
@@ -4292,7 +4292,7 @@ impl Repository {
}
let mut cursor = prev_statuses.cursor::<PathProgress>(&());
for path in changed_paths.into_iter() {
- if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) {
+ if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
}
}
@@ -72,14 +72,13 @@ impl<'a> GitTraversal<'a> {
if entry.is_dir() {
let mut statuses = statuses.clone();
- statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &());
- let summary =
- statuses.summary(&PathTarget::Successor(repo_path.as_ref()), Bias::Left, &());
+ statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left);
+ let summary = statuses.summary(&PathTarget::Successor(repo_path.as_ref()), Bias::Left);
self.current_entry_summary = Some(summary);
} else if entry.is_file() {
// For a file entry, park the cursor on the corresponding status
- if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &()) {
+ if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left) {
// TODO: Investigate statuses.item() being None here.
self.current_entry_summary = statuses.item().map(|item| item.status.into());
} else {
@@ -1274,15 +1274,11 @@ impl LocalLspStore {
// grouped with the previous transaction in the history
// based on the transaction group interval
buffer.finalize_last_transaction();
- let transaction_id = buffer
+ buffer
.start_transaction()
.context("transaction already open")?;
- let transaction = buffer
- .get_transaction(transaction_id)
- .expect("transaction started")
- .clone();
buffer.end_transaction(cx);
- buffer.push_transaction(transaction, cx.background_executor().now());
+ let transaction_id = buffer.push_empty_transaction(cx.background_executor().now());
buffer.finalize_last_transaction();
anyhow::Ok(transaction_id)
})??;
@@ -193,6 +193,30 @@ impl SearchQuery {
}
pub fn from_proto(message: proto::SearchQuery) -> Result<Self> {
+ let files_to_include = if message.files_to_include.is_empty() {
+ message
+ .files_to_include_legacy
+ .split(',')
+ .map(str::trim)
+ .filter(|&glob_str| !glob_str.is_empty())
+ .map(|s| s.to_string())
+ .collect()
+ } else {
+ message.files_to_include
+ };
+
+ let files_to_exclude = if message.files_to_exclude.is_empty() {
+ message
+ .files_to_exclude_legacy
+ .split(',')
+ .map(str::trim)
+ .filter(|&glob_str| !glob_str.is_empty())
+ .map(|s| s.to_string())
+ .collect()
+ } else {
+ message.files_to_exclude
+ };
+
if message.regex {
Self::regex(
message.query,
@@ -200,8 +224,8 @@ impl SearchQuery {
message.case_sensitive,
message.include_ignored,
false,
- deserialize_path_matches(&message.files_to_include)?,
- deserialize_path_matches(&message.files_to_exclude)?,
+ PathMatcher::new(files_to_include)?,
+ PathMatcher::new(files_to_exclude)?,
message.match_full_paths,
                None, // search opened only doesn't need remote search
)
@@ -211,8 +235,8 @@ impl SearchQuery {
message.whole_word,
message.case_sensitive,
message.include_ignored,
- deserialize_path_matches(&message.files_to_include)?,
- deserialize_path_matches(&message.files_to_exclude)?,
+ PathMatcher::new(files_to_include)?,
+ PathMatcher::new(files_to_exclude)?,
false,
                None, // search opened only doesn't need remote search
)
@@ -236,15 +260,20 @@ impl SearchQuery {
}
pub fn to_proto(&self) -> proto::SearchQuery {
+ let files_to_include = self.files_to_include().sources().to_vec();
+ let files_to_exclude = self.files_to_exclude().sources().to_vec();
proto::SearchQuery {
query: self.as_str().to_string(),
regex: self.is_regex(),
whole_word: self.whole_word(),
case_sensitive: self.case_sensitive(),
include_ignored: self.include_ignored(),
- files_to_include: self.files_to_include().sources().join(","),
- files_to_exclude: self.files_to_exclude().sources().join(","),
+ files_to_include: files_to_include.clone(),
+ files_to_exclude: files_to_exclude.clone(),
match_full_paths: self.match_full_paths(),
+ // Populate legacy fields for backwards compatibility
+ files_to_include_legacy: files_to_include.join(","),
+ files_to_exclude_legacy: files_to_exclude.join(","),
}
}
@@ -520,14 +549,6 @@ impl SearchQuery {
}
}
-pub fn deserialize_path_matches(glob_set: &str) -> anyhow::Result<PathMatcher> {
- let globs = glob_set
- .split(',')
- .map(str::trim)
- .filter(|&glob_str| !glob_str.is_empty());
- Ok(PathMatcher::new(globs)?)
-}
-
#[cfg(test)]
mod tests {
use super::*;
@@ -45,12 +45,6 @@ impl SearchHistory {
}
pub fn add(&mut self, cursor: &mut SearchHistoryCursor, search_string: String) {
- if let Some(selected_ix) = cursor.selection {
- if self.history.get(selected_ix) == Some(&search_string) {
- return;
- }
- }
-
if self.insertion_behavior == QueryInsertionBehavior::ReplacePreviousIfContains {
if let Some(previously_searched) = self.history.back_mut() {
if search_string.contains(previously_searched.as_str()) {
@@ -144,6 +138,14 @@ mod tests {
);
assert_eq!(search_history.current(&cursor), Some("rustlang"));
+        // add an item equal to the current item when it's not the last one
+ search_history.add(&mut cursor, "php".to_string());
+ search_history.previous(&mut cursor);
+ assert_eq!(search_history.current(&cursor), Some("rustlang"));
+ search_history.add(&mut cursor, "rustlang".to_string());
+ assert_eq!(search_history.history.len(), 3, "Should add item");
+ assert_eq!(search_history.current(&cursor), Some("rustlang"));
+
// push enough items to test SEARCH_HISTORY_LIMIT
for i in 0..MAX_HISTORY_LEN * 2 {
search_history.add(&mut cursor, format!("item{i}"));
@@ -288,10 +288,12 @@ message SearchQuery {
bool regex = 3;
bool whole_word = 4;
bool case_sensitive = 5;
- string files_to_include = 6;
- string files_to_exclude = 7;
+ repeated string files_to_include = 10;
+ repeated string files_to_exclude = 11;
bool match_full_paths = 9;
bool include_ignored = 8;
+ string files_to_include_legacy = 6;
+ string files_to_exclude_legacy = 7;
}
message FindSearchCandidates {
@@ -41,9 +41,9 @@ impl Rope {
self.push_chunk(chunk.as_slice());
let mut chunks = rope.chunks.cursor::<()>(&());
- chunks.next(&());
- chunks.next(&());
- self.chunks.append(chunks.suffix(&()), &());
+ chunks.next();
+ chunks.next();
+ self.chunks.append(chunks.suffix(), &());
self.check_invariants();
return;
}
@@ -283,7 +283,7 @@ impl Rope {
return self.summary().len_utf16;
}
let mut cursor = self.chunks.cursor::<(usize, OffsetUtf16)>(&());
- cursor.seek(&offset, Bias::Left, &());
+ cursor.seek(&offset, Bias::Left);
let overshoot = offset - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(Default::default(), |chunk| {
@@ -296,7 +296,7 @@ impl Rope {
return self.summary().len;
}
let mut cursor = self.chunks.cursor::<(OffsetUtf16, usize)>(&());
- cursor.seek(&offset, Bias::Left, &());
+ cursor.seek(&offset, Bias::Left);
let overshoot = offset - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(Default::default(), |chunk| {
@@ -309,7 +309,7 @@ impl Rope {
return self.summary().lines;
}
let mut cursor = self.chunks.cursor::<(usize, Point)>(&());
- cursor.seek(&offset, Bias::Left, &());
+ cursor.seek(&offset, Bias::Left);
let overshoot = offset - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(Point::zero(), |chunk| {
@@ -322,7 +322,7 @@ impl Rope {
return self.summary().lines_utf16();
}
let mut cursor = self.chunks.cursor::<(usize, PointUtf16)>(&());
- cursor.seek(&offset, Bias::Left, &());
+ cursor.seek(&offset, Bias::Left);
let overshoot = offset - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(PointUtf16::zero(), |chunk| {
@@ -335,7 +335,7 @@ impl Rope {
return self.summary().lines_utf16();
}
let mut cursor = self.chunks.cursor::<(Point, PointUtf16)>(&());
- cursor.seek(&point, Bias::Left, &());
+ cursor.seek(&point, Bias::Left);
let overshoot = point - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(PointUtf16::zero(), |chunk| {
@@ -348,7 +348,7 @@ impl Rope {
return self.summary().len;
}
let mut cursor = self.chunks.cursor::<(Point, usize)>(&());
- cursor.seek(&point, Bias::Left, &());
+ cursor.seek(&point, Bias::Left);
let overshoot = point - cursor.start().0;
cursor.start().1
+ cursor
@@ -369,7 +369,7 @@ impl Rope {
return self.summary().len;
}
let mut cursor = self.chunks.cursor::<(PointUtf16, usize)>(&());
- cursor.seek(&point, Bias::Left, &());
+ cursor.seek(&point, Bias::Left);
let overshoot = point - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(0, |chunk| {
@@ -382,7 +382,7 @@ impl Rope {
return self.summary().lines;
}
let mut cursor = self.chunks.cursor::<(PointUtf16, Point)>(&());
- cursor.seek(&point.0, Bias::Left, &());
+ cursor.seek(&point.0, Bias::Left);
let overshoot = Unclipped(point.0 - cursor.start().0);
cursor.start().1
+ cursor.item().map_or(Point::zero(), |chunk| {
@@ -392,7 +392,7 @@ impl Rope {
pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize {
let mut cursor = self.chunks.cursor::<usize>(&());
- cursor.seek(&offset, Bias::Left, &());
+ cursor.seek(&offset, Bias::Left);
if let Some(chunk) = cursor.item() {
let mut ix = offset - cursor.start();
while !chunk.text.is_char_boundary(ix) {
@@ -415,7 +415,7 @@ impl Rope {
pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
let mut cursor = self.chunks.cursor::<OffsetUtf16>(&());
- cursor.seek(&offset, Bias::Right, &());
+ cursor.seek(&offset, Bias::Right);
if let Some(chunk) = cursor.item() {
let overshoot = offset - cursor.start();
*cursor.start() + chunk.as_slice().clip_offset_utf16(overshoot, bias)
@@ -426,7 +426,7 @@ impl Rope {
pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
let mut cursor = self.chunks.cursor::<Point>(&());
- cursor.seek(&point, Bias::Right, &());
+ cursor.seek(&point, Bias::Right);
if let Some(chunk) = cursor.item() {
let overshoot = point - cursor.start();
*cursor.start() + chunk.as_slice().clip_point(overshoot, bias)
@@ -437,7 +437,7 @@ impl Rope {
pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
let mut cursor = self.chunks.cursor::<PointUtf16>(&());
- cursor.seek(&point.0, Bias::Right, &());
+ cursor.seek(&point.0, Bias::Right);
if let Some(chunk) = cursor.item() {
let overshoot = Unclipped(point.0 - cursor.start());
*cursor.start() + chunk.as_slice().clip_point_utf16(overshoot, bias)
@@ -450,10 +450,6 @@ impl Rope {
self.clip_point(Point::new(row, u32::MAX), Bias::Left)
.column
}
-
- pub fn ptr_eq(&self, other: &Self) -> bool {
- self.chunks.ptr_eq(&other.chunks)
- }
}
impl<'a> From<&'a str> for Rope {
@@ -514,7 +510,7 @@ pub struct Cursor<'a> {
impl<'a> Cursor<'a> {
pub fn new(rope: &'a Rope, offset: usize) -> Self {
let mut chunks = rope.chunks.cursor(&());
- chunks.seek(&offset, Bias::Right, &());
+ chunks.seek(&offset, Bias::Right);
Self {
rope,
chunks,
@@ -525,7 +521,7 @@ impl<'a> Cursor<'a> {
pub fn seek_forward(&mut self, end_offset: usize) {
debug_assert!(end_offset >= self.offset);
- self.chunks.seek_forward(&end_offset, Bias::Right, &());
+ self.chunks.seek_forward(&end_offset, Bias::Right);
self.offset = end_offset;
}
@@ -540,14 +536,14 @@ impl<'a> Cursor<'a> {
let mut slice = Rope::new();
if let Some(start_chunk) = self.chunks.item() {
let start_ix = self.offset - self.chunks.start();
- let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start();
+ let end_ix = cmp::min(end_offset, self.chunks.end()) - self.chunks.start();
slice.push_chunk(start_chunk.slice(start_ix..end_ix));
}
- if end_offset > self.chunks.end(&()) {
- self.chunks.next(&());
+ if end_offset > self.chunks.end() {
+ self.chunks.next();
slice.append(Rope {
- chunks: self.chunks.slice(&end_offset, Bias::Right, &()),
+ chunks: self.chunks.slice(&end_offset, Bias::Right),
});
if let Some(end_chunk) = self.chunks.item() {
let end_ix = end_offset - self.chunks.start();
@@ -565,13 +561,13 @@ impl<'a> Cursor<'a> {
let mut summary = D::zero(&());
if let Some(start_chunk) = self.chunks.item() {
let start_ix = self.offset - self.chunks.start();
- let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start();
+ let end_ix = cmp::min(end_offset, self.chunks.end()) - self.chunks.start();
summary.add_assign(&D::from_chunk(start_chunk.slice(start_ix..end_ix)));
}
- if end_offset > self.chunks.end(&()) {
- self.chunks.next(&());
- summary.add_assign(&self.chunks.summary(&end_offset, Bias::Right, &()));
+ if end_offset > self.chunks.end() {
+ self.chunks.next();
+ summary.add_assign(&self.chunks.summary(&end_offset, Bias::Right));
if let Some(end_chunk) = self.chunks.item() {
let end_ix = end_offset - self.chunks.start();
summary.add_assign(&D::from_chunk(end_chunk.slice(0..end_ix)));
@@ -603,10 +599,10 @@ impl<'a> Chunks<'a> {
pub fn new(rope: &'a Rope, range: Range<usize>, reversed: bool) -> Self {
let mut chunks = rope.chunks.cursor(&());
let offset = if reversed {
- chunks.seek(&range.end, Bias::Left, &());
+ chunks.seek(&range.end, Bias::Left);
range.end
} else {
- chunks.seek(&range.start, Bias::Right, &());
+ chunks.seek(&range.start, Bias::Right);
range.start
};
Self {
@@ -642,10 +638,10 @@ impl<'a> Chunks<'a> {
Bias::Right
};
- if offset >= self.chunks.end(&()) {
- self.chunks.seek_forward(&offset, bias, &());
+ if offset >= self.chunks.end() {
+ self.chunks.seek_forward(&offset, bias);
} else {
- self.chunks.seek(&offset, bias, &());
+ self.chunks.seek(&offset, bias);
}
self.offset = offset;
@@ -674,25 +670,25 @@ impl<'a> Chunks<'a> {
found = self.offset <= self.range.end;
} else {
self.chunks
- .search_forward(|summary| summary.text.lines.row > 0, &());
+ .search_forward(|summary| summary.text.lines.row > 0);
self.offset = *self.chunks.start();
if let Some(newline_ix) = self.peek().and_then(|chunk| chunk.find('\n')) {
self.offset += newline_ix + 1;
found = self.offset <= self.range.end;
} else {
- self.offset = self.chunks.end(&());
+ self.offset = self.chunks.end();
}
}
- if self.offset == self.chunks.end(&()) {
+ if self.offset == self.chunks.end() {
self.next();
}
}
if self.offset > self.range.end {
self.offset = cmp::min(self.offset, self.range.end);
- self.chunks.seek(&self.offset, Bias::Right, &());
+ self.chunks.seek(&self.offset, Bias::Right);
}
found
@@ -711,7 +707,7 @@ impl<'a> Chunks<'a> {
let initial_offset = self.offset;
if self.offset == *self.chunks.start() {
- self.chunks.prev(&());
+ self.chunks.prev();
}
if let Some(chunk) = self.chunks.item() {
@@ -729,14 +725,14 @@ impl<'a> Chunks<'a> {
}
self.chunks
- .search_backward(|summary| summary.text.lines.row > 0, &());
+ .search_backward(|summary| summary.text.lines.row > 0);
self.offset = *self.chunks.start();
if let Some(chunk) = self.chunks.item() {
if let Some(newline_ix) = chunk.text.rfind('\n') {
self.offset += newline_ix + 1;
if self.offset_is_valid() {
- if self.offset == self.chunks.end(&()) {
- self.chunks.next(&());
+ if self.offset == self.chunks.end() {
+ self.chunks.next();
}
return true;
@@ -746,7 +742,7 @@ impl<'a> Chunks<'a> {
if !self.offset_is_valid() || self.chunks.item().is_none() {
self.offset = self.range.start;
- self.chunks.seek(&self.offset, Bias::Right, &());
+ self.chunks.seek(&self.offset, Bias::Right);
}
self.offset < initial_offset && self.offset == 0
@@ -765,7 +761,7 @@ impl<'a> Chunks<'a> {
slice_start..slice_end
} else {
let slice_start = self.offset - chunk_start;
- let slice_end = cmp::min(self.chunks.end(&()), self.range.end) - chunk_start;
+ let slice_end = cmp::min(self.chunks.end(), self.range.end) - chunk_start;
slice_start..slice_end
};
@@ -825,12 +821,12 @@ impl<'a> Iterator for Chunks<'a> {
if self.reversed {
self.offset -= chunk.len();
if self.offset <= *self.chunks.start() {
- self.chunks.prev(&());
+ self.chunks.prev();
}
} else {
self.offset += chunk.len();
- if self.offset >= self.chunks.end(&()) {
- self.chunks.next(&());
+ if self.offset >= self.chunks.end() {
+ self.chunks.next();
}
}
@@ -848,9 +844,9 @@ impl<'a> Bytes<'a> {
pub fn new(rope: &'a Rope, range: Range<usize>, reversed: bool) -> Self {
let mut chunks = rope.chunks.cursor(&());
if reversed {
- chunks.seek(&range.end, Bias::Left, &());
+ chunks.seek(&range.end, Bias::Left);
} else {
- chunks.seek(&range.start, Bias::Right, &());
+ chunks.seek(&range.start, Bias::Right);
}
Self {
chunks,
@@ -861,7 +857,7 @@ impl<'a> Bytes<'a> {
pub fn peek(&self) -> Option<&'a [u8]> {
let chunk = self.chunks.item()?;
- if self.reversed && self.range.start >= self.chunks.end(&()) {
+ if self.reversed && self.range.start >= self.chunks.end() {
return None;
}
let chunk_start = *self.chunks.start();
@@ -881,9 +877,9 @@ impl<'a> Iterator for Bytes<'a> {
let result = self.peek();
if result.is_some() {
if self.reversed {
- self.chunks.prev(&());
+ self.chunks.prev();
} else {
- self.chunks.next(&());
+ self.chunks.next();
}
}
result
@@ -905,9 +901,9 @@ impl io::Read for Bytes<'_> {
if len == chunk.len() {
if self.reversed {
- self.chunks.prev(&());
+ self.chunks.prev();
} else {
- self.chunks.next(&());
+ self.chunks.next();
}
}
Ok(len)
@@ -2784,6 +2784,7 @@ impl KeystrokeInput {
else {
log::trace!("No keybinding to stop recording keystrokes in keystroke input");
self.close_keystrokes.take();
+ self.close_keystrokes_start.take();
return CloseKeystrokeResult::None;
};
let action_keystrokes = keybind_for_close_action.keystrokes();
@@ -2976,7 +2977,9 @@ impl KeystrokeInput {
return;
}
window.focus(&self.outer_focus_handle);
- if let Some(close_keystrokes_start) = self.close_keystrokes_start.take() {
+ if let Some(close_keystrokes_start) = self.close_keystrokes_start.take()
+ && close_keystrokes_start < self.keystrokes.len()
+ {
self.keystrokes.drain(close_keystrokes_start..);
}
self.close_keystrokes.take();
@@ -25,6 +25,7 @@ pub struct Cursor<'a, T: Item, D> {
position: D,
did_seek: bool,
at_end: bool,
+ cx: &'a <T::Summary as Summary>::Context,
}
impl<T: Item + fmt::Debug, D: fmt::Debug> fmt::Debug for Cursor<'_, T, D>
@@ -52,21 +53,22 @@ where
T: Item,
D: Dimension<'a, T::Summary>,
{
- pub fn new(tree: &'a SumTree<T>, cx: &<T::Summary as Summary>::Context) -> Self {
+ pub fn new(tree: &'a SumTree<T>, cx: &'a <T::Summary as Summary>::Context) -> Self {
Self {
tree,
stack: ArrayVec::new(),
position: D::zero(cx),
did_seek: false,
at_end: tree.is_empty(),
+ cx,
}
}
- fn reset(&mut self, cx: &<T::Summary as Summary>::Context) {
+ fn reset(&mut self) {
self.did_seek = false;
self.at_end = self.tree.is_empty();
self.stack.truncate(0);
- self.position = D::zero(cx);
+ self.position = D::zero(self.cx);
}
pub fn start(&self) -> &D {
@@ -74,10 +76,10 @@ where
}
#[track_caller]
- pub fn end(&self, cx: &<T::Summary as Summary>::Context) -> D {
+ pub fn end(&self) -> D {
if let Some(item_summary) = self.item_summary() {
let mut end = self.start().clone();
- end.add_summary(item_summary, cx);
+ end.add_summary(item_summary, self.cx);
end
} else {
self.start().clone()
@@ -202,12 +204,12 @@ where
}
#[track_caller]
- pub fn prev(&mut self, cx: &<T::Summary as Summary>::Context) {
- self.search_backward(|_| true, cx)
+ pub fn prev(&mut self) {
+ self.search_backward(|_| true)
}
#[track_caller]
- pub fn search_backward<F>(&mut self, mut filter_node: F, cx: &<T::Summary as Summary>::Context)
+ pub fn search_backward<F>(&mut self, mut filter_node: F)
where
F: FnMut(&T::Summary) -> bool,
{
@@ -217,13 +219,13 @@ where
}
if self.at_end {
- self.position = D::zero(cx);
+ self.position = D::zero(self.cx);
self.at_end = self.tree.is_empty();
if !self.tree.is_empty() {
self.stack.push(StackEntry {
tree: self.tree,
index: self.tree.0.child_summaries().len(),
- position: D::from_summary(self.tree.summary(), cx),
+ position: D::from_summary(self.tree.summary(), self.cx),
});
}
}
@@ -233,7 +235,7 @@ where
if let Some(StackEntry { position, .. }) = self.stack.iter().rev().nth(1) {
self.position = position.clone();
} else {
- self.position = D::zero(cx);
+ self.position = D::zero(self.cx);
}
let entry = self.stack.last_mut().unwrap();
@@ -247,7 +249,7 @@ where
}
for summary in &entry.tree.0.child_summaries()[..entry.index] {
- self.position.add_summary(summary, cx);
+ self.position.add_summary(summary, self.cx);
}
entry.position = self.position.clone();
@@ -257,7 +259,7 @@ where
if descending {
let tree = &child_trees[entry.index];
self.stack.push(StackEntry {
- position: D::zero(cx),
+ position: D::zero(self.cx),
tree,
index: tree.0.child_summaries().len() - 1,
})
@@ -273,12 +275,12 @@ where
}
#[track_caller]
- pub fn next(&mut self, cx: &<T::Summary as Summary>::Context) {
- self.search_forward(|_| true, cx)
+ pub fn next(&mut self) {
+ self.search_forward(|_| true)
}
#[track_caller]
- pub fn search_forward<F>(&mut self, mut filter_node: F, cx: &<T::Summary as Summary>::Context)
+ pub fn search_forward<F>(&mut self, mut filter_node: F)
where
F: FnMut(&T::Summary) -> bool,
{
@@ -289,7 +291,7 @@ where
self.stack.push(StackEntry {
tree: self.tree,
index: 0,
- position: D::zero(cx),
+ position: D::zero(self.cx),
});
descend = true;
}
@@ -316,8 +318,8 @@ where
break;
} else {
entry.index += 1;
- entry.position.add_summary(next_summary, cx);
- self.position.add_summary(next_summary, cx);
+ entry.position.add_summary(next_summary, self.cx);
+ self.position.add_summary(next_summary, self.cx);
}
}
@@ -327,8 +329,8 @@ where
if !descend {
let item_summary = &item_summaries[entry.index];
entry.index += 1;
- entry.position.add_summary(item_summary, cx);
- self.position.add_summary(item_summary, cx);
+ entry.position.add_summary(item_summary, self.cx);
+ self.position.add_summary(item_summary, self.cx);
}
loop {
@@ -337,8 +339,8 @@ where
return;
} else {
entry.index += 1;
- entry.position.add_summary(next_item_summary, cx);
- self.position.add_summary(next_item_summary, cx);
+ entry.position.add_summary(next_item_summary, self.cx);
+ self.position.add_summary(next_item_summary, self.cx);
}
} else {
break None;
@@ -380,71 +382,51 @@ where
D: Dimension<'a, T::Summary>,
{
#[track_caller]
- pub fn seek<Target>(
- &mut self,
- pos: &Target,
- bias: Bias,
- cx: &<T::Summary as Summary>::Context,
- ) -> bool
+ pub fn seek<Target>(&mut self, pos: &Target, bias: Bias) -> bool
where
Target: SeekTarget<'a, T::Summary, D>,
{
- self.reset(cx);
- self.seek_internal(pos, bias, &mut (), cx)
+ self.reset();
+ self.seek_internal(pos, bias, &mut ())
}
#[track_caller]
- pub fn seek_forward<Target>(
- &mut self,
- pos: &Target,
- bias: Bias,
- cx: &<T::Summary as Summary>::Context,
- ) -> bool
+ pub fn seek_forward<Target>(&mut self, pos: &Target, bias: Bias) -> bool
where
Target: SeekTarget<'a, T::Summary, D>,
{
- self.seek_internal(pos, bias, &mut (), cx)
+ self.seek_internal(pos, bias, &mut ())
}
/// Advances the cursor and returns traversed items as a tree.
#[track_caller]
- pub fn slice<Target>(
- &mut self,
- end: &Target,
- bias: Bias,
- cx: &<T::Summary as Summary>::Context,
- ) -> SumTree<T>
+ pub fn slice<Target>(&mut self, end: &Target, bias: Bias) -> SumTree<T>
where
Target: SeekTarget<'a, T::Summary, D>,
{
let mut slice = SliceSeekAggregate {
- tree: SumTree::new(cx),
+ tree: SumTree::new(self.cx),
leaf_items: ArrayVec::new(),
leaf_item_summaries: ArrayVec::new(),
- leaf_summary: <T::Summary as Summary>::zero(cx),
+ leaf_summary: <T::Summary as Summary>::zero(self.cx),
};
- self.seek_internal(end, bias, &mut slice, cx);
+ self.seek_internal(end, bias, &mut slice);
slice.tree
}
#[track_caller]
- pub fn suffix(&mut self, cx: &<T::Summary as Summary>::Context) -> SumTree<T> {
- self.slice(&End::new(), Bias::Right, cx)
+ pub fn suffix(&mut self) -> SumTree<T> {
+ self.slice(&End::new(), Bias::Right)
}
#[track_caller]
- pub fn summary<Target, Output>(
- &mut self,
- end: &Target,
- bias: Bias,
- cx: &<T::Summary as Summary>::Context,
- ) -> Output
+ pub fn summary<Target, Output>(&mut self, end: &Target, bias: Bias) -> Output
where
Target: SeekTarget<'a, T::Summary, D>,
Output: Dimension<'a, T::Summary>,
{
- let mut summary = SummarySeekAggregate(Output::zero(cx));
- self.seek_internal(end, bias, &mut summary, cx);
+ let mut summary = SummarySeekAggregate(Output::zero(self.cx));
+ self.seek_internal(end, bias, &mut summary);
summary.0
}
@@ -455,10 +437,9 @@ where
target: &dyn SeekTarget<'a, T::Summary, D>,
bias: Bias,
aggregate: &mut dyn SeekAggregate<'a, T>,
- cx: &<T::Summary as Summary>::Context,
) -> bool {
assert!(
- target.cmp(&self.position, cx) >= Ordering::Equal,
+ target.cmp(&self.position, self.cx) >= Ordering::Equal,
"cannot seek backward",
);
@@ -467,7 +448,7 @@ where
self.stack.push(StackEntry {
tree: self.tree,
index: 0,
- position: D::zero(cx),
+ position: D::zero(self.cx),
});
}
@@ -489,14 +470,14 @@ where
.zip(&child_summaries[entry.index..])
{
let mut child_end = self.position.clone();
- child_end.add_summary(child_summary, cx);
+ child_end.add_summary(child_summary, self.cx);
- let comparison = target.cmp(&child_end, cx);
+ let comparison = target.cmp(&child_end, self.cx);
if comparison == Ordering::Greater
|| (comparison == Ordering::Equal && bias == Bias::Right)
{
self.position = child_end;
- aggregate.push_tree(child_tree, child_summary, cx);
+ aggregate.push_tree(child_tree, child_summary, self.cx);
entry.index += 1;
entry.position = self.position.clone();
} else {
@@ -522,22 +503,22 @@ where
.zip(&item_summaries[entry.index..])
{
let mut child_end = self.position.clone();
- child_end.add_summary(item_summary, cx);
+ child_end.add_summary(item_summary, self.cx);
- let comparison = target.cmp(&child_end, cx);
+ let comparison = target.cmp(&child_end, self.cx);
if comparison == Ordering::Greater
|| (comparison == Ordering::Equal && bias == Bias::Right)
{
self.position = child_end;
- aggregate.push_item(item, item_summary, cx);
+ aggregate.push_item(item, item_summary, self.cx);
entry.index += 1;
} else {
- aggregate.end_leaf(cx);
+ aggregate.end_leaf(self.cx);
break 'outer;
}
}
- aggregate.end_leaf(cx);
+ aggregate.end_leaf(self.cx);
}
}
@@ -551,11 +532,11 @@ where
let mut end = self.position.clone();
if bias == Bias::Left {
if let Some(summary) = self.item_summary() {
- end.add_summary(summary, cx);
+ end.add_summary(summary, self.cx);
}
}
- target.cmp(&end, cx) == Ordering::Equal
+ target.cmp(&end, self.cx) == Ordering::Equal
}
}
@@ -624,21 +605,19 @@ impl<'a, T: Item> Iterator for Iter<'a, T> {
}
}
-impl<'a, T, S, D> Iterator for Cursor<'a, T, D>
+impl<'a, T: Item, D> Iterator for Cursor<'a, T, D>
where
- T: Item<Summary = S>,
- S: Summary<Context = ()>,
D: Dimension<'a, T::Summary>,
{
type Item = &'a T;
fn next(&mut self) -> Option<Self::Item> {
if !self.did_seek {
- self.next(&());
+ self.next();
}
if let Some(item) = self.item() {
- self.next(&());
+ self.next();
Some(item)
} else {
None
@@ -651,7 +630,7 @@ pub struct FilterCursor<'a, F, T: Item, D> {
filter_node: F,
}
-impl<'a, F, T, D> FilterCursor<'a, F, T, D>
+impl<'a, F, T: Item, D> FilterCursor<'a, F, T, D>
where
F: FnMut(&T::Summary) -> bool,
T: Item,
@@ -659,7 +638,7 @@ where
{
pub fn new(
tree: &'a SumTree<T>,
- cx: &<T::Summary as Summary>::Context,
+ cx: &'a <T::Summary as Summary>::Context,
filter_node: F,
) -> Self {
let cursor = tree.cursor::<D>(cx);
@@ -673,8 +652,8 @@ where
self.cursor.start()
}
- pub fn end(&self, cx: &<T::Summary as Summary>::Context) -> D {
- self.cursor.end(cx)
+ pub fn end(&self) -> D {
+ self.cursor.end()
}
pub fn item(&self) -> Option<&'a T> {
@@ -685,31 +664,29 @@ where
self.cursor.item_summary()
}
- pub fn next(&mut self, cx: &<T::Summary as Summary>::Context) {
- self.cursor.search_forward(&mut self.filter_node, cx);
+ pub fn next(&mut self) {
+ self.cursor.search_forward(&mut self.filter_node);
}
- pub fn prev(&mut self, cx: &<T::Summary as Summary>::Context) {
- self.cursor.search_backward(&mut self.filter_node, cx);
+ pub fn prev(&mut self) {
+ self.cursor.search_backward(&mut self.filter_node);
}
}
-impl<'a, F, T, S, U> Iterator for FilterCursor<'a, F, T, U>
+impl<'a, F, T: Item, U> Iterator for FilterCursor<'a, F, T, U>
where
F: FnMut(&T::Summary) -> bool,
- T: Item<Summary = S>,
- S: Summary<Context = ()>, //Context for the summary must be unit type, as .next() doesn't take arguments
U: Dimension<'a, T::Summary>,
{
type Item = &'a T;
fn next(&mut self) -> Option<Self::Item> {
if !self.cursor.did_seek {
- self.next(&());
+ self.next();
}
if let Some(item) = self.item() {
- self.cursor.search_forward(&mut self.filter_node, &());
+ self.cursor.search_forward(&mut self.filter_node);
Some(item)
} else {
None
@@ -795,3 +772,23 @@ where
self.0.add_summary(summary, cx);
}
}
+
+struct End<D>(PhantomData<D>);
+
+impl<D> End<D> {
+ fn new() -> Self {
+ Self(PhantomData)
+ }
+}
+
+impl<'a, S: Summary, D: Dimension<'a, S>> SeekTarget<'a, S, D> for End<D> {
+ fn cmp(&self, _: &D, _: &S::Context) -> Ordering {
+ Ordering::Greater
+ }
+}
+
+impl<D> fmt::Debug for End<D> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("End").finish()
+ }
+}
@@ -38,7 +38,6 @@ pub trait Summary: Clone {
type Context;
fn zero(cx: &Self::Context) -> Self;
-
fn add_summary(&mut self, summary: &Self, cx: &Self::Context);
}
@@ -138,26 +137,6 @@ where
}
}
-struct End<D>(PhantomData<D>);
-
-impl<D> End<D> {
- fn new() -> Self {
- Self(PhantomData)
- }
-}
-
-impl<'a, S: Summary, D: Dimension<'a, S>> SeekTarget<'a, S, D> for End<D> {
- fn cmp(&self, _: &D, _: &S::Context) -> Ordering {
- Ordering::Greater
- }
-}
-
-impl<D> fmt::Debug for End<D> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_tuple("End").finish()
- }
-}
-
/// Bias is used to settle ambiguities when determining positions in an ordered sequence.
///
/// The primary use case is for text, where Bias influences
@@ -372,10 +351,10 @@ impl<T: Item> SumTree<T> {
pub fn items(&self, cx: &<T::Summary as Summary>::Context) -> Vec<T> {
let mut items = Vec::new();
let mut cursor = self.cursor::<()>(cx);
- cursor.next(cx);
+ cursor.next();
while let Some(item) = cursor.item() {
items.push(item.clone());
- cursor.next(cx);
+ cursor.next();
}
items
}
@@ -384,7 +363,7 @@ impl<T: Item> SumTree<T> {
Iter::new(self)
}
- pub fn cursor<'a, S>(&'a self, cx: &<T::Summary as Summary>::Context) -> Cursor<'a, T, S>
+ pub fn cursor<'a, S>(&'a self, cx: &'a <T::Summary as Summary>::Context) -> Cursor<'a, T, S>
where
S: Dimension<'a, T::Summary>,
{
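To make the effect of this signature change concrete, here is a minimal sketch of the new cursor calling convention, written against the `u8`/`Count` test types that appear later in this module's own tests; it is illustrative only, not code from this change. Because the summary context is now borrowed when the cursor is constructed, the traversal methods drop their former `cx` argument.

```rust
// Sketch only: `Count` and the u8 item impl come from sum_tree's own tests.
let mut tree = SumTree::<u8>::default();
tree.extend(vec![1, 2, 3, 4, 5, 6], &());

// The context (`&()` for these test items) is borrowed for the cursor's lifetime...
let mut cursor = tree.cursor::<Count>(&());

// ...so seeking and stepping no longer take it as an argument.
cursor.seek(&Count(2), Bias::Right); // was: cursor.seek(&Count(2), Bias::Right, &())
cursor.next();                       // was: cursor.next(&())
let suffix = cursor.suffix();        // was: cursor.suffix(&())
```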
@@ -395,7 +374,7 @@ impl<T: Item> SumTree<T> {
/// that is returned cannot be used with Rust's iterators.
pub fn filter<'a, F, U>(
&'a self,
- cx: &<T::Summary as Summary>::Context,
+ cx: &'a <T::Summary as Summary>::Context,
filter_node: F,
) -> FilterCursor<'a, F, T, U>
where
@@ -525,10 +504,6 @@ impl<T: Item> SumTree<T> {
}
}
- pub fn ptr_eq(&self, other: &Self) -> bool {
- Arc::ptr_eq(&self.0, &other.0)
- }
-
fn push_tree_recursive(
&mut self,
other: SumTree<T>,
@@ -686,11 +661,6 @@ impl<T: Item> SumTree<T> {
} => child_trees.last().unwrap().rightmost_leaf(),
}
}
-
- #[cfg(debug_assertions)]
- pub fn _debug_entries(&self) -> Vec<&T> {
- self.iter().collect::<Vec<_>>()
- }
}
impl<T: Item + PartialEq> PartialEq for SumTree<T> {
@@ -710,15 +680,15 @@ impl<T: KeyedItem> SumTree<T> {
let mut replaced = None;
*self = {
let mut cursor = self.cursor::<T::Key>(cx);
- let mut new_tree = cursor.slice(&item.key(), Bias::Left, cx);
+ let mut new_tree = cursor.slice(&item.key(), Bias::Left);
if let Some(cursor_item) = cursor.item() {
if cursor_item.key() == item.key() {
replaced = Some(cursor_item.clone());
- cursor.next(cx);
+ cursor.next();
}
}
new_tree.push(item, cx);
- new_tree.append(cursor.suffix(cx), cx);
+ new_tree.append(cursor.suffix(), cx);
new_tree
};
replaced
@@ -728,14 +698,14 @@ impl<T: KeyedItem> SumTree<T> {
let mut removed = None;
*self = {
let mut cursor = self.cursor::<T::Key>(cx);
- let mut new_tree = cursor.slice(key, Bias::Left, cx);
+ let mut new_tree = cursor.slice(key, Bias::Left);
if let Some(item) = cursor.item() {
if item.key() == *key {
removed = Some(item.clone());
- cursor.next(cx);
+ cursor.next();
}
}
- new_tree.append(cursor.suffix(cx), cx);
+ new_tree.append(cursor.suffix(), cx);
new_tree
};
removed
@@ -758,7 +728,7 @@ impl<T: KeyedItem> SumTree<T> {
let mut new_tree = SumTree::new(cx);
let mut buffered_items = Vec::new();
- cursor.seek(&T::Key::zero(cx), Bias::Left, cx);
+ cursor.seek(&T::Key::zero(cx), Bias::Left);
for edit in edits {
let new_key = edit.key();
let mut old_item = cursor.item();
@@ -768,7 +738,7 @@ impl<T: KeyedItem> SumTree<T> {
.map_or(false, |old_item| old_item.key() < new_key)
{
new_tree.extend(buffered_items.drain(..), cx);
- let slice = cursor.slice(&new_key, Bias::Left, cx);
+ let slice = cursor.slice(&new_key, Bias::Left);
new_tree.append(slice, cx);
old_item = cursor.item();
}
@@ -776,7 +746,7 @@ impl<T: KeyedItem> SumTree<T> {
if let Some(old_item) = old_item {
if old_item.key() == new_key {
removed.push(old_item.clone());
- cursor.next(cx);
+ cursor.next();
}
}
@@ -789,70 +759,25 @@ impl<T: KeyedItem> SumTree<T> {
}
new_tree.extend(buffered_items, cx);
- new_tree.append(cursor.suffix(cx), cx);
+ new_tree.append(cursor.suffix(), cx);
new_tree
};
removed
}
- pub fn get(&self, key: &T::Key, cx: &<T::Summary as Summary>::Context) -> Option<&T> {
+ pub fn get<'a>(
+ &'a self,
+ key: &T::Key,
+ cx: &'a <T::Summary as Summary>::Context,
+ ) -> Option<&'a T> {
let mut cursor = self.cursor::<T::Key>(cx);
- if cursor.seek(key, Bias::Left, cx) {
+ if cursor.seek(key, Bias::Left) {
cursor.item()
} else {
None
}
}
-
- #[inline]
- pub fn contains(&self, key: &T::Key, cx: &<T::Summary as Summary>::Context) -> bool {
- self.get(key, cx).is_some()
- }
-
- pub fn update<F, R>(
- &mut self,
- key: &T::Key,
- cx: &<T::Summary as Summary>::Context,
- f: F,
- ) -> Option<R>
- where
- F: FnOnce(&mut T) -> R,
- {
- let mut cursor = self.cursor::<T::Key>(cx);
- let mut new_tree = cursor.slice(key, Bias::Left, cx);
- let mut result = None;
- if Ord::cmp(key, &cursor.end(cx)) == Ordering::Equal {
- let mut updated = cursor.item().unwrap().clone();
- result = Some(f(&mut updated));
- new_tree.push(updated, cx);
- cursor.next(cx);
- }
- new_tree.append(cursor.suffix(cx), cx);
- drop(cursor);
- *self = new_tree;
- result
- }
-
- pub fn retain<F: FnMut(&T) -> bool>(
- &mut self,
- cx: &<T::Summary as Summary>::Context,
- mut predicate: F,
- ) {
- let mut new_map = SumTree::new(cx);
-
- let mut cursor = self.cursor::<T::Key>(cx);
- cursor.next(cx);
- while let Some(item) = cursor.item() {
- if predicate(&item) {
- new_map.push(item.clone(), cx);
- }
- cursor.next(cx);
- }
- drop(cursor);
-
- *self = new_map;
- }
}
impl<T, S> Default for SumTree<T>
@@ -1061,14 +986,14 @@ mod tests {
tree = {
let mut cursor = tree.cursor::<Count>(&());
- let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right, &());
+ let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right);
if rng.r#gen() {
new_tree.extend(new_items, &());
} else {
new_tree.par_extend(new_items, &());
}
- cursor.seek(&Count(splice_end), Bias::Right, &());
- new_tree.append(cursor.slice(&tree_end, Bias::Right, &()), &());
+ cursor.seek(&Count(splice_end), Bias::Right);
+ new_tree.append(cursor.slice(&tree_end, Bias::Right), &());
new_tree
};
@@ -1090,10 +1015,10 @@ mod tests {
.collect::<Vec<_>>();
let mut item_ix = if rng.r#gen() {
- filter_cursor.next(&());
+ filter_cursor.next();
0
} else {
- filter_cursor.prev(&());
+ filter_cursor.prev();
expected_filtered_items.len().saturating_sub(1)
};
while item_ix < expected_filtered_items.len() {
@@ -1103,19 +1028,19 @@ mod tests {
assert_eq!(actual_item, &reference_item);
assert_eq!(filter_cursor.start().0, reference_index);
log::info!("next");
- filter_cursor.next(&());
+ filter_cursor.next();
item_ix += 1;
while item_ix > 0 && rng.gen_bool(0.2) {
log::info!("prev");
- filter_cursor.prev(&());
+ filter_cursor.prev();
item_ix -= 1;
if item_ix == 0 && rng.gen_bool(0.2) {
- filter_cursor.prev(&());
+ filter_cursor.prev();
assert_eq!(filter_cursor.item(), None);
assert_eq!(filter_cursor.start().0, 0);
- filter_cursor.next(&());
+ filter_cursor.next();
}
}
}
@@ -1124,9 +1049,9 @@ mod tests {
let mut before_start = false;
let mut cursor = tree.cursor::<Count>(&());
let start_pos = rng.gen_range(0..=reference_items.len());
- cursor.seek(&Count(start_pos), Bias::Right, &());
+ cursor.seek(&Count(start_pos), Bias::Right);
let mut pos = rng.gen_range(start_pos..=reference_items.len());
- cursor.seek_forward(&Count(pos), Bias::Right, &());
+ cursor.seek_forward(&Count(pos), Bias::Right);
for i in 0..10 {
assert_eq!(cursor.start().0, pos);
@@ -1152,13 +1077,13 @@ mod tests {
}
if i < 5 {
- cursor.next(&());
+ cursor.next();
if pos < reference_items.len() {
pos += 1;
before_start = false;
}
} else {
- cursor.prev(&());
+ cursor.prev();
if pos == 0 {
before_start = true;
}
@@ -1174,11 +1099,11 @@ mod tests {
let end_bias = if rng.r#gen() { Bias::Left } else { Bias::Right };
let mut cursor = tree.cursor::<Count>(&());
- cursor.seek(&Count(start), start_bias, &());
- let slice = cursor.slice(&Count(end), end_bias, &());
+ cursor.seek(&Count(start), start_bias);
+ let slice = cursor.slice(&Count(end), end_bias);
- cursor.seek(&Count(start), start_bias, &());
- let summary = cursor.summary::<_, Sum>(&Count(end), end_bias, &());
+ cursor.seek(&Count(start), start_bias);
+ let summary = cursor.summary::<_, Sum>(&Count(end), end_bias);
assert_eq!(summary.0, slice.summary().sum);
}
@@ -1191,19 +1116,19 @@ mod tests {
let tree = SumTree::<u8>::default();
let mut cursor = tree.cursor::<IntegersSummary>(&());
assert_eq!(
- cursor.slice(&Count(0), Bias::Right, &()).items(&()),
+ cursor.slice(&Count(0), Bias::Right).items(&()),
Vec::<u8>::new()
);
assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 0);
- cursor.prev(&());
+ cursor.prev();
assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 0);
- cursor.next(&());
+ cursor.next();
assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), None);
@@ -1214,7 +1139,7 @@ mod tests {
tree.extend(vec![1], &());
let mut cursor = tree.cursor::<IntegersSummary>(&());
assert_eq!(
- cursor.slice(&Count(0), Bias::Right, &()).items(&()),
+ cursor.slice(&Count(0), Bias::Right).items(&()),
Vec::<u8>::new()
);
assert_eq!(cursor.item(), Some(&1));
@@ -1222,29 +1147,29 @@ mod tests {
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 0);
- cursor.next(&());
+ cursor.next();
assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), Some(&1));
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 1);
- cursor.prev(&());
+ cursor.prev();
assert_eq!(cursor.item(), Some(&1));
assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 0);
let mut cursor = tree.cursor::<IntegersSummary>(&());
- assert_eq!(cursor.slice(&Count(1), Bias::Right, &()).items(&()), [1]);
+ assert_eq!(cursor.slice(&Count(1), Bias::Right).items(&()), [1]);
assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), Some(&1));
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 1);
- cursor.seek(&Count(0), Bias::Right, &());
+ cursor.seek(&Count(0), Bias::Right);
assert_eq!(
cursor
- .slice(&tree.extent::<Count>(&()), Bias::Right, &())
+ .slice(&tree.extent::<Count>(&()), Bias::Right)
.items(&()),
[1]
);
@@ -1258,80 +1183,80 @@ mod tests {
tree.extend(vec![1, 2, 3, 4, 5, 6], &());
let mut cursor = tree.cursor::<IntegersSummary>(&());
- assert_eq!(cursor.slice(&Count(2), Bias::Right, &()).items(&()), [1, 2]);
+ assert_eq!(cursor.slice(&Count(2), Bias::Right).items(&()), [1, 2]);
assert_eq!(cursor.item(), Some(&3));
assert_eq!(cursor.prev_item(), Some(&2));
assert_eq!(cursor.next_item(), Some(&4));
assert_eq!(cursor.start().sum, 3);
- cursor.next(&());
+ cursor.next();
assert_eq!(cursor.item(), Some(&4));
assert_eq!(cursor.prev_item(), Some(&3));
assert_eq!(cursor.next_item(), Some(&5));
assert_eq!(cursor.start().sum, 6);
- cursor.next(&());
+ cursor.next();
assert_eq!(cursor.item(), Some(&5));
assert_eq!(cursor.prev_item(), Some(&4));
assert_eq!(cursor.next_item(), Some(&6));
assert_eq!(cursor.start().sum, 10);
- cursor.next(&());
+ cursor.next();
assert_eq!(cursor.item(), Some(&6));
assert_eq!(cursor.prev_item(), Some(&5));
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 15);
- cursor.next(&());
- cursor.next(&());
+ cursor.next();
+ cursor.next();
assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), Some(&6));
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 21);
- cursor.prev(&());
+ cursor.prev();
assert_eq!(cursor.item(), Some(&6));
assert_eq!(cursor.prev_item(), Some(&5));
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 15);
- cursor.prev(&());
+ cursor.prev();
assert_eq!(cursor.item(), Some(&5));
assert_eq!(cursor.prev_item(), Some(&4));
assert_eq!(cursor.next_item(), Some(&6));
assert_eq!(cursor.start().sum, 10);
- cursor.prev(&());
+ cursor.prev();
assert_eq!(cursor.item(), Some(&4));
assert_eq!(cursor.prev_item(), Some(&3));
assert_eq!(cursor.next_item(), Some(&5));
assert_eq!(cursor.start().sum, 6);
- cursor.prev(&());
+ cursor.prev();
assert_eq!(cursor.item(), Some(&3));
assert_eq!(cursor.prev_item(), Some(&2));
assert_eq!(cursor.next_item(), Some(&4));
assert_eq!(cursor.start().sum, 3);
- cursor.prev(&());
+ cursor.prev();
assert_eq!(cursor.item(), Some(&2));
assert_eq!(cursor.prev_item(), Some(&1));
assert_eq!(cursor.next_item(), Some(&3));
assert_eq!(cursor.start().sum, 1);
- cursor.prev(&());
+ cursor.prev();
assert_eq!(cursor.item(), Some(&1));
assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), Some(&2));
assert_eq!(cursor.start().sum, 0);
- cursor.prev(&());
+ cursor.prev();
assert_eq!(cursor.item(), None);
assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), Some(&1));
assert_eq!(cursor.start().sum, 0);
- cursor.next(&());
+ cursor.next();
assert_eq!(cursor.item(), Some(&1));
assert_eq!(cursor.prev_item(), None);
assert_eq!(cursor.next_item(), Some(&2));
@@ -1340,7 +1265,7 @@ mod tests {
let mut cursor = tree.cursor::<IntegersSummary>(&());
assert_eq!(
cursor
- .slice(&tree.extent::<Count>(&()), Bias::Right, &())
+ .slice(&tree.extent::<Count>(&()), Bias::Right)
.items(&()),
tree.items(&())
);
@@ -1349,10 +1274,10 @@ mod tests {
assert_eq!(cursor.next_item(), None);
assert_eq!(cursor.start().sum, 21);
- cursor.seek(&Count(3), Bias::Right, &());
+ cursor.seek(&Count(3), Bias::Right);
assert_eq!(
cursor
- .slice(&tree.extent::<Count>(&()), Bias::Right, &())
+ .slice(&tree.extent::<Count>(&()), Bias::Right)
.items(&()),
[4, 5, 6]
);
@@ -1362,25 +1287,16 @@ mod tests {
assert_eq!(cursor.start().sum, 21);
// Seeking can bias left or right
- cursor.seek(&Count(1), Bias::Left, &());
+ cursor.seek(&Count(1), Bias::Left);
assert_eq!(cursor.item(), Some(&1));
- cursor.seek(&Count(1), Bias::Right, &());
+ cursor.seek(&Count(1), Bias::Right);
assert_eq!(cursor.item(), Some(&2));
// Slicing without resetting starts from where the cursor is parked at.
- cursor.seek(&Count(1), Bias::Right, &());
- assert_eq!(
- cursor.slice(&Count(3), Bias::Right, &()).items(&()),
- vec![2, 3]
- );
- assert_eq!(
- cursor.slice(&Count(6), Bias::Left, &()).items(&()),
- vec![4, 5]
- );
- assert_eq!(
- cursor.slice(&Count(6), Bias::Right, &()).items(&()),
- vec![6]
- );
+ cursor.seek(&Count(1), Bias::Right);
+ assert_eq!(cursor.slice(&Count(3), Bias::Right).items(&()), vec![2, 3]);
+ assert_eq!(cursor.slice(&Count(6), Bias::Left).items(&()), vec![4, 5]);
+ assert_eq!(cursor.slice(&Count(6), Bias::Right).items(&()), vec![6]);
}
#[test]
@@ -54,7 +54,7 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
pub fn get(&self, key: &K) -> Option<&V> {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
- cursor.seek(&MapKeyRef(Some(key)), Bias::Left, &());
+ cursor.seek(&MapKeyRef(Some(key)), Bias::Left);
if let Some(item) = cursor.item() {
if Some(key) == item.key().0.as_ref() {
Some(&item.value)
@@ -71,10 +71,10 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
}
pub fn extend(&mut self, iter: impl IntoIterator<Item = (K, V)>) {
- let mut edits = Vec::new();
- for (key, value) in iter {
- edits.push(Edit::Insert(MapEntry { key, value }));
- }
+ let edits: Vec<_> = iter
+ .into_iter()
+ .map(|(key, value)| Edit::Insert(MapEntry { key, value }))
+ .collect();
self.0.edit(edits, &());
}
@@ -86,12 +86,12 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
let mut removed = None;
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
let key = MapKeyRef(Some(key));
- let mut new_tree = cursor.slice(&key, Bias::Left, &());
- if key.cmp(&cursor.end(&()), &()) == Ordering::Equal {
+ let mut new_tree = cursor.slice(&key, Bias::Left);
+ if key.cmp(&cursor.end(), &()) == Ordering::Equal {
removed = Some(cursor.item().unwrap().value.clone());
- cursor.next(&());
+ cursor.next();
}
- new_tree.append(cursor.suffix(&()), &());
+ new_tree.append(cursor.suffix(), &());
drop(cursor);
self.0 = new_tree;
removed
@@ -101,9 +101,9 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
let start = MapSeekTargetAdaptor(start);
let end = MapSeekTargetAdaptor(end);
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
- let mut new_tree = cursor.slice(&start, Bias::Left, &());
- cursor.seek(&end, Bias::Left, &());
- new_tree.append(cursor.suffix(&()), &());
+ let mut new_tree = cursor.slice(&start, Bias::Left);
+ cursor.seek(&end, Bias::Left);
+ new_tree.append(cursor.suffix(), &());
drop(cursor);
self.0 = new_tree;
}
@@ -112,15 +112,15 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
pub fn closest(&self, key: &K) -> Option<(&K, &V)> {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
let key = MapKeyRef(Some(key));
- cursor.seek(&key, Bias::Right, &());
- cursor.prev(&());
+ cursor.seek(&key, Bias::Right);
+ cursor.prev();
cursor.item().map(|item| (&item.key, &item.value))
}
pub fn iter_from<'a>(&'a self, from: &K) -> impl Iterator<Item = (&'a K, &'a V)> + 'a {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
let from_key = MapKeyRef(Some(from));
- cursor.seek(&from_key, Bias::Left, &());
+ cursor.seek(&from_key, Bias::Left);
cursor.map(|map_entry| (&map_entry.key, &map_entry.value))
}
@@ -131,15 +131,15 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
{
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
let key = MapKeyRef(Some(key));
- let mut new_tree = cursor.slice(&key, Bias::Left, &());
+ let mut new_tree = cursor.slice(&key, Bias::Left);
let mut result = None;
- if key.cmp(&cursor.end(&()), &()) == Ordering::Equal {
+ if key.cmp(&cursor.end(), &()) == Ordering::Equal {
let mut updated = cursor.item().unwrap().clone();
result = Some(f(&mut updated.value));
new_tree.push(updated, &());
- cursor.next(&());
+ cursor.next();
}
- new_tree.append(cursor.suffix(&()), &());
+ new_tree.append(cursor.suffix(), &());
drop(cursor);
self.0 = new_tree;
result
@@ -149,12 +149,12 @@ impl<K: Clone + Ord, V: Clone> TreeMap<K, V> {
let mut new_map = SumTree::<MapEntry<K, V>>::default();
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(&());
- cursor.next(&());
+ cursor.next();
while let Some(item) = cursor.item() {
if predicate(&item.key, &item.value) {
new_map.push(item.clone(), &());
}
- cursor.next(&());
+ cursor.next();
}
drop(cursor);
@@ -101,7 +101,7 @@ impl Anchor {
} else {
let fragment_id = buffer.fragment_id_for_anchor(self);
let mut fragment_cursor = buffer.fragments.cursor::<(Option<&Locator>, usize)>(&None);
- fragment_cursor.seek(&Some(fragment_id), Bias::Left, &None);
+ fragment_cursor.seek(&Some(fragment_id), Bias::Left);
fragment_cursor
.item()
.map_or(false, |fragment| fragment.visible)
@@ -320,7 +320,39 @@ impl History {
last_edit_at: now,
suppress_grouping: false,
});
- self.redo_stack.clear();
+ }
+
+ /// Differs from `push_transaction` in that it does not clear the redo
+ /// stack. Intended to be used to create a parent transaction to merge
+ /// potential child transactions into.
+ ///
+ /// The caller is responsible for removing it from the undo history using
+ /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
+ /// are merged into this transaction, the caller is responsible for ensuring
+ /// the redo stack is cleared. The easiest way to ensure the redo stack is
+ /// cleared is to create transactions with the usual `start_transaction` and
+ /// `end_transaction` methods and merge the resulting transactions into
+ /// the transaction created by this method.
+ fn push_empty_transaction(
+ &mut self,
+ start: clock::Global,
+ now: Instant,
+ clock: &mut clock::Lamport,
+ ) -> TransactionId {
+ assert_eq!(self.transaction_depth, 0);
+ let id = clock.tick();
+ let transaction = Transaction {
+ id,
+ start,
+ edit_ids: Vec::new(),
+ };
+ self.undo_stack.push(HistoryEntry {
+ transaction,
+ first_edit_at: now,
+ last_edit_at: now,
+ suppress_grouping: false,
+ });
+ id
}
fn push_undo(&mut self, op_id: clock::Lamport) {
@@ -824,14 +856,13 @@ impl Buffer {
let mut new_ropes =
RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(&None);
- let mut new_fragments =
- old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right, &None);
+ let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right);
new_ropes.append(new_fragments.summary().text);
let mut fragment_start = old_fragments.start().visible;
for (range, new_text) in edits {
let new_text = LineEnding::normalize_arc(new_text.into());
- let fragment_end = old_fragments.end(&None).visible;
+ let fragment_end = old_fragments.end().visible;
// If the current fragment ends before this range, then jump ahead to the first fragment
// that extends past the start of this range, reusing any intervening fragments.
@@ -847,10 +878,10 @@ impl Buffer {
new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None);
}
- old_fragments.next(&None);
+ old_fragments.next();
}
- let slice = old_fragments.slice(&range.start, Bias::Right, &None);
+ let slice = old_fragments.slice(&range.start, Bias::Right);
new_ropes.append(slice.summary().text);
new_fragments.append(slice, &None);
fragment_start = old_fragments.start().visible;
@@ -903,7 +934,7 @@ impl Buffer {
// portions as deleted.
while fragment_start < range.end {
let fragment = old_fragments.item().unwrap();
- let fragment_end = old_fragments.end(&None).visible;
+ let fragment_end = old_fragments.end().visible;
let mut intersection = fragment.clone();
let intersection_end = cmp::min(range.end, fragment_end);
if fragment.visible {
@@ -930,7 +961,7 @@ impl Buffer {
fragment_start = intersection_end;
}
if fragment_end <= range.end {
- old_fragments.next(&None);
+ old_fragments.next();
}
}
@@ -942,7 +973,7 @@ impl Buffer {
// If the current fragment has been partially consumed, then consume the rest of it
// and advance to the next fragment before slicing.
if fragment_start > old_fragments.start().visible {
- let fragment_end = old_fragments.end(&None).visible;
+ let fragment_end = old_fragments.end().visible;
if fragment_end > fragment_start {
let mut suffix = old_fragments.item().unwrap().clone();
suffix.len = fragment_end - fragment_start;
@@ -951,10 +982,10 @@ impl Buffer {
new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None);
}
- old_fragments.next(&None);
+ old_fragments.next();
}
- let suffix = old_fragments.suffix(&None);
+ let suffix = old_fragments.suffix();
new_ropes.append(suffix.summary().text);
new_fragments.append(suffix, &None);
let (visible_text, deleted_text) = new_ropes.finish();
@@ -1041,16 +1072,13 @@ impl Buffer {
let mut new_ropes =
RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>(&cx);
- let mut new_fragments = old_fragments.slice(
- &VersionedFullOffset::Offset(ranges[0].start),
- Bias::Left,
- &cx,
- );
+ let mut new_fragments =
+ old_fragments.slice(&VersionedFullOffset::Offset(ranges[0].start), Bias::Left);
new_ropes.append(new_fragments.summary().text);
let mut fragment_start = old_fragments.start().0.full_offset();
for (range, new_text) in edits {
- let fragment_end = old_fragments.end(&cx).0.full_offset();
+ let fragment_end = old_fragments.end().0.full_offset();
// If the current fragment ends before this range, then jump ahead to the first fragment
// that extends past the start of this range, reusing any intervening fragments.
@@ -1067,18 +1095,18 @@ impl Buffer {
new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None);
}
- old_fragments.next(&cx);
+ old_fragments.next();
}
let slice =
- old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx);
+ old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left);
new_ropes.append(slice.summary().text);
new_fragments.append(slice, &None);
fragment_start = old_fragments.start().0.full_offset();
}
// If we are at the end of a non-concurrent fragment, advance to the next one.
- let fragment_end = old_fragments.end(&cx).0.full_offset();
+ let fragment_end = old_fragments.end().0.full_offset();
if fragment_end == range.start && fragment_end > fragment_start {
let mut fragment = old_fragments.item().unwrap().clone();
fragment.len = fragment_end.0 - fragment_start.0;
@@ -1086,7 +1114,7 @@ impl Buffer {
new_insertions.push(InsertionFragment::insert_new(&fragment));
new_ropes.push_fragment(&fragment, fragment.visible);
new_fragments.push(fragment, &None);
- old_fragments.next(&cx);
+ old_fragments.next();
fragment_start = old_fragments.start().0.full_offset();
}
@@ -1096,7 +1124,7 @@ impl Buffer {
if fragment_start == range.start && fragment.timestamp > timestamp {
new_ropes.push_fragment(fragment, fragment.visible);
new_fragments.push(fragment.clone(), &None);
- old_fragments.next(&cx);
+ old_fragments.next();
debug_assert_eq!(fragment_start, range.start);
} else {
break;
@@ -1152,7 +1180,7 @@ impl Buffer {
// portions as deleted.
while fragment_start < range.end {
let fragment = old_fragments.item().unwrap();
- let fragment_end = old_fragments.end(&cx).0.full_offset();
+ let fragment_end = old_fragments.end().0.full_offset();
let mut intersection = fragment.clone();
let intersection_end = cmp::min(range.end, fragment_end);
if fragment.was_visible(version, &self.undo_map) {
@@ -1181,7 +1209,7 @@ impl Buffer {
fragment_start = intersection_end;
}
if fragment_end <= range.end {
- old_fragments.next(&cx);
+ old_fragments.next();
}
}
}
@@ -1189,7 +1217,7 @@ impl Buffer {
// If the current fragment has been partially consumed, then consume the rest of it
// and advance to the next fragment before slicing.
if fragment_start > old_fragments.start().0.full_offset() {
- let fragment_end = old_fragments.end(&cx).0.full_offset();
+ let fragment_end = old_fragments.end().0.full_offset();
if fragment_end > fragment_start {
let mut suffix = old_fragments.item().unwrap().clone();
suffix.len = fragment_end.0 - fragment_start.0;
@@ -1198,10 +1226,10 @@ impl Buffer {
new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None);
}
- old_fragments.next(&cx);
+ old_fragments.next();
}
- let suffix = old_fragments.suffix(&cx);
+ let suffix = old_fragments.suffix();
new_ropes.append(suffix.summary().text);
new_fragments.append(suffix, &None);
let (visible_text, deleted_text) = new_ropes.finish();
@@ -1250,7 +1278,6 @@ impl Buffer {
split_offset: insertion_slice.range.start,
},
Bias::Left,
- &(),
);
}
while let Some(item) = insertions_cursor.item() {
@@ -1260,7 +1287,7 @@ impl Buffer {
break;
}
fragment_ids.push(&item.fragment_id);
- insertions_cursor.next(&());
+ insertions_cursor.next();
}
}
fragment_ids.sort_unstable();
@@ -1277,7 +1304,7 @@ impl Buffer {
RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
- let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left, &None);
+ let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left);
new_ropes.append(preceding_fragments.summary().text);
new_fragments.append(preceding_fragments, &None);
@@ -1304,11 +1331,11 @@ impl Buffer {
new_ropes.push_fragment(&fragment, fragment_was_visible);
new_fragments.push(fragment, &None);
- old_fragments.next(&None);
+ old_fragments.next();
}
}
- let suffix = old_fragments.suffix(&None);
+ let suffix = old_fragments.suffix();
new_ropes.append(suffix.summary().text);
new_fragments.append(suffix, &None);
@@ -1495,6 +1522,24 @@ impl Buffer {
self.history.push_transaction(transaction, now);
}
+ /// Differs from `push_transaction` in that it does not clear the redo
+ /// stack. Intended to be used to create a parent transaction to merge
+ /// potential child transactions into.
+ ///
+ /// The caller is responsible for removing it from the undo history using
+ /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
+ /// are merged into this transaction, the caller is responsible for ensuring
+ /// the redo stack is cleared. The easiest way to ensure the redo stack is
+ /// cleared is to create transactions with the usual `start_transaction` and
+ /// `end_transaction` methods and merge the resulting transactions into
+ /// the transaction created by this method.
+ pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
+ self.history
+ .push_empty_transaction(self.version.clone(), now, &mut self.lamport_clock)
+ }
+
pub fn edited_ranges_for_transaction_id<D>(
&self,
transaction_id: TransactionId,
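As a rough illustration of the contract spelled out in the doc comment above: the empty parent transaction is created first, child edits go through the normal transaction methods, and the caller either merges the children in or forgets the parent. This is a sketch under assumptions, not code from this change; only `push_empty_transaction`, `forget_transaction`, `start_transaction`, and `end_transaction` are named by the diff (signatures abbreviated here), and the placeholder condition and the merge step are hypothetical.

```rust
use std::time::Instant;

// Create the empty parent transaction; note that it does NOT clear the redo stack.
let parent_id = buffer.push_empty_transaction(Instant::now());

// Child edits should go through the usual transaction methods, which take care
// of clearing the redo stack when edits actually happen.
buffer.start_transaction();
// ... perform edits ...
buffer.end_transaction();

// Hypothetical check: did the child transaction record any edits?
let made_no_edits: bool = /* inspect the child transaction */ true;

if made_no_edits {
    // Nothing was merged into the parent, so remove it from the undo history
    // to keep undo/redo consistent.
    buffer.forget_transaction(parent_id);
} else {
    // Otherwise fold the child transaction(s) into `parent_id` (the merging
    // API itself is not part of this diff).
}
```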
@@ -1521,7 +1566,7 @@ impl Buffer {
.fragment_ids_for_edits(edit_ids.into_iter())
.into_iter()
.filter_map(move |fragment_id| {
- cursor.seek_forward(&Some(fragment_id), Bias::Left, &None);
+ cursor.seek_forward(&Some(fragment_id), Bias::Left);
let fragment = cursor.item()?;
let start_offset = cursor.start().1;
let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
@@ -1743,7 +1788,7 @@ impl Buffer {
let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>(&None);
for insertion_fragment in self.snapshot.insertions.cursor::<()>(&()) {
- cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left, &None);
+ cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left);
let fragment = cursor.item().unwrap();
assert_eq!(insertion_fragment.fragment_id, fragment.id);
assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
@@ -1862,7 +1907,7 @@ impl BufferSnapshot {
.filter::<_, FragmentTextSummary>(&None, move |summary| {
!version.observed_all(&summary.max_version)
});
- cursor.next(&None);
+ cursor.next();
let mut visible_cursor = self.visible_text.cursor(0);
let mut deleted_cursor = self.deleted_text.cursor(0);
@@ -1875,18 +1920,18 @@ impl BufferSnapshot {
if fragment.was_visible(version, &self.undo_map) {
if fragment.visible {
- let text = visible_cursor.slice(cursor.end(&None).visible);
+ let text = visible_cursor.slice(cursor.end().visible);
rope.append(text);
} else {
deleted_cursor.seek_forward(cursor.start().deleted);
- let text = deleted_cursor.slice(cursor.end(&None).deleted);
+ let text = deleted_cursor.slice(cursor.end().deleted);
rope.append(text);
}
} else if fragment.visible {
- visible_cursor.seek_forward(cursor.end(&None).visible);
+ visible_cursor.seek_forward(cursor.end().visible);
}
- cursor.next(&None);
+ cursor.next();
}
if cursor.start().visible > visible_cursor.offset() {
@@ -2202,7 +2247,7 @@ impl BufferSnapshot {
timestamp: anchor.timestamp,
split_offset: anchor.offset,
};
- insertion_cursor.seek(&anchor_key, anchor.bias, &());
+ insertion_cursor.seek(&anchor_key, anchor.bias);
if let Some(insertion) = insertion_cursor.item() {
let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
if comparison == Ordering::Greater
@@ -2210,15 +2255,15 @@ impl BufferSnapshot {
&& comparison == Ordering::Equal
&& anchor.offset > 0)
{
- insertion_cursor.prev(&());
+ insertion_cursor.prev();
}
} else {
- insertion_cursor.prev(&());
+ insertion_cursor.prev();
}
let insertion = insertion_cursor.item().expect("invalid insertion");
assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
- fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left, &None);
+ fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left);
let fragment = fragment_cursor.item().unwrap();
let mut fragment_offset = fragment_cursor.start().1;
if fragment.visible {
@@ -2249,7 +2294,7 @@ impl BufferSnapshot {
split_offset: anchor.offset,
};
let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(&());
- insertion_cursor.seek(&anchor_key, anchor.bias, &());
+ insertion_cursor.seek(&anchor_key, anchor.bias);
if let Some(insertion) = insertion_cursor.item() {
let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
if comparison == Ordering::Greater
@@ -2257,10 +2302,10 @@ impl BufferSnapshot {
&& comparison == Ordering::Equal
&& anchor.offset > 0)
{
- insertion_cursor.prev(&());
+ insertion_cursor.prev();
}
} else {
- insertion_cursor.prev(&());
+ insertion_cursor.prev();
}
let Some(insertion) = insertion_cursor
@@ -2274,7 +2319,7 @@ impl BufferSnapshot {
};
let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(&None);
- fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None);
+ fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left);
let fragment = fragment_cursor.item().unwrap();
let mut fragment_offset = fragment_cursor.start().1;
if fragment.visible {
@@ -2295,7 +2340,7 @@ impl BufferSnapshot {
split_offset: anchor.offset,
};
let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>(&());
- insertion_cursor.seek(&anchor_key, anchor.bias, &());
+ insertion_cursor.seek(&anchor_key, anchor.bias);
if let Some(insertion) = insertion_cursor.item() {
let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
if comparison == Ordering::Greater
@@ -2303,10 +2348,10 @@ impl BufferSnapshot {
&& comparison == Ordering::Equal
&& anchor.offset > 0)
{
- insertion_cursor.prev(&());
+ insertion_cursor.prev();
}
} else {
- insertion_cursor.prev(&());
+ insertion_cursor.prev();
}
let Some(insertion) = insertion_cursor.item().filter(|insertion| {
@@ -2345,7 +2390,7 @@ impl BufferSnapshot {
Anchor::MAX
} else {
let mut fragment_cursor = self.fragments.cursor::<usize>(&None);
- fragment_cursor.seek(&offset, bias, &None);
+ fragment_cursor.seek(&offset, bias);
let fragment = fragment_cursor.item().unwrap();
let overshoot = offset - *fragment_cursor.start();
Anchor {
@@ -2425,7 +2470,7 @@ impl BufferSnapshot {
let mut cursor = self.fragments.filter(&None, move |summary| {
!since.observed_all(&summary.max_version)
});
- cursor.next(&None);
+ cursor.next();
Some(cursor)
};
let mut cursor = self
@@ -2433,7 +2478,7 @@ impl BufferSnapshot {
.cursor::<(Option<&Locator>, FragmentTextSummary)>(&None);
let start_fragment_id = self.fragment_id_for_anchor(&range.start);
- cursor.seek(&Some(start_fragment_id), Bias::Left, &None);
+ cursor.seek(&Some(start_fragment_id), Bias::Left);
let mut visible_start = cursor.start().1.visible;
let mut deleted_start = cursor.start().1.deleted;
if let Some(fragment) = cursor.item() {
@@ -2466,7 +2511,7 @@ impl BufferSnapshot {
let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
!since.observed_all(&summary.max_version)
});
- cursor.next(&None);
+ cursor.next();
while let Some(fragment) = cursor.item() {
if fragment.id > *end_fragment_id {
break;
@@ -2478,7 +2523,7 @@ impl BufferSnapshot {
return true;
}
}
- cursor.next(&None);
+ cursor.next();
}
}
false
@@ -2489,14 +2534,14 @@ impl BufferSnapshot {
let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| {
!since.observed_all(&summary.max_version)
});
- cursor.next(&None);
+ cursor.next();
while let Some(fragment) = cursor.item() {
let was_visible = fragment.was_visible(since, &self.undo_map);
let is_visible = fragment.visible;
if was_visible != is_visible {
return true;
}
- cursor.next(&None);
+ cursor.next();
}
}
false
@@ -2601,7 +2646,7 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
while let Some(fragment) = cursor.item() {
if fragment.id < *self.range.start.0 {
- cursor.next(&None);
+ cursor.next();
continue;
} else if fragment.id > *self.range.end.0 {
break;
@@ -2634,7 +2679,7 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
};
if !fragment.was_visible(self.since, self.undos) && fragment.visible {
- let mut visible_end = cursor.end(&None).visible;
+ let mut visible_end = cursor.end().visible;
if fragment.id == *self.range.end.0 {
visible_end = cmp::min(
visible_end,
@@ -2660,7 +2705,7 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
self.new_end = new_end;
} else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
- let mut deleted_end = cursor.end(&None).deleted;
+ let mut deleted_end = cursor.end().deleted;
if fragment.id == *self.range.end.0 {
deleted_end = cmp::min(
deleted_end,
@@ -2690,7 +2735,7 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
self.old_end = old_end;
}
- cursor.next(&None);
+ cursor.next();
}
pending_edit
@@ -74,7 +74,6 @@ impl UndoMap {
undo_id: Default::default(),
},
Bias::Left,
- &(),
);
let mut undo_count = 0;
@@ -99,7 +98,6 @@ impl UndoMap {
undo_id: Default::default(),
},
Bias::Left,
- &(),
);
let mut undo_count = 0;
@@ -11,8 +11,8 @@ use gpui::{App, Task, Window, actions};
use rpc::proto::{self};
use theme::ActiveTheme;
use ui::{
- Avatar, AvatarAudioStatusIndicator, ContextMenu, ContextMenuItem, Facepile, PopoverMenu,
- SplitButton, TintColor, Tooltip, prelude::*,
+ Avatar, AvatarAudioStatusIndicator, ContextMenu, ContextMenuItem, Divider, Facepile,
+ PopoverMenu, SplitButton, SplitButtonStyle, TintColor, Tooltip, prelude::*,
};
use util::maybe;
use workspace::notifications::DetachAndPromptErr;
@@ -383,6 +383,7 @@ impl TitleBar {
.detach_and_log_err(cx);
}),
)
+ .child(Divider::vertical())
.into_any_element(),
);
@@ -497,6 +498,7 @@ impl TitleBar {
trigger.render(window, cx),
self.render_screen_list().into_any_element(),
)
+ .style(SplitButtonStyle::Outlined)
.into_any_element(),
);
}
@@ -547,10 +549,17 @@ impl TitleBar {
entry_render: Box::new(move |_, _| {
h_flex()
.gap_2()
- .child(Icon::new(IconName::Screen).when(
- active_screenshare_id == Some(meta.id),
- |this| this.color(Color::Accent),
- ))
+ .child(
+ Icon::new(IconName::Screen)
+ .size(IconSize::XSmall)
+ .map(|this| {
+ if active_screenshare_id == Some(meta.id) {
+ this.color(Color::Accent)
+ } else {
+ this.color(Color::Muted)
+ }
+ }),
+ )
.child(Label::new(label.clone()))
.child(
Label::new(resolution.clone())
@@ -1,6 +1,6 @@
use gpui::{
AnyElement, App, BoxShadow, IntoElement, ParentElement, RenderOnce, Styled, Window, div, hsla,
- point, px,
+ point, prelude::FluentBuilder, px,
};
use theme::ActiveTheme;
@@ -8,6 +8,12 @@ use crate::{ElevationIndex, h_flex};
use super::ButtonLike;
+#[derive(Clone, Copy, PartialEq)]
+pub enum SplitButtonStyle {
+ Filled,
+ Outlined,
+}
+
/// A button with two parts: a primary action on the left and a secondary action on the right.
///
/// The left side is a [`ButtonLike`] with the main action, while the right side can contain
@@ -18,11 +24,21 @@ use super::ButtonLike;
pub struct SplitButton {
pub left: ButtonLike,
pub right: AnyElement,
+ style: SplitButtonStyle,
}
impl SplitButton {
pub fn new(left: ButtonLike, right: AnyElement) -> Self {
- Self { left, right }
+ Self {
+ left,
+ right,
+ style: SplitButtonStyle::Filled,
+ }
+ }
+
+ pub fn style(mut self, style: SplitButtonStyle) -> Self {
+ self.style = style;
+ self
}
}
@@ -31,21 +47,23 @@ impl RenderOnce for SplitButton {
h_flex()
.rounded_sm()
.border_1()
- .border_color(cx.theme().colors().text_muted.alpha(0.12))
+ .border_color(cx.theme().colors().border.opacity(0.5))
.child(div().flex_grow().child(self.left))
.child(
div()
.h_full()
.w_px()
- .bg(cx.theme().colors().text_muted.alpha(0.16)),
+ .bg(cx.theme().colors().border.opacity(0.5)),
)
.child(self.right)
- .bg(ElevationIndex::Surface.on_elevation_bg(cx))
- .shadow(vec![BoxShadow {
- color: hsla(0.0, 0.0, 0.0, 0.16),
- offset: point(px(0.), px(1.)),
- blur_radius: px(0.),
- spread_radius: px(0.),
- }])
+ .when(self.style == SplitButtonStyle::Filled, |this| {
+ this.bg(ElevationIndex::Surface.on_elevation_bg(cx))
+ .shadow(vec![BoxShadow {
+ color: hsla(0.0, 0.0, 0.0, 0.16),
+ offset: point(px(0.), px(1.)),
+ blur_radius: px(0.),
+ spread_radius: px(0.),
+ }])
+ })
}
}
@@ -84,7 +84,9 @@ impl RenderOnce for List {
(false, _) => this.children(self.children),
(true, Some(false)) => this,
(true, _) => match self.empty_message {
- EmptyMessage::Text(text) => this.child(Label::new(text).color(Color::Muted)),
+ EmptyMessage::Text(text) => {
+ this.px_2().child(Label::new(text).color(Color::Muted))
+ }
EmptyMessage::Element(element) => this.child(element),
},
})
@@ -93,6 +93,7 @@ impl RenderOnce for Modal {
#[derive(IntoElement)]
pub struct ModalHeader {
headline: Option<SharedString>,
+ description: Option<SharedString>,
children: SmallVec<[AnyElement; 2]>,
show_dismiss_button: bool,
show_back_button: bool,
@@ -108,6 +109,7 @@ impl ModalHeader {
pub fn new() -> Self {
Self {
headline: None,
+ description: None,
children: SmallVec::new(),
show_dismiss_button: false,
show_back_button: false,
@@ -123,6 +125,11 @@ impl ModalHeader {
self
}
+ pub fn description(mut self, description: impl Into<SharedString>) -> Self {
+ self.description = Some(description.into());
+ self
+ }
+
pub fn show_dismiss_button(mut self, show: bool) -> Self {
self.show_dismiss_button = show;
self
@@ -171,7 +178,14 @@ impl RenderOnce for ModalHeader {
}),
)
})
- .child(div().flex_1().children(children))
+ .child(
+ v_flex().flex_1().children(children).when_some(
+ self.description,
+ |this, description| {
+ this.child(Label::new(description).color(Color::Muted).mb_2())
+ },
+ ),
+ )
.when(self.show_dismiss_button, |this| {
this.child(
IconButton::new("dismiss", IconName::Close)
@@ -588,7 +588,7 @@ impl SwitchField {
toggle_state: toggle_state.into(),
on_click: Arc::new(on_click),
disabled: false,
- color: SwitchColor::default(),
+ color: SwitchColor::Accent,
}
}
@@ -634,6 +634,15 @@ impl RenderOnce for SwitchField {
}
}),
)
+ .when(!self.disabled, |this| {
+ this.on_click({
+ let on_click = self.on_click.clone();
+ let toggle_state = self.toggle_state;
+ move |_click, window, cx| {
+ (on_click)(&toggle_state.inverse(), window, cx);
+ }
+ })
+ })
}
}
@@ -97,6 +97,10 @@ impl SingleLineInput {
pub fn editor(&self) -> &Entity<Editor> {
&self.editor
}
+
+ pub fn text(&self, cx: &App) -> String {
+ self.editor().read(cx).text(cx)
+ }
}
impl Render for SingleLineInput {
@@ -1,4 +1,4 @@
-use client::{TelemetrySettings, telemetry::Telemetry};
+use client::{DisableAiSettings, TelemetrySettings, telemetry::Telemetry};
use db::kvp::KEY_VALUE_STORE;
use gpui::{
Action, App, Context, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement,
@@ -174,23 +174,25 @@ impl Render for WelcomePage {
.ok();
})),
)
- .child(
- Button::new(
- "try-zed-edit-prediction",
- edit_prediction_label,
+ .when(!DisableAiSettings::get_global(cx).disable_ai, |parent| {
+ parent.child(
+ Button::new(
+ "edit_prediction_onboarding",
+ edit_prediction_label,
+ )
+ .disabled(edit_prediction_provider_is_zed)
+ .icon(IconName::ZedPredict)
+ .icon_size(IconSize::XSmall)
+ .icon_color(Color::Muted)
+ .icon_position(IconPosition::Start)
+ .on_click(
+ cx.listener(|_, _, window, cx| {
+ telemetry::event!("Welcome Screen Try Edit Prediction clicked");
+ window.dispatch_action(zed_actions::OpenZedPredictOnboarding.boxed_clone(), cx);
+ }),
+ ),
)
- .disabled(edit_prediction_provider_is_zed)
- .icon(IconName::ZedPredict)
- .icon_size(IconSize::XSmall)
- .icon_color(Color::Muted)
- .icon_position(IconPosition::Start)
- .on_click(
- cx.listener(|_, _, window, cx| {
- telemetry::event!("Welcome Screen Try Edit Prediction clicked");
- window.dispatch_action(zed_actions::OpenZedPredictOnboarding.boxed_clone(), cx);
- }),
- ),
- )
+ })
.child(
Button::new("edit settings", "Edit Settings")
.icon(IconName::Settings)
@@ -242,6 +242,7 @@ struct PanelEntry {
pub struct PanelButtons {
dock: Entity<Dock>,
+ _settings_subscription: Subscription,
}
impl Dock {
@@ -833,7 +834,11 @@ impl Render for Dock {
impl PanelButtons {
pub fn new(dock: Entity<Dock>, cx: &mut Context<Self>) -> Self {
cx.observe(&dock, |_, _, cx| cx.notify()).detach();
- Self { dock }
+ let settings_subscription = cx.observe_global::<SettingsStore>(|_, cx| cx.notify());
+ Self {
+ dock,
+ _settings_subscription: settings_subscription,
+ }
}
}
@@ -3239,28 +3239,37 @@ impl Pane {
split_direction = None;
}
- if let Ok(open_task) = workspace.update_in(cx, |workspace, window, cx| {
- if let Some(split_direction) = split_direction {
- to_pane = workspace.split_pane(to_pane, split_direction, window, cx);
- }
- workspace.open_paths(
- paths,
- OpenOptions {
- visible: Some(OpenVisible::OnlyDirectories),
- ..Default::default()
- },
- Some(to_pane.downgrade()),
- window,
- cx,
- )
- }) {
+ if let Ok((open_task, to_pane)) =
+ workspace.update_in(cx, |workspace, window, cx| {
+ if let Some(split_direction) = split_direction {
+ to_pane =
+ workspace.split_pane(to_pane, split_direction, window, cx);
+ }
+ (
+ workspace.open_paths(
+ paths,
+ OpenOptions {
+ visible: Some(OpenVisible::OnlyDirectories),
+ ..Default::default()
+ },
+ Some(to_pane.downgrade()),
+ window,
+ cx,
+ ),
+ to_pane,
+ )
+ })
+ {
let opened_items: Vec<_> = open_task.await;
- _ = workspace.update(cx, |workspace, cx| {
+ _ = workspace.update_in(cx, |workspace, window, cx| {
for item in opened_items.into_iter().flatten() {
if let Err(e) = item {
workspace.show_error(&e, cx);
}
}
+ if to_pane.read(cx).items_len() == 0 {
+ workspace.remove_pane(to_pane, None, window, cx);
+ }
});
}
})
@@ -1336,6 +1336,14 @@ impl WorkspaceDb {
}
}
+ query! {
+ pub(crate) async fn set_session_id(workspace_id: WorkspaceId, session_id: Option<String>) -> Result<()> {
+ UPDATE workspaces
+ SET session_id = ?2
+ WHERE workspace_id = ?1
+ }
+ }
+
pub async fn toolchain(
&self,
workspace_id: WorkspaceId,
@@ -1016,6 +1016,15 @@ pub enum OpenVisible {
OnlyDirectories,
}
+enum WorkspaceLocation {
+ // Valid local paths or SSH project to serialize
+ Location(SerializedWorkspaceLocation),
+ // No valid location found, so clear the session id
+ DetachFromSession,
+ // No valid location found to serialize
+ None,
+}
+
type PromptForNewPath = Box<
dyn Fn(
&mut Workspace,
@@ -1135,7 +1144,6 @@ impl Workspace {
this.update_window_title(window, cx);
this.serialize_workspace(window, cx);
// This event could be triggered by `AddFolderToProject` or `RemoveFromProject`.
- // So we need to update the history.
this.update_history(cx);
}
@@ -5218,48 +5226,58 @@ impl Workspace {
}
}
- if let Some(location) = self.serialize_workspace_location(cx) {
- let breakpoints = self.project.update(cx, |project, cx| {
- project
- .breakpoint_store()
- .read(cx)
- .all_source_breakpoints(cx)
- });
+ match self.serialize_workspace_location(cx) {
+ WorkspaceLocation::Location(location) => {
+ let breakpoints = self.project.update(cx, |project, cx| {
+ project
+ .breakpoint_store()
+ .read(cx)
+ .all_source_breakpoints(cx)
+ });
- let center_group = build_serialized_pane_group(&self.center.root, window, cx);
- let docks = build_serialized_docks(self, window, cx);
- let window_bounds = Some(SerializedWindowBounds(window.window_bounds()));
- let serialized_workspace = SerializedWorkspace {
- id: database_id,
- location,
- center_group,
- window_bounds,
- display: Default::default(),
- docks,
- centered_layout: self.centered_layout,
- session_id: self.session_id.clone(),
- breakpoints,
- window_id: Some(window.window_handle().window_id().as_u64()),
- };
+ let center_group = build_serialized_pane_group(&self.center.root, window, cx);
+ let docks = build_serialized_docks(self, window, cx);
+ let window_bounds = Some(SerializedWindowBounds(window.window_bounds()));
+ let serialized_workspace = SerializedWorkspace {
+ id: database_id,
+ location,
+ center_group,
+ window_bounds,
+ display: Default::default(),
+ docks,
+ centered_layout: self.centered_layout,
+ session_id: self.session_id.clone(),
+ breakpoints,
+ window_id: Some(window.window_handle().window_id().as_u64()),
+ };
- return window.spawn(cx, async move |_| {
- persistence::DB.save_workspace(serialized_workspace).await;
- });
+ window.spawn(cx, async move |_| {
+ persistence::DB.save_workspace(serialized_workspace).await;
+ })
+ }
+ WorkspaceLocation::DetachFromSession => window.spawn(cx, async move |_| {
+ persistence::DB
+ .set_session_id(database_id, None)
+ .await
+ .log_err();
+ }),
+ WorkspaceLocation::None => Task::ready(()),
}
- Task::ready(())
}
- fn serialize_workspace_location(&self, cx: &App) -> Option<SerializedWorkspaceLocation> {
+ fn serialize_workspace_location(&self, cx: &App) -> WorkspaceLocation {
if let Some(ssh_project) = &self.serialized_ssh_project {
- Some(SerializedWorkspaceLocation::Ssh(ssh_project.clone()))
+ WorkspaceLocation::Location(SerializedWorkspaceLocation::Ssh(ssh_project.clone()))
} else if let Some(local_paths) = self.local_paths(cx) {
if !local_paths.is_empty() {
- Some(SerializedWorkspaceLocation::from_local_paths(local_paths))
+ WorkspaceLocation::Location(SerializedWorkspaceLocation::from_local_paths(
+ local_paths,
+ ))
} else {
- None
+ WorkspaceLocation::DetachFromSession
}
} else {
- None
+ WorkspaceLocation::None
}
}
@@ -5267,8 +5285,9 @@ impl Workspace {
let Some(id) = self.database_id() else {
return;
};
- let Some(location) = self.serialize_workspace_location(cx) else {
- return;
+ let location = match self.serialize_workspace_location(cx) {
+ WorkspaceLocation::Location(location) => location,
+ _ => return,
};
if let Some(manager) = HistoryManager::global(cx) {
manager.update(cx, |this, cx| {
@@ -7359,6 +7378,17 @@ async fn open_ssh_project_inner(
return Err(project_path_errors.pop().context("no paths given")?);
}
+ if let Some(detach_session_task) = window
+ .update(cx, |_workspace, window, cx| {
+ cx.spawn_in(window, async move |this, cx| {
+ this.update_in(cx, |this, window, cx| this.remove_from_session(window, cx))
+ })
+ })
+ .ok()
+ {
+ detach_session_task.await.ok();
+ }
+
cx.update_window(window.into(), |_, window, cx| {
window.replace_root(cx, |window, cx| {
telemetry::event!("SSH Project Opened");
@@ -2454,16 +2454,16 @@ impl Snapshot {
self.entries_by_path = {
let mut cursor = self.entries_by_path.cursor::<TraversalProgress>(&());
let mut new_entries_by_path =
- cursor.slice(&TraversalTarget::path(&removed_entry.path), Bias::Left, &());
+ cursor.slice(&TraversalTarget::path(&removed_entry.path), Bias::Left);
while let Some(entry) = cursor.item() {
if entry.path.starts_with(&removed_entry.path) {
self.entries_by_id.remove(&entry.id, &());
- cursor.next(&());
+ cursor.next();
} else {
break;
}
}
- new_entries_by_path.append(cursor.suffix(&()), &());
+ new_entries_by_path.append(cursor.suffix(), &());
new_entries_by_path
};
@@ -2576,7 +2576,6 @@ impl Snapshot {
include_ignored,
},
Bias::Right,
- &(),
);
Traversal {
snapshot: self,
@@ -2632,7 +2631,7 @@ impl Snapshot {
options: ChildEntriesOptions,
) -> ChildEntriesIter<'a> {
let mut cursor = self.entries_by_path.cursor(&());
- cursor.seek(&TraversalTarget::path(parent_path), Bias::Right, &());
+ cursor.seek(&TraversalTarget::path(parent_path), Bias::Right);
let traversal = Traversal {
snapshot: self,
cursor,
@@ -3056,9 +3055,9 @@ impl BackgroundScannerState {
.snapshot
.entries_by_path
.cursor::<TraversalProgress>(&());
- new_entries = cursor.slice(&TraversalTarget::path(path), Bias::Left, &());
- removed_entries = cursor.slice(&TraversalTarget::successor(path), Bias::Left, &());
- new_entries.append(cursor.suffix(&()), &());
+ new_entries = cursor.slice(&TraversalTarget::path(path), Bias::Left);
+ removed_entries = cursor.slice(&TraversalTarget::successor(path), Bias::Left);
+ new_entries.append(cursor.suffix(), &());
}
self.snapshot.entries_by_path = new_entries;
@@ -4925,15 +4924,15 @@ fn build_diff(
let mut old_paths = old_snapshot.entries_by_path.cursor::<PathKey>(&());
let mut new_paths = new_snapshot.entries_by_path.cursor::<PathKey>(&());
let mut last_newly_loaded_dir_path = None;
- old_paths.next(&());
- new_paths.next(&());
+ old_paths.next();
+ new_paths.next();
for path in event_paths {
let path = PathKey(path.clone());
if old_paths.item().map_or(false, |e| e.path < path.0) {
- old_paths.seek_forward(&path, Bias::Left, &());
+ old_paths.seek_forward(&path, Bias::Left);
}
if new_paths.item().map_or(false, |e| e.path < path.0) {
- new_paths.seek_forward(&path, Bias::Left, &());
+ new_paths.seek_forward(&path, Bias::Left);
}
loop {
match (old_paths.item(), new_paths.item()) {
@@ -4949,7 +4948,7 @@ fn build_diff(
match Ord::cmp(&old_entry.path, &new_entry.path) {
Ordering::Less => {
changes.push((old_entry.path.clone(), old_entry.id, Removed));
- old_paths.next(&());
+ old_paths.next();
}
Ordering::Equal => {
if phase == EventsReceivedDuringInitialScan {
@@ -4975,8 +4974,8 @@ fn build_diff(
changes.push((new_entry.path.clone(), new_entry.id, Updated));
}
}
- old_paths.next(&());
- new_paths.next(&());
+ old_paths.next();
+ new_paths.next();
}
Ordering::Greater => {
let is_newly_loaded = phase == InitialScan
@@ -4988,13 +4987,13 @@ fn build_diff(
new_entry.id,
if is_newly_loaded { Loaded } else { Added },
));
- new_paths.next(&());
+ new_paths.next();
}
}
}
(Some(old_entry), None) => {
changes.push((old_entry.path.clone(), old_entry.id, Removed));
- old_paths.next(&());
+ old_paths.next();
}
(None, Some(new_entry)) => {
let is_newly_loaded = phase == InitialScan
@@ -5006,7 +5005,7 @@ fn build_diff(
new_entry.id,
if is_newly_loaded { Loaded } else { Added },
));
- new_paths.next(&());
+ new_paths.next();
}
(None, None) => break,
}
@@ -5255,7 +5254,7 @@ impl<'a> Traversal<'a> {
start_path: &Path,
) -> Self {
let mut cursor = snapshot.entries_by_path.cursor(&());
- cursor.seek(&TraversalTarget::path(start_path), Bias::Left, &());
+ cursor.seek(&TraversalTarget::path(start_path), Bias::Left);
let mut traversal = Self {
snapshot,
cursor,
@@ -5282,14 +5281,13 @@ impl<'a> Traversal<'a> {
include_ignored: self.include_ignored,
},
Bias::Left,
- &(),
)
}
pub fn advance_to_sibling(&mut self) -> bool {
while let Some(entry) = self.cursor.item() {
self.cursor
- .seek_forward(&TraversalTarget::successor(&entry.path), Bias::Left, &());
+ .seek_forward(&TraversalTarget::successor(&entry.path), Bias::Left);
if let Some(entry) = self.cursor.item() {
if (self.include_files || !entry.is_file())
&& (self.include_dirs || !entry.is_dir())
@@ -5307,7 +5305,7 @@ impl<'a> Traversal<'a> {
return false;
};
self.cursor
- .seek(&TraversalTarget::path(parent_path), Bias::Left, &())
+ .seek(&TraversalTarget::path(parent_path), Bias::Left)
}
pub fn entry(&self) -> Option<&'a Entry> {
@@ -5326,7 +5324,7 @@ impl<'a> Traversal<'a> {
pub fn end_offset(&self) -> usize {
self.cursor
- .end(&())
+ .end()
.count(self.include_files, self.include_dirs, self.include_ignored)
}
}
@@ -554,6 +554,7 @@ pub fn main() {
supermaven::init(app_state.client.clone(), cx);
language_model::init(app_state.client.clone(), cx);
language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx);
+ agent_settings::init(cx);
agent_servers::init(cx);
web_search::init(cx);
web_search_providers::init(app_state.client.clone(), cx);
@@ -145,15 +145,15 @@ pub fn app_menus() -> Vec<Menu> {
items: vec![
MenuItem::action(
"Zoom In",
- zed_actions::IncreaseBufferFontSize { persist: true },
+ zed_actions::IncreaseBufferFontSize { persist: false },
),
MenuItem::action(
"Zoom Out",
- zed_actions::DecreaseBufferFontSize { persist: true },
+ zed_actions::DecreaseBufferFontSize { persist: false },
),
MenuItem::action(
"Reset Zoom",
- zed_actions::ResetBufferFontSize { persist: true },
+ zed_actions::ResetBufferFontSize { persist: false },
),
MenuItem::separator(),
MenuItem::action("Toggle Left Dock", workspace::ToggleLeftDock),
@@ -12,7 +12,7 @@ use futures::channel::mpsc::{UnboundedReceiver, UnboundedSender};
use futures::channel::{mpsc, oneshot};
use futures::future::join_all;
use futures::{FutureExt, SinkExt, StreamExt};
-use git_ui::diff_view::DiffView;
+use git_ui::file_diff_view::FileDiffView;
use gpui::{App, AsyncApp, Global, WindowHandle};
use language::Point;
use recent_projects::{SshSettings, open_ssh_project};
@@ -262,7 +262,7 @@ pub async fn open_paths_with_positions(
let old_path = Path::new(&diff_pair[0]).canonicalize()?;
let new_path = Path::new(&diff_pair[1]).canonicalize()?;
if let Ok(diff_view) = workspace.update(cx, |workspace, window, cx| {
- DiffView::open(old_path, new_path, workspace, window, cx)
+ FileDiffView::open(old_path, new_path, workspace, window, cx)
}) {
if let Some(diff_view) = diff_view.await.log_err() {
items.push(Some(Ok(Box::new(diff_view))))
@@ -2,6 +2,7 @@ mod preview;
mod repl_menu;
use agent_settings::AgentSettings;
+use client::DisableAiSettings;
use editor::actions::{
AddSelectionAbove, AddSelectionBelow, CodeActionSource, DuplicateLineDown, GoToDiagnostic,
GoToHunk, GoToPreviousDiagnostic, GoToPreviousHunk, MoveLineDown, MoveLineUp, SelectAll,
@@ -32,6 +33,7 @@ const MAX_CODE_ACTION_MENU_LINES: u32 = 16;
pub struct QuickActionBar {
_inlay_hints_enabled_subscription: Option<Subscription>,
+ _ai_settings_subscription: Subscription,
active_item: Option<Box<dyn ItemHandle>>,
buffer_search_bar: Entity<BufferSearchBar>,
show: bool,
@@ -46,8 +48,28 @@ impl QuickActionBar {
workspace: &Workspace,
cx: &mut Context<Self>,
) -> Self {
+ let mut was_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
+ let mut was_agent_enabled = AgentSettings::get_global(cx).enabled;
+ let mut was_agent_button = AgentSettings::get_global(cx).button;
+
+ let ai_settings_subscription = cx.observe_global::<SettingsStore>(move |_, cx| {
+ let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
+ let agent_settings = AgentSettings::get_global(cx);
+
+ if was_ai_disabled != is_ai_disabled
+ || was_agent_enabled != agent_settings.enabled
+ || was_agent_button != agent_settings.button
+ {
+ was_ai_disabled = is_ai_disabled;
+ was_agent_enabled = agent_settings.enabled;
+ was_agent_button = agent_settings.button;
+ cx.notify();
+ }
+ });
+
let mut this = Self {
_inlay_hints_enabled_subscription: None,
+ _ai_settings_subscription: ai_settings_subscription,
active_item: None,
buffer_search_bar,
show: true,
@@ -575,7 +597,9 @@ impl Render for QuickActionBar {
.children(self.render_preview_button(self.workspace.clone(), cx))
.children(search_button)
.when(
- AgentSettings::get_global(cx).enabled && AgentSettings::get_global(cx).button,
+ AgentSettings::get_global(cx).enabled
+ && AgentSettings::get_global(cx).button
+ && !DisableAiSettings::get_global(cx).disable_ai,
|bar| bar.child(assistant_button),
)
.children(code_actions_dropdown)
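
The QuickActionBar change above keeps a copy of the last-seen AI settings inside the subscription closure and only calls `cx.notify()` when one of the tracked fields actually changes, so unrelated settings writes don't trigger re-renders. A rough, framework-free sketch of that change-detection pattern follows; the types are illustrative and do not correspond to GPUI's real `observe_global`/`Context` API.

```rust
// Hedged sketch of the "cache last-seen values, notify only on change" pattern.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct TrackedAiSettings {
    disable_ai: bool,
    agent_enabled: bool,
    agent_button: bool,
}

struct ChangeDetector {
    last_seen: TrackedAiSettings,
}

impl ChangeDetector {
    fn new(initial: TrackedAiSettings) -> Self {
        Self { last_seen: initial }
    }

    // Called on every global settings change; returns true (standing in for
    // `cx.notify()`) only when a field this view depends on has changed.
    fn should_notify(&mut self, current: TrackedAiSettings) -> bool {
        if self.last_seen != current {
            self.last_seen = current;
            true
        } else {
            false
        }
    }
}

fn main() {
    let initial = TrackedAiSettings {
        disable_ai: false,
        agent_enabled: true,
        agent_button: true,
    };
    let mut detector = ChangeDetector::new(initial);

    // An unrelated settings write that leaves these fields untouched: no notify.
    assert!(!detector.should_notify(initial));

    // Disabling AI flips a tracked field: notify exactly once.
    let disabled = TrackedAiSettings { disable_ai: true, ..initial };
    assert!(detector.should_notify(disabled));
    assert!(!detector.should_notify(disabled));
}
```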
@@ -1,10 +1,11 @@
use std::any::{Any, TypeId};
+use client::DisableAiSettings;
use command_palette_hooks::CommandPaletteFilter;
use feature_flags::{FeatureFlagAppExt as _, PredictEditsRateCompletionsFeatureFlag};
use gpui::actions;
use language::language_settings::{AllLanguageSettings, EditPredictionProvider};
-use settings::update_settings_file;
+use settings::{Settings, SettingsStore, update_settings_file};
use ui::App;
use workspace::Workspace;
@@ -21,6 +22,8 @@ actions!(
);
pub fn init(cx: &mut App) {
+ feature_gate_predict_edits_actions(cx);
+
cx.observe_new(move |workspace: &mut Workspace, _, _cx| {
workspace.register_action(|workspace, _: &RateCompletions, window, cx| {
if cx.has_flag::<PredictEditsRateCompletionsFeatureFlag>() {
@@ -53,27 +56,57 @@ pub fn init(cx: &mut App) {
});
})
.detach();
-
- feature_gate_predict_edits_rating_actions(cx);
}
-fn feature_gate_predict_edits_rating_actions(cx: &mut App) {
+fn feature_gate_predict_edits_actions(cx: &mut App) {
let rate_completion_action_types = [TypeId::of::<RateCompletions>()];
+ let reset_onboarding_action_types = [TypeId::of::<ResetOnboarding>()];
+ let zeta_all_action_types = [
+ TypeId::of::<RateCompletions>(),
+ TypeId::of::<ResetOnboarding>(),
+ zed_actions::OpenZedPredictOnboarding.type_id(),
+ TypeId::of::<crate::ClearHistory>(),
+ TypeId::of::<crate::ThumbsUpActiveCompletion>(),
+ TypeId::of::<crate::ThumbsDownActiveCompletion>(),
+ TypeId::of::<crate::NextEdit>(),
+ TypeId::of::<crate::PreviousEdit>(),
+ ];
CommandPaletteFilter::update_global(cx, |filter, _cx| {
filter.hide_action_types(&rate_completion_action_types);
+ filter.hide_action_types(&reset_onboarding_action_types);
filter.hide_action_types(&[zed_actions::OpenZedPredictOnboarding.type_id()]);
});
+ cx.observe_global::<SettingsStore>(move |cx| {
+ let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai;
+ let has_feature_flag = cx.has_flag::<PredictEditsRateCompletionsFeatureFlag>();
+
+ CommandPaletteFilter::update_global(cx, |filter, _cx| {
+ if is_ai_disabled {
+ filter.hide_action_types(&zeta_all_action_types);
+ } else {
+ if has_feature_flag {
+ filter.show_action_types(rate_completion_action_types.iter());
+ } else {
+ filter.hide_action_types(&rate_completion_action_types);
+ }
+ }
+ });
+ })
+ .detach();
+
cx.observe_flag::<PredictEditsRateCompletionsFeatureFlag, _>(move |is_enabled, cx| {
- if is_enabled {
- CommandPaletteFilter::update_global(cx, |filter, _cx| {
- filter.show_action_types(rate_completion_action_types.iter());
- });
- } else {
- CommandPaletteFilter::update_global(cx, |filter, _cx| {
- filter.hide_action_types(&rate_completion_action_types);
- });
+ if !DisableAiSettings::get_global(cx).disable_ai {
+ if is_enabled {
+ CommandPaletteFilter::update_global(cx, |filter, _cx| {
+ filter.show_action_types(rate_completion_action_types.iter());
+ });
+ } else {
+ CommandPaletteFilter::update_global(cx, |filter, _cx| {
+ filter.hide_action_types(&rate_completion_action_types);
+ });
+ }
}
})
.detach();
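
The zeta hunks above layer two gates on the command palette: when `disable_ai` is set, every prediction-related action is hidden outright; otherwise the rate-completion actions follow the feature flag as before. A hedged reduction of that decision logic to a pure function (the real code drives `CommandPaletteFilter` hide/show calls rather than returning an enum):

```rust
// Illustrative sketch only; not the real CommandPaletteFilter API.
#[derive(Debug, PartialEq, Eq)]
enum Visibility {
    HideAllAiActions,
    ShowRateCompletions,
    HideRateCompletions,
}

fn gate_predict_edits_actions(ai_disabled: bool, has_feature_flag: bool) -> Visibility {
    if ai_disabled {
        // `disable_ai` wins over everything: all zeta actions disappear
        // from the command palette.
        Visibility::HideAllAiActions
    } else if has_feature_flag {
        Visibility::ShowRateCompletions
    } else {
        Visibility::HideRateCompletions
    }
}

fn main() {
    assert_eq!(
        gate_predict_edits_actions(true, true),
        Visibility::HideAllAiActions
    );
    assert_eq!(
        gate_predict_edits_actions(false, true),
        Visibility::ShowRateCompletions
    );
    assert_eq!(
        gate_predict_edits_actions(false, false),
        Visibility::HideRateCompletions
    );
}
```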
@@ -444,14 +444,17 @@ Custom models will be listed in the model dropdown in the Agent Panel.
### OpenAI API Compatible {#openai-api-compatible}
-Zed supports using OpenAI compatible APIs by specifying a custom `endpoint` and `available_models` for the OpenAI provider.
+Zed supports using [OpenAI compatible APIs](https://platform.openai.com/docs/api-reference/chat) by specifying a custom `api_url` and `available_models` for the OpenAI provider. This is useful for connecting to other hosted services (like Together AI, Anyscale, etc.) or local models.
-Zed supports using OpenAI compatible APIs by specifying a custom `api_url` and `available_models` for the OpenAI provider. This is useful for connecting to other hosted services (like Together AI, Anyscale, etc.) or local models.
+To configure a compatible API, you can add a custom API URL for OpenAI either via the UI (currently available only in Preview) or by editing your `settings.json`.
-To configure a compatible API, you can add a custom API URL for OpenAI either via the UI or by editing your `settings.json`. For example, to connect to [Together AI](https://www.together.ai/):
+For example, to connect to [Together AI](https://www.together.ai/) via the UI:
-1. Get an API key from your [Together AI account](https://api.together.ai/settings/api-keys).
-2. Add the following to your `settings.json`:
+1. Get an API key from your [Together AI account](https://api.together.ai/settings/api-keys).
+2. Go to the Agent Panel's settings view, click the "Add Provider" button, and then choose the "OpenAI" menu item.
+3. Fill in the requested fields, such as the `api_url`, `api_key`, and the available models.
+
+Alternatively, you can add the provider directly in your `settings.json`:
```json
{
@@ -39,7 +39,7 @@ CRATE_PATH="crates/$CRATE_NAME"
mkdir -p "$CRATE_PATH/src"
# Symlink the license
-ln -sf "../../../$LICENSE_FILE" "$CRATE_PATH/$LICENSE_FILE"
+ln -sf "../../$LICENSE_FILE" "$CRATE_PATH/$LICENSE_FILE"
CARGO_TOML_TEMPLATE=$(cat << 'EOF'
[package]