From 65a395fa9a64a6068cae6ba1f4b24610ee698093 Mon Sep 17 00:00:00 2001
From: Zed Zippy <234243425+zed-zippy[bot]@users.noreply.github.com>
Date: Wed, 17 Dec 2025 23:33:12 +0000
Subject: [PATCH] Autofix

---
 crates/extension_api/src/extension_api.rs      | 19 +++++++-------
 .../src/wasm_host/llm_provider.rs              | 25 +++++++++----------
 .../src/wasm_host/wit/since_v0_8_0.rs          |  4 +--
 3 files changed, 23 insertions(+), 25 deletions(-)

diff --git a/crates/extension_api/src/extension_api.rs b/crates/extension_api/src/extension_api.rs
index d888bde26d337b1e024480e6d764473c1c38e855..0a67f5965d569b7b531283b2c8f0ac385e4397f8 100644
--- a/crates/extension_api/src/extension_api.rs
+++ b/crates/extension_api/src/extension_api.rs
@@ -35,20 +35,19 @@ pub use wit::{
         DeviceFlowPromptInfo as LlmDeviceFlowPromptInfo, ImageData as LlmImageData,
         MessageContent as LlmMessageContent, MessageRole as LlmMessageRole,
         ModelCapabilities as LlmModelCapabilities, ModelInfo as LlmModelInfo,
-        OauthWebAuthConfig as LlmOauthWebAuthConfig,
-        OauthWebAuthResult as LlmOauthWebAuthResult, ProviderInfo as LlmProviderInfo,
-        ProviderSettings as LlmProviderSettings, RequestMessage as LlmRequestMessage,
-        StopReason as LlmStopReason, ThinkingContent as LlmThinkingContent,
-        TokenUsage as LlmTokenUsage, ToolChoice as LlmToolChoice,
-        ToolDefinition as LlmToolDefinition, ToolInputFormat as LlmToolInputFormat,
-        ToolResult as LlmToolResult, ToolResultContent as LlmToolResultContent,
-        ToolUse as LlmToolUse, ToolUseJsonParseError as LlmToolUseJsonParseError,
+        OauthWebAuthConfig as LlmOauthWebAuthConfig, OauthWebAuthResult as LlmOauthWebAuthResult,
+        ProviderInfo as LlmProviderInfo, ProviderSettings as LlmProviderSettings,
+        RequestMessage as LlmRequestMessage, StopReason as LlmStopReason,
+        ThinkingContent as LlmThinkingContent, TokenUsage as LlmTokenUsage,
+        ToolChoice as LlmToolChoice, ToolDefinition as LlmToolDefinition,
+        ToolInputFormat as LlmToolInputFormat, ToolResult as LlmToolResult,
+        ToolResultContent as LlmToolResultContent, ToolUse as LlmToolUse,
+        ToolUseJsonParseError as LlmToolUseJsonParseError,
         delete_credential as llm_delete_credential, get_credential as llm_get_credential,
         get_env_var as llm_get_env_var, get_provider_settings as llm_get_provider_settings,
         oauth_open_browser as llm_oauth_open_browser,
         oauth_send_http_request as llm_oauth_send_http_request,
-        oauth_start_web_auth as llm_oauth_start_web_auth,
-        store_credential as llm_store_credential,
+        oauth_start_web_auth as llm_oauth_start_web_auth, store_credential as llm_store_credential,
     },
     zed::extension::nodejs::{
         node_binary_path, npm_install_package, npm_package_installed_version,
diff --git a/crates/extension_host/src/wasm_host/llm_provider.rs b/crates/extension_host/src/wasm_host/llm_provider.rs
index 6ea6198f7b3f14430e22dae63e9ac1123987cb14..c6cc3ffc21c003a44c5dcae9c4b6efec85d24877 100644
--- a/crates/extension_host/src/wasm_host/llm_provider.rs
+++ b/crates/extension_host/src/wasm_host/llm_provider.rs
@@ -10,8 +10,8 @@ use crate::wasm_host::wit::{
     LlmStopReason, LlmThinkingContent, LlmToolChoice, LlmToolDefinition, LlmToolInputFormat,
     LlmToolResult, LlmToolResultContent, LlmToolUse,
 };
-use collections::HashMap;
 use anyhow::{Result, anyhow};
+use collections::HashMap;
 use credentials_provider::CredentialsProvider;
 use extension::{LanguageModelAuthConfig, OAuthConfig};
 use futures::future::BoxFuture;
@@ -29,8 +29,8 @@ use language_model::{
     LanguageModelCacheConfiguration, LanguageModelCompletionError, LanguageModelCompletionEvent,
     LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
     LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
-    LanguageModelToolChoice, LanguageModelToolUse, LanguageModelToolUseId, RateLimiter,
-    StopReason, TokenUsage,
+    LanguageModelToolChoice, LanguageModelToolUse, LanguageModelToolUseId, RateLimiter, StopReason,
+    TokenUsage,
 };
 use markdown::{HeadingLevelStyles, Markdown, MarkdownElement, MarkdownStyle};
 use settings::Settings;
@@ -150,13 +150,14 @@ impl ExtensionLanguageModelProvider {
         model_info: &LlmModelInfo,
         cache_configs: &HashMap,
     ) -> Arc {
-        let cache_config = cache_configs.get(&model_info.id).map(|config| {
-            LanguageModelCacheConfiguration {
-                max_cache_anchors: config.max_cache_anchors as usize,
-                should_speculate: false,
-                min_total_token: config.min_total_token_count,
-            }
-        });
+        let cache_config =
+            cache_configs
+                .get(&model_info.id)
+                .map(|config| LanguageModelCacheConfiguration {
+                    max_cache_anchors: config.max_cache_anchors as usize,
+                    should_speculate: false,
+                    min_total_token: config.min_total_token_count,
+                });
 
         Arc::new(ExtensionLanguageModel {
             extension: self.extension.clone(),
@@ -1640,9 +1641,7 @@ impl LanguageModel for ExtensionLanguageModel {
     fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
         match self.model_info.capabilities.tool_input_format {
             LlmToolInputFormat::JsonSchema => LanguageModelToolSchemaFormat::JsonSchema,
-            LlmToolInputFormat::JsonSchemaSubset => {
-                LanguageModelToolSchemaFormat::JsonSchemaSubset
-            }
+            LlmToolInputFormat::JsonSchemaSubset => LanguageModelToolSchemaFormat::JsonSchemaSubset,
             LlmToolInputFormat::Simplified => LanguageModelToolSchemaFormat::JsonSchema,
         }
     }
diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs
index 67bea1e5782c1805f4cb412b3d58894070ad2743..152dde16dd755eedf3c528f14e9fa6e7138f48f0 100644
--- a/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs
+++ b/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs
@@ -1448,8 +1448,8 @@ impl llm_provider::Host for WasmState {
         cx.update(|cx| {
             let settings_store = cx.global::<SettingsStore>();
             let user_settings = settings_store.raw_user_settings();
-            let language_models = user_settings
-                .and_then(|s| s.content.language_models.as_ref());
+            let language_models =
+                user_settings.and_then(|s| s.content.language_models.as_ref());
 
             // Map provider IDs to their settings
             // The provider_id from the extension is just the provider part (e.g., "google-ai")