Remove obsolete llm_provider_authenticate

Created by Richard Feldman

Change summary

crates/extension_api/src/extension_api.rs               | 33 +++-------
crates/extension_api/wit/since_v0.8.0/extension.wit     |  5 -
crates/extension_api/wit/since_v0.8.0/llm-provider.wit  | 17 -----
crates/extension_host/src/wasm_host/llm_provider.rs     | 34 ----------
crates/extension_host/src/wasm_host/wit.rs              | 30 ++------
crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs | 14 ----
extensions/anthropic/src/anthropic.rs                   | 14 ----
extensions/copilot-chat/src/copilot_chat.rs             | 14 ----
extensions/google-ai/Cargo.lock                         | 20 +++---
extensions/google-ai/src/google_ai.rs                   | 14 ----
extensions/open-router/Cargo.lock                       |  2 +-
extensions/open-router/src/open_router.rs               | 14 ----
extensions/openai/Cargo.lock                            | 20 +++---
extensions/openai/src/openai.rs                         | 14 ----
14 files changed, 45 insertions(+), 200 deletions(-)

Detailed changes

crates/extension_api/src/extension_api.rs 🔗

@@ -31,22 +31,20 @@ pub use wit::{
     },
     zed::extension::llm_provider::{
         CacheConfiguration as LlmCacheConfiguration, CompletionEvent as LlmCompletionEvent,
-        CompletionRequest as LlmCompletionRequest, CredentialType as LlmCredentialType,
-        ImageData as LlmImageData, MessageContent as LlmMessageContent,
-        MessageRole as LlmMessageRole, ModelCapabilities as LlmModelCapabilities,
-        ModelInfo as LlmModelInfo, OauthHttpRequest as LlmOauthHttpRequest,
-        OauthHttpResponse as LlmOauthHttpResponse, OauthWebAuthConfig as LlmOauthWebAuthConfig,
-        OauthWebAuthResult as LlmOauthWebAuthResult, ProviderInfo as LlmProviderInfo,
-        RequestMessage as LlmRequestMessage, StopReason as LlmStopReason,
-        ThinkingContent as LlmThinkingContent, TokenUsage as LlmTokenUsage,
-        ToolChoice as LlmToolChoice, ToolDefinition as LlmToolDefinition,
-        ToolInputFormat as LlmToolInputFormat, ToolResult as LlmToolResult,
-        ToolResultContent as LlmToolResultContent, ToolUse as LlmToolUse,
-        ToolUseJsonParseError as LlmToolUseJsonParseError,
+        CompletionRequest as LlmCompletionRequest, ImageData as LlmImageData,
+        MessageContent as LlmMessageContent, MessageRole as LlmMessageRole,
+        ModelCapabilities as LlmModelCapabilities, ModelInfo as LlmModelInfo,
+        OauthHttpRequest as LlmOauthHttpRequest, OauthHttpResponse as LlmOauthHttpResponse,
+        OauthWebAuthConfig as LlmOauthWebAuthConfig, OauthWebAuthResult as LlmOauthWebAuthResult,
+        ProviderInfo as LlmProviderInfo, RequestMessage as LlmRequestMessage,
+        StopReason as LlmStopReason, ThinkingContent as LlmThinkingContent,
+        TokenUsage as LlmTokenUsage, ToolChoice as LlmToolChoice,
+        ToolDefinition as LlmToolDefinition, ToolInputFormat as LlmToolInputFormat,
+        ToolResult as LlmToolResult, ToolResultContent as LlmToolResultContent,
+        ToolUse as LlmToolUse, ToolUseJsonParseError as LlmToolUseJsonParseError,
         delete_credential as llm_delete_credential, get_credential as llm_get_credential,
         get_env_var as llm_get_env_var, oauth_open_browser as llm_oauth_open_browser,
         oauth_start_web_auth as llm_oauth_start_web_auth,
-        request_credential as llm_request_credential,
         send_oauth_http_request as llm_oauth_http_request,
         store_credential as llm_store_credential,
     },
@@ -302,11 +300,6 @@ pub trait Extension: Send + Sync {
         false
     }
 
-    /// Attempt to authenticate the provider.
-    fn llm_provider_authenticate(&mut self, _provider_id: &str) -> Result<(), String> {
-        Err("`llm_provider_authenticate` not implemented".to_string())
-    }
-
     /// Reset credentials for the provider.
     fn llm_provider_reset_credentials(&mut self, _provider_id: &str) -> Result<(), String> {
         Err("`llm_provider_reset_credentials` not implemented".to_string())
@@ -631,10 +624,6 @@ impl wit::Guest for Component {
         extension().llm_provider_is_authenticated(&provider_id)
     }
 
-    fn llm_provider_authenticate(provider_id: String) -> Result<(), String> {
-        extension().llm_provider_authenticate(&provider_id)
-    }
-
     fn llm_provider_reset_credentials(provider_id: String) -> Result<(), String> {
         extension().llm_provider_reset_credentials(&provider_id)
     }
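
With llm_provider_authenticate gone from the Extension trait, an extension's remaining authentication surface is llm_provider_is_authenticated plus llm_provider_reset_credentials, backed by the credential helpers still re-exported above. A minimal sketch of that shape, not taken from the diff; "my-provider" is a hypothetical provider id, and the helper signatures are assumed from the WIT and extension hunks below (llm_get_credential(&str) -> Option<String>, llm_delete_credential(&str) -> Result<(), String>):

    use zed_extension_api::{self as zed, llm_delete_credential, llm_get_credential};

    struct MyProviderExtension;

    impl zed::Extension for MyProviderExtension {
        fn new() -> Self {
            MyProviderExtension
        }

        fn llm_provider_is_authenticated(&mut self, _provider_id: &str) -> bool {
            // A stored credential is what now marks the provider as authenticated;
            // there is no host-driven prompt to fall back on.
            llm_get_credential("my-provider").is_some()
        }

        fn llm_provider_reset_credentials(&mut self, _provider_id: &str) -> Result<(), String> {
            // Mirrors what the built-in provider extensions do in the hunks below.
            llm_delete_credential("my-provider")
        }
    }

    zed::register_extension!(MyProviderExtension);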

crates/extension_api/wit/since_v0.8.0/extension.wit 🔗

@@ -18,7 +18,7 @@ world extension {
     use slash-command.{slash-command, slash-command-argument-completion, slash-command-output};
     use llm-provider.{
         provider-info, model-info, completion-request,
-        credential-type, cache-configuration, completion-event, token-usage
+        cache-configuration, completion-event, token-usage
     };
 
     /// Initializes the extension.
@@ -183,9 +183,6 @@ world extension {
     /// Check if the provider is authenticated.
     export llm-provider-is-authenticated: func(provider-id: string) -> bool;
 
-    /// Attempt to authenticate the provider.
-    export llm-provider-authenticate: func(provider-id: string) -> result<_, string>;
-
     /// Reset credentials for the provider.
     export llm-provider-reset-credentials: func(provider-id: string) -> result<_, string>;
 

crates/extension_api/wit/since_v0.8.0/llm-provider.wit 🔗

@@ -235,14 +235,6 @@ interface llm-provider {
         cache-read-input-tokens: option<u64>,
     }
 
-    /// Credential types that can be requested.
-    enum credential-type {
-        /// An API key.
-        api-key,
-        /// An OAuth token.
-        oauth-token,
-    }
-
     /// Cache configuration for prompt caching.
     record cache-configuration {
         /// Maximum number of cache anchors.
@@ -296,15 +288,6 @@ interface llm-provider {
         body: string,
     }
 
-    /// Request a credential from the user.
-    /// Returns true if the credential was provided, false if the user cancelled.
-    request-credential: func(
-        provider-id: string,
-        credential-type: credential-type,
-        label: string,
-        placeholder: string
-    ) -> result<bool, string>;
-
     /// Get a stored credential for this provider.
     get-credential: func(provider-id: string) -> option<string>;
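
With request-credential and credential-type gone, a credential only enters the store when the extension writes it explicitly (for example from its configuration flow) via the functions that remain in this interface: store, get, delete, plus get-env-var. A hedged sketch of that round trip using the Rust binding names re-exported in extension_api.rs; the provider id, the env-var name, and the exact signatures of llm_store_credential and llm_get_env_var are assumptions:

    use zed_extension_api::{llm_get_credential, llm_get_env_var, llm_store_credential};

    const PROVIDER_ID: &str = "my-provider"; // hypothetical id

    /// Called from the extension's own configuration flow once the user enters a key.
    /// Assumed signature: llm_store_credential(&str, &str) -> Result<(), String>.
    fn save_api_key(key: &str) -> Result<(), String> {
        llm_store_credential(PROVIDER_ID, key)
    }

    /// Env var first, then the stored credential, matching the guidance in the
    /// removed request_credential stub in since_v0_8_0.rs below.
    /// Assumed signature: llm_get_env_var(&str) -> Option<String>.
    fn current_api_key() -> Option<String> {
        llm_get_env_var("MY_PROVIDER_API_KEY").or_else(|| llm_get_credential(PROVIDER_ID))
    }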
 

crates/extension_host/src/wasm_host/llm_provider.rs 🔗

@@ -182,37 +182,9 @@ impl LanguageModelProvider for ExtensionLanguageModelProvider {
         self.state.read(cx).is_authenticated
     }
 
-    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
-        let extension = self.extension.clone();
-        let provider_id = self.provider_info.id.clone();
-        let state = self.state.clone();
-
-        cx.spawn(async move |cx| {
-            let result = extension
-                .call(|extension, store| {
-                    async move {
-                        extension
-                            .call_llm_provider_authenticate(store, &provider_id)
-                            .await
-                    }
-                    .boxed()
-                })
-                .await;
-
-            match result {
-                Ok(Ok(Ok(()))) => {
-                    cx.update(|cx| {
-                        state.update(cx, |state, _| {
-                            state.is_authenticated = true;
-                        });
-                    })?;
-                    Ok(())
-                }
-                Ok(Ok(Err(e))) => Err(AuthenticateError::Other(anyhow!("{}", e))),
-                Ok(Err(e)) => Err(AuthenticateError::Other(e)),
-                Err(e) => Err(AuthenticateError::Other(e)),
-            }
-        })
+    fn authenticate(&self, _cx: &mut App) -> Task<Result<(), AuthenticateError>> {
+        // Authentication is handled via the configuration view UI
+        Task::ready(Ok(()))
     }
 
     fn configuration_view(
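
With authenticate reduced to a no-op, the cached State::is_authenticated flag has to be kept current some other way. A hedged sketch of one option, refreshing it by asking the extension directly: the method name refresh_is_authenticated and the existence of a call_llm_provider_is_authenticated dispatcher are assumptions, but the extension.call shape mirrors the removed body above:

    fn refresh_is_authenticated(&self, cx: &mut App) -> Task<anyhow::Result<()>> {
        let extension = self.extension.clone();
        let provider_id = self.provider_info.id.clone();
        let state = self.state.clone();

        cx.spawn(async move |cx| {
            // Assumed dispatcher, analogous to call_llm_provider_reset_credentials.
            let is_authenticated = extension
                .call(|extension, store| {
                    async move {
                        extension
                            .call_llm_provider_is_authenticated(store, &provider_id)
                            .await
                    }
                    .boxed()
                })
                .await??;

            cx.update(|cx| {
                state.update(cx, |state, _| {
                    state.is_authenticated = is_authenticated;
                });
            })?;
            Ok(())
        })
    }
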

crates/extension_host/src/wasm_host/wit.rs 🔗

@@ -35,16 +35,15 @@ pub use latest::{
     zed::extension::context_server::ContextServerConfiguration,
     zed::extension::llm_provider::{
         CacheConfiguration as LlmCacheConfiguration, CompletionEvent as LlmCompletionEvent,
-        CompletionRequest as LlmCompletionRequest, CredentialType as LlmCredentialType,
-        ImageData as LlmImageData, MessageContent as LlmMessageContent,
-        MessageRole as LlmMessageRole, ModelCapabilities as LlmModelCapabilities,
-        ModelInfo as LlmModelInfo, ProviderInfo as LlmProviderInfo,
-        RequestMessage as LlmRequestMessage, StopReason as LlmStopReason,
-        ThinkingContent as LlmThinkingContent, TokenUsage as LlmTokenUsage,
-        ToolChoice as LlmToolChoice, ToolDefinition as LlmToolDefinition,
-        ToolInputFormat as LlmToolInputFormat, ToolResult as LlmToolResult,
-        ToolResultContent as LlmToolResultContent, ToolUse as LlmToolUse,
-        ToolUseJsonParseError as LlmToolUseJsonParseError,
+        CompletionRequest as LlmCompletionRequest, ImageData as LlmImageData,
+        MessageContent as LlmMessageContent, MessageRole as LlmMessageRole,
+        ModelCapabilities as LlmModelCapabilities, ModelInfo as LlmModelInfo,
+        ProviderInfo as LlmProviderInfo, RequestMessage as LlmRequestMessage,
+        StopReason as LlmStopReason, ThinkingContent as LlmThinkingContent,
+        TokenUsage as LlmTokenUsage, ToolChoice as LlmToolChoice,
+        ToolDefinition as LlmToolDefinition, ToolInputFormat as LlmToolInputFormat,
+        ToolResult as LlmToolResult, ToolResultContent as LlmToolResultContent,
+        ToolUse as LlmToolUse, ToolUseJsonParseError as LlmToolUseJsonParseError,
     },
     zed::extension::lsp::{
         Completion, CompletionKind, CompletionLabelDetails, InsertTextFormat, Symbol, SymbolKind,
@@ -1230,17 +1229,6 @@ impl Extension {
         }
     }
 
-    pub async fn call_llm_provider_authenticate(
-        &self,
-        store: &mut Store<WasmState>,
-        provider_id: &str,
-    ) -> Result<Result<(), String>> {
-        match self {
-            Extension::V0_8_0(ext) => ext.call_llm_provider_authenticate(store, provider_id).await,
-            _ => anyhow::bail!("`llm_provider_authenticate` not available prior to v0.8.0"),
-        }
-    }
-
     pub async fn call_llm_provider_reset_credentials(
         &self,
         store: &mut Store<WasmState>,
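
The body of the remaining dispatcher is cut off by the diff context above; presumably it keeps the same version-gated shape as the removed call_llm_provider_authenticate. A sketch under that assumption, sitting inside the same impl Extension block:

    pub async fn call_llm_provider_reset_credentials(
        &self,
        store: &mut Store<WasmState>,
        provider_id: &str,
    ) -> Result<Result<(), String>> {
        match self {
            Extension::V0_8_0(ext) => {
                ext.call_llm_provider_reset_credentials(store, provider_id)
                    .await
            }
            _ => anyhow::bail!("`llm_provider_reset_credentials` not available prior to v0.8.0"),
        }
    }
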

crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs 🔗

@@ -1112,20 +1112,6 @@ impl ExtensionImports for WasmState {
 }
 
 impl llm_provider::Host for WasmState {
-    async fn request_credential(
-        &mut self,
-        _provider_id: String,
-        _credential_type: llm_provider::CredentialType,
-        _label: String,
-        _placeholder: String,
-    ) -> wasmtime::Result<Result<bool, String>> {
-        // For now, credential requests return false (not provided)
-        // Extensions should use get_env_var to check for env vars first,
-        // then store_credential/get_credential for manual storage
-        // Full UI credential prompting will be added in a future phase
-        Ok(Ok(false))
-    }
-
     async fn get_credential(&mut self, provider_id: String) -> wasmtime::Result<Option<String>> {
         let extension_id = self.manifest.id.clone();
 

extensions/anthropic/src/anthropic.rs 🔗

@@ -562,20 +562,6 @@ Uses your Anthropic API credits. See [Anthropic pricing](https://www.anthropic.c
         )
     }
 
-    fn llm_provider_authenticate(&mut self, _provider_id: &str) -> Result<(), String> {
-        let provided = llm_request_credential(
-            "anthropic",
-            LlmCredentialType::ApiKey,
-            "Anthropic API Key",
-            "sk-ant-...",
-        )?;
-        if provided {
-            Ok(())
-        } else {
-            Err("Authentication cancelled".to_string())
-        }
-    }
-
     fn llm_provider_reset_credentials(&mut self, _provider_id: &str) -> Result<(), String> {
         llm_delete_credential("anthropic")
     }

extensions/copilot-chat/src/copilot_chat.rs 🔗

@@ -502,20 +502,6 @@ This extension requires an active GitHub Copilot subscription.
         )
     }
 
-    fn llm_provider_authenticate(&mut self, _provider_id: &str) -> Result<(), String> {
-        let provided = llm_request_credential(
-            "copilot_chat",
-            LlmCredentialType::ApiKey,
-            "GitHub Copilot Token",
-            "ghu_...",
-        )?;
-        if provided {
-            Ok(())
-        } else {
-            Err("Authentication cancelled".to_string())
-        }
-    }
-
     fn llm_provider_reset_credentials(&mut self, _provider_id: &str) -> Result<(), String> {
         llm_delete_credential("copilot_chat")
     }

extensions/google-ai/Cargo.lock 🔗

@@ -80,15 +80,6 @@ version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
 
-[[package]]
-name = "foogle"
-version = "0.1.0"
-dependencies = [
- "serde",
- "serde_json",
- "zed_extension_api",
-]
-
 [[package]]
 name = "form_urlencoded"
 version = "1.2.2"
@@ -187,6 +178,15 @@ dependencies = [
  "slab",
 ]
 
+[[package]]
+name = "google-ai"
+version = "0.1.0"
+dependencies = [
+ "serde",
+ "serde_json",
+ "zed_extension_api",
+]
+
 [[package]]
 name = "hashbrown"
 version = "0.15.5"
@@ -761,7 +761,7 @@ dependencies = [
 
 [[package]]
 name = "zed_extension_api"
-version = "0.7.0"
+version = "0.8.0"
 dependencies = [
  "serde",
  "serde_json",

extensions/google-ai/src/google_ai.rs 🔗

@@ -633,20 +633,6 @@ Uses your Google AI API credits. See [Google AI pricing](https://ai.google.dev/p
         )
     }
 
-    fn llm_provider_authenticate(&mut self, _provider_id: &str) -> Result<(), String> {
-        let provided = llm_request_credential(
-            "google-ai",
-            LlmCredentialType::ApiKey,
-            "Google AI API Key",
-            "AIza...",
-        )?;
-        if provided {
-            Ok(())
-        } else {
-            Err("Authentication cancelled".to_string())
-        }
-    }
-
     fn llm_provider_reset_credentials(&mut self, _provider_id: &str) -> Result<(), String> {
         llm_delete_credential("google-ai")
     }

extensions/open-router/Cargo.lock 🔗

@@ -366,7 +366,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
 
 [[package]]
-name = "open_router"
+name = "open-router"
 version = "0.1.0"
 dependencies = [
  "serde",

extensions/open-router/src/open_router.rs 🔗

@@ -639,20 +639,6 @@ Pay-per-use based on model. See [openrouter.ai/models](https://openrouter.ai/mod
         )
     }
 
-    fn llm_provider_authenticate(&mut self, _provider_id: &str) -> Result<(), String> {
-        let provided = llm_request_credential(
-            "open_router",
-            LlmCredentialType::ApiKey,
-            "OpenRouter API Key",
-            "sk-or-v1-...",
-        )?;
-        if provided {
-            Ok(())
-        } else {
-            Err("Authentication cancelled".to_string())
-        }
-    }
-
     fn llm_provider_reset_credentials(&mut self, _provider_id: &str) -> Result<(), String> {
         llm_delete_credential("open_router")
     }

extensions/openai/Cargo.lock 🔗

@@ -80,15 +80,6 @@ version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
 
-[[package]]
-name = "fopenai"
-version = "0.1.0"
-dependencies = [
- "serde",
- "serde_json",
- "zed_extension_api",
-]
-
 [[package]]
 name = "form_urlencoded"
 version = "1.2.2"
@@ -374,6 +365,15 @@ version = "1.21.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
 
+[[package]]
+name = "openai"
+version = "0.1.0"
+dependencies = [
+ "serde",
+ "serde_json",
+ "zed_extension_api",
+]
+
 [[package]]
 name = "percent-encoding"
 version = "2.3.2"
@@ -761,7 +761,7 @@ dependencies = [
 
 [[package]]
 name = "zed_extension_api"
-version = "0.7.0"
+version = "0.8.0"
 dependencies = [
  "serde",
  "serde_json",

extensions/openai/src/openai.rs 🔗

@@ -515,20 +515,6 @@ Uses your OpenAI API credits. See [OpenAI pricing](https://openai.com/pricing) f
         )
     }
 
-    fn llm_provider_authenticate(&mut self, _provider_id: &str) -> Result<(), String> {
-        let provided = llm_request_credential(
-            "openai",
-            LlmCredentialType::ApiKey,
-            "OpenAI API Key",
-            "sk-...",
-        )?;
-        if provided {
-            Ok(())
-        } else {
-            Err("Authentication cancelled".to_string())
-        }
-    }
-
     fn llm_provider_reset_credentials(&mut self, _provider_id: &str) -> Result<(), String> {
         llm_delete_credential("openai")
     }