From f6fd673dbcf3c51b2b54ec0520061c85faca1b93 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tom=20Houl=C3=A9?= <13155277+tomhoule@users.noreply.github.com>
Date: Wed, 18 Mar 2026 15:23:47 +0100
Subject: [PATCH] language_models: Apply JSON schema subset transform to xAI
 (#51835)

The conversations on Grok 4 were failing right away on the first message
in a conversation when using a Zed-provided model directly, but not the
xAI API directly.

The reason we were seeing this is that when sending the JSON schemas for
built-in tools to the cloud API, we didn't use the transform we use for
xAI in a BYOK context. The xAI API doesn't support the full JSON schema
spec, and specifically for the spawn_agent tool, we were generating a
`oneOf` type for a field. Applying that transformation in the cloud case
too fixes the issue.

Release Notes:

- Fixed broken (failing on first message) xAI models over the Zed
  provider on profiles using the spawn_agent tool, including the default
  Ask and Write profiles.

Co-authored-by: Neel
---
 crates/language_models/src/provider/cloud.rs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs
index f2570e6516a9a69811bec726097e6318d9ede04b..1e68ad1971410445c8df731b6d7bae4243074cfe 100644
--- a/crates/language_models/src/provider/cloud.rs
+++ b/crates/language_models/src/provider/cloud.rs
@@ -641,11 +641,11 @@ impl LanguageModel for CloudLanguageModel {
     fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
         match self.model.provider {
             cloud_llm_client::LanguageModelProvider::Anthropic
-            | cloud_llm_client::LanguageModelProvider::OpenAi
-            | cloud_llm_client::LanguageModelProvider::XAi => {
+            | cloud_llm_client::LanguageModelProvider::OpenAi => {
                 LanguageModelToolSchemaFormat::JsonSchema
             }
-            cloud_llm_client::LanguageModelProvider::Google => {
+            cloud_llm_client::LanguageModelProvider::Google
+            | cloud_llm_client::LanguageModelProvider::XAi => {
                 LanguageModelToolSchemaFormat::JsonSchemaSubset
             }