language_models: Handle empty tool call arguments consistently (#48958)

Created by Daniel Strobusch

Normalize handling of empty tool call arguments across all LLM
providers. Many providers return empty strings for tool calls with no
arguments, which would previously fail JSON parsing.

- Created shared parse_tool_arguments() helper in provider/util.rs that
treats empty strings as empty JSON objects ({})
- Refactored 10 occurrences across 9 provider files to use the helper
- Ensures consistent behavior across the affected providers (anthropic,
bedrock, copilot_chat, deepseek, lmstudio, mistral, open_ai, open_router)

Closes: #48955

Release Notes:

- Fixed tool calls with no arguments failing when using certain LLM
providers

Change summary

crates/language_models/src/provider.rs              |  1 
crates/language_models/src/provider/anthropic.rs    |  9 
crates/language_models/src/provider/bedrock.rs      | 10 -
crates/language_models/src/provider/copilot_chat.rs | 19 --
crates/language_models/src/provider/deepseek.rs     |  5 
crates/language_models/src/provider/lmstudio.rs     |  4 
crates/language_models/src/provider/mistral.rs      |  5 
crates/language_models/src/provider/open_ai.rs      | 94 +++++++++++---
crates/language_models/src/provider/open_router.rs  |  5 
crates/language_models/src/provider/util.rs         | 13 ++
10 files changed, 106 insertions(+), 59 deletions(-)

Detailed changes

crates/language_models/src/provider/anthropic.rs 🔗

@@ -24,6 +24,8 @@ use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
 use ui_input::InputField;
 use util::ResultExt;
 
+use crate::provider::util::parse_tool_arguments;
+
 pub use settings::AnthropicAvailableModel as AvailableModel;
 
 const PROVIDER_ID: LanguageModelProviderId = language_model::ANTHROPIC_PROVIDER_ID;
@@ -829,12 +831,7 @@ impl AnthropicEventMapper {
             Event::ContentBlockStop { index } => {
                 if let Some(tool_use) = self.tool_uses_by_index.remove(&index) {
                     let input_json = tool_use.input_json.trim();
-                    let input_value = if input_json.is_empty() {
-                        Ok(serde_json::Value::Object(serde_json::Map::default()))
-                    } else {
-                        serde_json::Value::from_str(input_json)
-                    };
-                    let event_result = match input_value {
+                    let event_result = match parse_tool_arguments(input_json) {
                         Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
                             LanguageModelToolUse {
                                 id: tool_use.id.into(),

crates/language_models/src/provider/bedrock.rs 🔗

@@ -1,5 +1,4 @@
 use std::pin::Pin;
-use std::str::FromStr;
 use std::sync::Arc;
 
 use anyhow::{Context as _, Result, anyhow};
@@ -48,6 +47,7 @@ use ui_input::InputField;
 use util::ResultExt;
 
 use crate::AllLanguageModelSettings;
+use crate::provider::util::parse_tool_arguments;
 
 actions!(bedrock, [Tab, TabPrev]);
 
@@ -1099,12 +1099,8 @@ pub fn map_to_language_model_completion_events(
                             .tool_uses_by_index
                             .remove(&cb_stop.content_block_index)
                             .map(|tool_use| {
-                                let input = if tool_use.input_json.is_empty() {
-                                    Value::Null
-                                } else {
-                                    serde_json::Value::from_str(&tool_use.input_json)
-                                        .unwrap_or(Value::Null)
-                                };
+                                let input = parse_tool_arguments(&tool_use.input_json)
+                                    .unwrap_or_else(|_| Value::Object(Default::default()));
 
                                 Ok(LanguageModelCompletionEvent::ToolUse(
                                     LanguageModelToolUse {

crates/language_models/src/provider/copilot_chat.rs 🔗

@@ -30,6 +30,8 @@ use settings::SettingsStore;
 use ui::prelude::*;
 use util::debug_panic;
 
+use crate::provider::util::parse_tool_arguments;
+
 const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("copilot_chat");
 const PROVIDER_NAME: LanguageModelProviderName =
     LanguageModelProviderName::new("GitHub Copilot Chat");
@@ -493,17 +495,9 @@ pub fn map_to_language_model_completion_events(
                                 }
 
                                 events.extend(state.tool_calls_by_index.drain().map(
-                                    |(_, tool_call)| {
-                                        // The model can output an empty string
-                                        // to indicate the absence of arguments.
-                                        // When that happens, create an empty
-                                        // object instead.
-                                        let arguments = if tool_call.arguments.is_empty() {
-                                            Ok(serde_json::Value::Object(Default::default()))
-                                        } else {
-                                            serde_json::Value::from_str(&tool_call.arguments)
-                                        };
-                                        match arguments {
+                                    |(_, tool_call)| match parse_tool_arguments(
+                                        &tool_call.arguments,
+                                    ) {
                                         Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
                                             LanguageModelToolUse {
                                                 id: tool_call.id.into(),
@@ -522,7 +516,6 @@ pub fn map_to_language_model_completion_events(
                                                 json_parse_error: error.to_string(),
                                             },
                                         ),
-                                    }
                                     },
                                 ));
 
@@ -607,7 +600,7 @@ impl CopilotResponsesEventMapper {
                     ..
                 } => {
                     let mut events = Vec::new();
-                    match serde_json::from_str::<serde_json::Value>(&arguments) {
+                    match parse_tool_arguments(&arguments) {
                         Ok(input) => events.push(Ok(LanguageModelCompletionEvent::ToolUse(
                             LanguageModelToolUse {
                                 id: call_id.into(),

crates/language_models/src/provider/deepseek.rs 🔗

@@ -16,13 +16,14 @@ use language_model::{
 pub use settings::DeepseekAvailableModel as AvailableModel;
 use settings::{Settings, SettingsStore};
 use std::pin::Pin;
-use std::str::FromStr;
 use std::sync::{Arc, LazyLock};
 
 use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
 use ui_input::InputField;
 use util::ResultExt;
 
+use crate::provider::util::parse_tool_arguments;
+
 const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("deepseek");
 const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("DeepSeek");
 
@@ -486,7 +487,7 @@ impl DeepSeekEventMapper {
             }
             Some("tool_calls") => {
                 events.extend(self.tool_calls_by_index.drain().map(|(_, tool_call)| {
-                    match serde_json::Value::from_str(&tool_call.arguments) {
+                    match parse_tool_arguments(&tool_call.arguments) {
                         Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
                             LanguageModelToolUse {
                                 id: tool_call.id.clone().into(),

crates/language_models/src/provider/lmstudio.rs 🔗

@@ -18,12 +18,12 @@ use lmstudio::{ModelType, get_models};
 pub use settings::LmStudioAvailableModel as AvailableModel;
 use settings::{Settings, SettingsStore};
 use std::pin::Pin;
-use std::str::FromStr;
 use std::{collections::BTreeMap, sync::Arc};
 use ui::{ButtonLike, Indicator, List, ListBulletItem, prelude::*};
 use util::ResultExt;
 
 use crate::AllLanguageModelSettings;
+use crate::provider::util::parse_tool_arguments;
 
 const LMSTUDIO_DOWNLOAD_URL: &str = "https://lmstudio.ai/download";
 const LMSTUDIO_CATALOG_URL: &str = "https://lmstudio.ai/models";
@@ -558,7 +558,7 @@ impl LmStudioEventMapper {
             }
             Some("tool_calls") => {
                 events.extend(self.tool_calls_by_index.drain().map(|(_, tool_call)| {
-                    match serde_json::Value::from_str(&tool_call.arguments) {
+                    match parse_tool_arguments(&tool_call.arguments) {
                         Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
                             LanguageModelToolUse {
                                 id: tool_call.id.into(),

crates/language_models/src/provider/mistral.rs 🔗

@@ -16,13 +16,14 @@ pub use settings::MistralAvailableModel as AvailableModel;
 use settings::{Settings, SettingsStore};
 use std::collections::HashMap;
 use std::pin::Pin;
-use std::str::FromStr;
 use std::sync::{Arc, LazyLock};
 use strum::IntoEnumIterator;
 use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
 use ui_input::InputField;
 use util::ResultExt;
 
+use crate::provider::util::parse_tool_arguments;
+
 const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("mistral");
 const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Mistral");
 
@@ -659,7 +660,7 @@ impl MistralEventMapper {
                 continue;
             }
 
-            match serde_json::Value::from_str(&tool_call.arguments) {
+            match parse_tool_arguments(&tool_call.arguments) {
                 Ok(input) => results.push(Ok(LanguageModelCompletionEvent::ToolUse(
                     LanguageModelToolUse {
                         id: tool_call.id.into(),

crates/language_models/src/provider/open_ai.rs 🔗

@@ -28,13 +28,14 @@ use open_ai::{
 };
 use settings::{OpenAiAvailableModel as AvailableModel, Settings, SettingsStore};
 use std::pin::Pin;
-use std::str::FromStr as _;
 use std::sync::{Arc, LazyLock};
 use strum::IntoEnumIterator;
 use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
 use ui_input::InputField;
 use util::ResultExt;
 
+use crate::provider::util::parse_tool_arguments;
+
 const PROVIDER_ID: LanguageModelProviderId = language_model::OPEN_AI_PROVIDER_ID;
 const PROVIDER_NAME: LanguageModelProviderName = language_model::OPEN_AI_PROVIDER_NAME;
 
@@ -831,7 +832,7 @@ impl OpenAiEventMapper {
             }
             Some("tool_calls") => {
                 events.extend(self.tool_calls_by_index.drain().map(|(_, tool_call)| {
-                    match serde_json::Value::from_str(&tool_call.arguments) {
+                    match parse_tool_arguments(&tool_call.arguments) {
                         Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
                             LanguageModelToolUse {
                                 id: tool_call.id.clone().into(),
@@ -963,7 +964,7 @@ impl OpenAiResponseEventMapper {
                     }
                     let raw_input = entry.arguments.clone();
                     self.pending_stop_reason = Some(StopReason::ToolUse);
-                    match serde_json::from_str::<serde_json::Value>(&entry.arguments) {
+                    match parse_tool_arguments(&entry.arguments) {
                         Ok(input) => {
                             vec![Ok(LanguageModelCompletionEvent::ToolUse(
                                 LanguageModelToolUse {
@@ -1087,29 +1088,27 @@ impl OpenAiResponseEventMapper {
                 };
                 let name: Arc<str> = Arc::from(function_call.name.clone().unwrap_or_default());
                 let arguments = &function_call.arguments;
-                if !arguments.is_empty() {
-                    self.pending_stop_reason = Some(StopReason::ToolUse);
-                    match serde_json::from_str::<serde_json::Value>(arguments) {
-                        Ok(input) => {
-                            events.push(Ok(LanguageModelCompletionEvent::ToolUse(
-                                LanguageModelToolUse {
-                                    id: LanguageModelToolUseId::from(call_id.clone()),
-                                    name: name.clone(),
-                                    is_input_complete: true,
-                                    input,
-                                    raw_input: arguments.clone(),
-                                    thought_signature: None,
-                                },
-                            )));
-                        }
-                        Err(error) => {
-                            events.push(Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
+                self.pending_stop_reason = Some(StopReason::ToolUse);
+                match parse_tool_arguments(arguments) {
+                    Ok(input) => {
+                        events.push(Ok(LanguageModelCompletionEvent::ToolUse(
+                            LanguageModelToolUse {
                                 id: LanguageModelToolUseId::from(call_id.clone()),
-                                tool_name: name.clone(),
-                                raw_input: Arc::<str>::from(arguments.clone()),
-                                json_parse_error: error.to_string(),
-                            }));
-                        }
+                                name: name.clone(),
+                                is_input_complete: true,
+                                input,
+                                raw_input: arguments.clone(),
+                                thought_signature: None,
+                            },
+                        )));
+                    }
+                    Err(error) => {
+                        events.push(Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
+                            id: LanguageModelToolUseId::from(call_id.clone()),
+                            tool_name: name.clone(),
+                            raw_input: Arc::<str>::from(arguments.clone()),
+                            json_parse_error: error.to_string(),
+                        }));
                     }
                 }
             }
@@ -1928,4 +1927,49 @@ mod tests {
             LanguageModelCompletionEvent::Stop(StopReason::MaxTokens)
         ));
     }
+
+    #[test]
+    fn responses_stream_handles_empty_tool_arguments() {
+        // Test that tools with no arguments (empty string) are handled correctly
+        let events = vec![
+            ResponsesStreamEvent::OutputItemAdded {
+                output_index: 0,
+                sequence_number: None,
+                item: response_item_function_call("item_fn", Some("")),
+            },
+            ResponsesStreamEvent::FunctionCallArgumentsDone {
+                item_id: "item_fn".into(),
+                output_index: 0,
+                arguments: "".into(),
+                sequence_number: None,
+            },
+            ResponsesStreamEvent::Completed {
+                response: ResponseSummary::default(),
+            },
+        ];
+
+        let mapped = map_response_events(events);
+        assert_eq!(mapped.len(), 2);
+
+        // Should produce a ToolUse event with an empty object
+        assert!(matches!(
+            &mapped[0],
+            LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
+                id,
+                name,
+                raw_input,
+                input,
+                ..
+            }) if id.to_string() == "call_123"
+                && name.as_ref() == "get_weather"
+                && raw_input == ""
+                && input.is_object()
+                && input.as_object().unwrap().is_empty()
+        ));
+
+        assert!(matches!(
+            mapped[1],
+            LanguageModelCompletionEvent::Stop(StopReason::ToolUse)
+        ));
+    }
 }

crates/language_models/src/provider/open_router.rs 🔗

@@ -16,12 +16,13 @@ use open_router::{
 };
 use settings::{OpenRouterAvailableModel as AvailableModel, Settings, SettingsStore};
 use std::pin::Pin;
-use std::str::FromStr as _;
 use std::sync::{Arc, LazyLock};
 use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
 use ui_input::InputField;
 use util::ResultExt;
 
+use crate::provider::util::parse_tool_arguments;
+
 const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("openrouter");
 const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("OpenRouter");
 
@@ -657,7 +658,7 @@ impl OpenRouterEventMapper {
             }
             Some("tool_calls") => {
                 events.extend(self.tool_calls_by_index.drain().map(|(_, tool_call)| {
-                    match serde_json::Value::from_str(&tool_call.arguments) {
+                    match parse_tool_arguments(&tool_call.arguments) {
                         Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
                             LanguageModelToolUse {
                                 id: tool_call.id.clone().into(),

crates/language_models/src/provider/util.rs 🔗

@@ -0,0 +1,13 @@
+use std::str::FromStr;
+
+/// Parses tool call arguments JSON, treating empty strings as empty objects.
+///
+/// Many LLM providers return empty strings for tool calls with no arguments.
+/// This helper normalizes that behavior by converting empty strings to `{}`.
+pub fn parse_tool_arguments(arguments: &str) -> Result<serde_json::Value, serde_json::Error> {
+    if arguments.is_empty() {
+        Ok(serde_json::Value::Object(Default::default()))
+    } else {
+        serde_json::Value::from_str(arguments)
+    }
+}