@@ -381,10 +381,13 @@ impl OllamaLanguageModel {
thinking = Some(text)
}
MessageContent::ToolUse(tool_use) => {
- tool_calls.push(OllamaToolCall::Function(OllamaFunctionCall {
- name: tool_use.name.to_string(),
- arguments: tool_use.input,
- }));
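+ // Forward the tool use id alongside the function call (ids are supported as of Ollama v0.12.10).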
+ tool_calls.push(OllamaToolCall {
+ id: Some(tool_use.id.to_string()),
+ function: OllamaFunctionCall {
+ name: tool_use.name.to_string(),
+ arguments: tool_use.input,
+ },
+ });
}
_ => (),
}
@@ -575,25 +578,23 @@ fn map_to_language_model_completion_events(
}
if let Some(tool_call) = tool_calls.and_then(|v| v.into_iter().next()) {
- match tool_call {
- OllamaToolCall::Function(function) => {
- let tool_id = format!(
- "{}-{}",
- &function.name,
- TOOL_CALL_COUNTER.fetch_add(1, Ordering::Relaxed)
- );
- let event =
- LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
- id: LanguageModelToolUseId::from(tool_id),
- name: Arc::from(function.name),
- raw_input: function.arguments.to_string(),
- input: function.arguments,
- is_input_complete: true,
- });
- events.push(Ok(event));
- state.used_tools = true;
- }
- }
+ let OllamaToolCall { id, function } = tool_call;
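+ // Ollama versions prior to v0.12.10 don't send a tool call id, so synthesize one from the function name and a counter.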
+ let id = id.unwrap_or_else(|| {
+ format!(
+ "{}-{}",
+ &function.name,
+ TOOL_CALL_COUNTER.fetch_add(1, Ordering::Relaxed)
+ )
+ });
+ let event = LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
+ id: LanguageModelToolUseId::from(id),
+ name: Arc::from(function.name),
+ raw_input: function.arguments.to_string(),
+ input: function.arguments,
+ is_input_complete: true,
+ });
+ events.push(Ok(event));
+ state.used_tools = true;
} else if !content.is_empty() {
events.push(Ok(LanguageModelCompletionEvent::Text(content)));
}
@@ -102,9 +102,11 @@ pub enum ChatMessage {
}
#[derive(Serialize, Deserialize, Debug)]
-#[serde(rename_all = "lowercase")]
-pub enum OllamaToolCall {
- Function(OllamaFunctionCall),
+pub struct OllamaToolCall {
+ // TODO: Remove the `Option` once most users have updated to Ollama v0.12.10,
+ // which was released on November 4, 2025.
+ pub id: Option<String>,
+ pub function: OllamaFunctionCall,
}
#[derive(Serialize, Deserialize, Debug)]
@@ -444,6 +446,7 @@ mod tests {
"content": "",
"tool_calls": [
{
+ "id": "call_llama3.2:3b_145155",
"function": {
"name": "weather",
"arguments": {
@@ -479,6 +482,56 @@ mod tests {
}
}
+ // Backwards compatibility with Ollama versions prior to v0.12.10 (released November 2025).
+ // This test is a copy of `parse_tool_call()` with the `id` field omitted.
+ #[test]
+ fn parse_tool_call_pre_0_12_10() {
+ let response = serde_json::json!({
+ "model": "llama3.2:3b",
+ "created_at": "2025-04-28T20:02:02.140489Z",
+ "message": {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "function": {
+ "name": "weather",
+ "arguments": {
+ "city": "london",
+ }
+ }
+ }
+ ]
+ },
+ "done_reason": "stop",
+ "done": true,
+ "total_duration": 2758629166u64,
+ "load_duration": 1770059875,
+ "prompt_eval_count": 147,
+ "prompt_eval_duration": 684637583,
+ "eval_count": 16,
+ "eval_duration": 302561917,
+ });
+
+ let result: ChatResponseDelta = serde_json::from_value(response).unwrap();
+ match result.message {
+ ChatMessage::Assistant {
+ content,
+ tool_calls: Some(tool_calls),
+ images: _,
+ thinking,
+ } => {
+ assert!(content.is_empty());
+ assert!(thinking.is_none());
+
+ // Once the `Option` around `id` is removed, this test will fail to compile
+ // and should then be deleted in favor of `parse_tool_call()`.
+ assert!(tool_calls.first().is_some_and(|call| call.id.is_none()));
+ }
+ _ => panic!("Deserialized wrong role"),
+ }
+ }
+
#[test]
fn parse_show_model() {
let response = serde_json::json!({