ollama: Remove backwards compatibility for Ollama tool call IDs (#49246)

Created by Xiaobo Liu

Release Notes:

- N/A

Signed-off-by: Xiaobo Liu <cppcoffee@gmail.com>

Change summary

crates/language_models/src/provider/ollama.rs | 13 ----
crates/ollama/src/ollama.rs                   | 54 --------------------
2 files changed, 2 insertions(+), 65 deletions(-)

Detailed changes

crates/language_models/src/provider/ollama.rs 🔗

@@ -20,7 +20,6 @@ pub use settings::OllamaAvailableModel as AvailableModel;
 use settings::{Settings, SettingsStore, update_settings_file};
 use std::pin::Pin;
 use std::sync::LazyLock;
-use std::sync::atomic::{AtomicU64, Ordering};
 use std::{collections::HashMap, sync::Arc};
 use ui::{
     ButtonLike, ButtonLink, ConfiguredApiCard, ElevationIndex, List, ListBulletItem, Tooltip,
@@ -368,7 +367,7 @@ impl OllamaLanguageModel {
                             }
                             MessageContent::ToolUse(tool_use) => {
                                 tool_calls.push(OllamaToolCall {
-                                    id: Some(tool_use.id.to_string()),
+                                    id: tool_use.id.to_string(),
                                     function: OllamaFunctionCall {
                                         name: tool_use.name.to_string(),
                                         arguments: tool_use.input,
@@ -514,9 +513,6 @@ impl LanguageModel for OllamaLanguageModel {
 fn map_to_language_model_completion_events(
     stream: Pin<Box<dyn Stream<Item = anyhow::Result<ChatResponseDelta>> + Send>>,
 ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
-    // Used for creating unique tool use ids
-    static TOOL_CALL_COUNTER: AtomicU64 = AtomicU64::new(0);
-
     struct State {
         stream: Pin<Box<dyn Stream<Item = anyhow::Result<ChatResponseDelta>> + Send>>,
         used_tools: bool,
@@ -567,13 +563,6 @@ fn map_to_language_model_completion_events(
 
                     if let Some(tool_call) = tool_calls.and_then(|v| v.into_iter().next()) {
                         let OllamaToolCall { id, function } = tool_call;
-                        let id = id.unwrap_or_else(|| {
-                            format!(
-                                "{}-{}",
-                                &function.name,
-                                TOOL_CALL_COUNTER.fetch_add(1, Ordering::Relaxed)
-                            )
-                        });
                         let event = LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
                             id: LanguageModelToolUseId::from(id),
                             name: Arc::from(function.name),

crates/ollama/src/ollama.rs 🔗

@@ -86,9 +86,7 @@ pub enum ChatMessage {
 
 #[derive(Serialize, Deserialize, Debug)]
 pub struct OllamaToolCall {
-    // TODO: Remove `Option` after most users have updated to Ollama v0.12.10,
-    // which was released on the 4th of November 2025
-    pub id: Option<String>,
+    pub id: String,
     pub function: OllamaFunctionCall,
 }
 
@@ -465,56 +463,6 @@ mod tests {
         }
     }
 
-    // Backwards compatibility with Ollama versions prior to v0.12.10 November 2025
-    // This test is a copy of `parse_tool_call()` with the `id` field omitted.
-    #[test]
-    fn parse_tool_call_pre_0_12_10() {
-        let response = serde_json::json!({
-            "model": "llama3.2:3b",
-            "created_at": "2025-04-28T20:02:02.140489Z",
-            "message": {
-                "role": "assistant",
-                "content": "",
-                "tool_calls": [
-                    {
-                        "function": {
-                            "name": "weather",
-                            "arguments": {
-                                "city": "london",
-                            }
-                        }
-                    }
-                ]
-            },
-            "done_reason": "stop",
-            "done": true,
-            "total_duration": 2758629166u64,
-            "load_duration": 1770059875,
-            "prompt_eval_count": 147,
-            "prompt_eval_duration": 684637583,
-            "eval_count": 16,
-            "eval_duration": 302561917,
-        });
-
-        let result: ChatResponseDelta = serde_json::from_value(response).unwrap();
-        match result.message {
-            ChatMessage::Assistant {
-                content,
-                tool_calls: Some(tool_calls),
-                images: _,
-                thinking,
-            } => {
-                assert!(content.is_empty());
-                assert!(thinking.is_none());
-
-                // When the `Option` around `id` is removed, this test should complain
-                // and be subsequently deleted in favor of `parse_tool_call()`
-                assert!(tool_calls.first().is_some_and(|call| call.id.is_none()))
-            }
-            _ => panic!("Deserialized wrong role"),
-        }
-    }
-
     #[test]
     fn parse_show_model() {
         let response = serde_json::json!({