Try using Ollama qwen3:8b for the related-excerpts model

Created by Agus Zubiaga

Change summary

crates/language_model/src/language_model.rs   |  4 ++++
crates/language_models/src/provider/ollama.rs | 14 ++++++--------
crates/zeta2/src/related_excerpts.rs          |  5 +++--
3 files changed, 13 insertions(+), 10 deletions(-)

Detailed changes

crates/language_model/src/language_model.rs 🔗

@@ -56,6 +56,10 @@ pub const ZED_CLOUD_PROVIDER_ID: LanguageModelProviderId = LanguageModelProvider
 pub const ZED_CLOUD_PROVIDER_NAME: LanguageModelProviderName =
     LanguageModelProviderName::new("Zed");
 
+pub const OLLAMA_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("ollama");
+pub const OLLAMA_PROVIDER_NAME: LanguageModelProviderName =
+    LanguageModelProviderName::new("Ollama");
+
 pub fn init(client: Arc<Client>, cx: &mut App) {
     init_settings(cx);
     RefreshLlmTokenListener::register(client, cx);

crates/language_models/src/provider/ollama.rs 🔗

@@ -9,7 +9,8 @@ use language_model::{
     LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
     LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
     LanguageModelRequestTool, LanguageModelToolChoice, LanguageModelToolUse,
-    LanguageModelToolUseId, MessageContent, RateLimiter, Role, StopReason, TokenUsage,
+    LanguageModelToolUseId, MessageContent, OLLAMA_PROVIDER_ID, OLLAMA_PROVIDER_NAME, RateLimiter,
+    Role, StopReason, TokenUsage,
 };
 use menu;
 use ollama::{
@@ -34,9 +35,6 @@ const OLLAMA_DOWNLOAD_URL: &str = "https://ollama.com/download";
 const OLLAMA_LIBRARY_URL: &str = "https://ollama.com/library";
 const OLLAMA_SITE: &str = "https://ollama.com/";
 
-const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("ollama");
-const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Ollama");
-
 const API_KEY_ENV_VAR_NAME: &str = "OLLAMA_API_KEY";
 static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);
 
@@ -216,11 +214,11 @@ impl LanguageModelProviderState for OllamaLanguageModelProvider {
 
 impl LanguageModelProvider for OllamaLanguageModelProvider {
     fn id(&self) -> LanguageModelProviderId {
-        PROVIDER_ID
+        OLLAMA_PROVIDER_ID
     }
 
     fn name(&self) -> LanguageModelProviderName {
-        PROVIDER_NAME
+        OLLAMA_PROVIDER_NAME
     }
 
     fn icon(&self) -> IconName {
@@ -439,11 +437,11 @@ impl LanguageModel for OllamaLanguageModel {
     }
 
     fn provider_id(&self) -> LanguageModelProviderId {
-        PROVIDER_ID
+        OLLAMA_PROVIDER_ID
     }
 
     fn provider_name(&self) -> LanguageModelProviderName {
-        PROVIDER_NAME
+        OLLAMA_PROVIDER_NAME
     }
 
     fn supports_tools(&self) -> bool {

crates/zeta2/src/related_excerpts.rs 🔗

@@ -135,8 +135,8 @@ pub fn find_related_excerpts<'a>(
         .read(cx)
         .available_models(cx)
         .find(|model| {
-            model.provider_id() == language_model::ANTHROPIC_PROVIDER_ID
-                && model.id() == LanguageModelId("claude-haiku-4-5-latest".into())
+            model.provider_id() == language_model::OLLAMA_PROVIDER_ID
+                && model.id() == LanguageModelId("qwen3:8b".into())
         })
     else {
-        return Task::ready(Err(anyhow!("could not find claude model")));
+        return Task::ready(Err(anyhow!("could not find qwen model")));
@@ -539,6 +539,7 @@ async fn request_tool_call<T: JsonSchema>(
                 .to_string(),
             input_schema: serde_json::to_value(schema).unwrap(),
         }],
+        thinking_allowed: false,
         ..Default::default()
     };