diff --git a/crates/language_model/src/language_model.rs b/crates/language_model/src/language_model.rs
index 24f9b84afcfa7b9a40b4a1b7684e9a9b036a5a85..5f4ecdd43257e601585ad87970093b3786a7901a 100644
--- a/crates/language_model/src/language_model.rs
+++ b/crates/language_model/src/language_model.rs
@@ -56,6 +56,10 @@ pub const ZED_CLOUD_PROVIDER_ID: LanguageModelProviderId = LanguageModelProvider
 pub const ZED_CLOUD_PROVIDER_NAME: LanguageModelProviderName =
     LanguageModelProviderName::new("Zed");
 
+pub const OLLAMA_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("ollama");
+pub const OLLAMA_PROVIDER_NAME: LanguageModelProviderName =
+    LanguageModelProviderName::new("Ollama");
+
 pub fn init(client: Arc<Client>, cx: &mut App) {
     init_settings(cx);
     RefreshLlmTokenListener::register(client, cx);
diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs
index 2150966c1af0fdb1bdcc028cba67bcb7b7cbf89f..3bcb2f1ab8d68553360b87bf5a85bc1f3c9fdd1f 100644
--- a/crates/language_models/src/provider/ollama.rs
+++ b/crates/language_models/src/provider/ollama.rs
@@ -9,7 +9,8 @@ use language_model::{
     LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
     LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
     LanguageModelRequestTool, LanguageModelToolChoice, LanguageModelToolUse,
-    LanguageModelToolUseId, MessageContent, RateLimiter, Role, StopReason, TokenUsage,
+    LanguageModelToolUseId, MessageContent, OLLAMA_PROVIDER_ID, OLLAMA_PROVIDER_NAME, RateLimiter,
+    Role, StopReason, TokenUsage,
 };
 use menu;
 use ollama::{
@@ -34,9 +35,6 @@ const OLLAMA_DOWNLOAD_URL: &str = "https://ollama.com/download";
 const OLLAMA_LIBRARY_URL: &str = "https://ollama.com/library";
 const OLLAMA_SITE: &str = "https://ollama.com/";
 
-const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("ollama");
-const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Ollama");
-
 const API_KEY_ENV_VAR_NAME: &str = "OLLAMA_API_KEY";
 static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);
 
@@ -216,11 +214,11 @@ impl LanguageModelProviderState for OllamaLanguageModelProvider {
 
 impl LanguageModelProvider for OllamaLanguageModelProvider {
     fn id(&self) -> LanguageModelProviderId {
-        PROVIDER_ID
+        OLLAMA_PROVIDER_ID
     }
 
     fn name(&self) -> LanguageModelProviderName {
-        PROVIDER_NAME
+        OLLAMA_PROVIDER_NAME
     }
 
     fn icon(&self) -> IconName {
@@ -439,11 +437,11 @@
     }
 
     fn provider_id(&self) -> LanguageModelProviderId {
-        PROVIDER_ID
+        OLLAMA_PROVIDER_ID
    }
 
     fn provider_name(&self) -> LanguageModelProviderName {
-        PROVIDER_NAME
+        OLLAMA_PROVIDER_NAME
     }
 
     fn supports_tools(&self) -> bool {
diff --git a/crates/zeta2/src/related_excerpts.rs b/crates/zeta2/src/related_excerpts.rs
index 7434dbed9e48bb2dcf98131177dc65b2f3930094..f3798f0aa59fbc02ed9bb018dbb5a81e799065c9 100644
--- a/crates/zeta2/src/related_excerpts.rs
+++ b/crates/zeta2/src/related_excerpts.rs
@@ -135,8 +135,8 @@ pub fn find_related_excerpts<'a>(
         .read(cx)
         .available_models(cx)
         .find(|model| {
-            model.provider_id() == language_model::ANTHROPIC_PROVIDER_ID
-                && model.id() == LanguageModelId("claude-haiku-4-5-latest".into())
+            model.provider_id() == language_model::OLLAMA_PROVIDER_ID
+                && model.id() == LanguageModelId("qwen3:8b".into())
         })
     else {
         return Task::ready(Err(anyhow!("could not find claude model")));
@@ -539,6 +539,7 @@ async fn request_tool_call(
                 .to_string(),
             input_schema: serde_json::to_value(schema).unwrap(),
         }],
+        thinking_allowed: false,
         ..Default::default()
     };