From f5cafe5b95abcef63506496dc61d79761c41a838 Mon Sep 17 00:00:00 2001
From: Oleksiy Syvokon
Date: Fri, 19 Dec 2025 14:22:12 +0200
Subject: [PATCH] Change rules for the default Ollama models:

- Use qwen2.5-coder only if it's already downloaded
- Otherwise, show a warning to configure the model
---
 .../src/edit_prediction_button.rs                 | 22 +++++++++---
 crates/ollama/src/ollama.rs                       | 19 ++++++++++-
 .../src/ollama_edit_prediction_delegate.rs        | 34 +++++++++++++++----
 .../settings/src/settings_content/language.rs     |  2 +-
 4 files changed, 64 insertions(+), 13 deletions(-)

diff --git a/crates/edit_prediction_ui/src/edit_prediction_button.rs b/crates/edit_prediction_ui/src/edit_prediction_button.rs
index 6e2f9386927ede9927f3367bbf8e2d62aa2a790d..64a7a65d538863636cebb54b2d1cf92f2ff188fb 100644
--- a/crates/edit_prediction_ui/src/edit_prediction_button.rs
+++ b/crates/edit_prediction_ui/src/edit_prediction_button.rs
@@ -92,9 +92,9 @@ impl Render for EditPredictionButton {
             return div().hidden();
         }
-        let all_language_settings = all_language_settings(None, cx);
+        let language_settings = all_language_settings(None, cx);
 
-        match all_language_settings.edit_predictions.provider {
+        match language_settings.edit_predictions.provider {
             EditPredictionProvider::Copilot => {
                 let Some(copilot) = Copilot::global(cx) else {
                     return div().hidden();
                 };
@@ -298,8 +298,6 @@ impl Render for EditPredictionButton {
                 let enabled = self.editor_enabled.unwrap_or(true);
                 let this = cx.weak_entity();
 
-                let tooltip_meta = "Powered by Ollama";
-
                 div().child(
                     PopoverMenu::new("ollama")
                         .menu(move |window, cx| {
@@ -323,6 +321,22 @@
                         ))
                     }),
                     move |_window, cx| {
+                        let settings = all_language_settings(None, cx);
+                        let tooltip_meta = match settings
+                            .edit_predictions
+                            .ollama
+                            .model
+                            .as_deref()
+                        {
+                            Some(model) if !model.trim().is_empty() => {
+                                format!("Powered by Ollama ({model})")
+                            }
+                            _ => {
+                                "Ollama model not configured. Set a model in settings."
+                                    .to_string()
+                            }
+                        };
+
                         Tooltip::with_meta(
                             "Edit Prediction",
                             Some(&ToggleMenu),
diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs
index dd81e496c871c7cf2b1897abbfc83f7fc2da0be2..24b87530ec88626b7795a83b4a82f92d4cb9a144 100644
--- a/crates/ollama/src/ollama.rs
+++ b/crates/ollama/src/ollama.rs
@@ -2,7 +2,24 @@ mod ollama_edit_prediction_delegate;
 
 pub use ollama_edit_prediction_delegate::OllamaEditPredictionDelegate;
 
-use anyhow::{Context as _, Result};
+use anyhow::{Context, Result};
+
+pub const RECOMMENDED_EDIT_PREDICTION_MODELS: [&str; 4] = [
+    "qwen2.5-coder:3b-base",
+    "qwen2.5-coder:7b-base",
+    "qwen2.5-coder:3b",
+    "qwen2.5-coder:7b",
+];
+
+pub fn pick_recommended_edit_prediction_model<'a>(
+    available_models: impl IntoIterator<Item = &'a str>,
+) -> Option<&'static str> {
+    let available: std::collections::HashSet<&str> = available_models.into_iter().collect();
+
+    RECOMMENDED_EDIT_PREDICTION_MODELS
+        .into_iter()
+        .find(|recommended| available.contains(recommended))
+}
 use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
 use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest};
 use serde::{Deserialize, Serialize};
diff --git a/crates/ollama/src/ollama_edit_prediction_delegate.rs b/crates/ollama/src/ollama_edit_prediction_delegate.rs
index 2c4e4f38f86c1dfa0b557b432526b42b0ba9e24c..78e8edf7908babc0e762fb530e0f586284110eac 100644
--- a/crates/ollama/src/ollama_edit_prediction_delegate.rs
+++ b/crates/ollama/src/ollama_edit_prediction_delegate.rs
@@ -16,7 +16,7 @@ use std::{
 };
 use text::ToOffset;
 
-use crate::OLLAMA_API_URL;
+use crate::{OLLAMA_API_URL, get_models, pick_recommended_edit_prediction_model};
 
 pub const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(150);
 
@@ -166,12 +166,7 @@ impl EditPredictionDelegate for OllamaEditPredictionDelegate {
 
         let http_client = self.http_client.clone();
         let settings = all_language_settings(None, cx);
-        let model = settings
-            .edit_predictions
-            .ollama
-            .model
-            .clone()
-            .unwrap_or_else(|| "qwen2.5-coder:3b-base".to_string());
+        let configured_model = settings.edit_predictions.ollama.model.clone();
         let api_url = settings
             .edit_predictions
             .ollama
@@ -185,6 +180,31 @@
                 cx.background_executor().timer(DEBOUNCE_TIMEOUT).await;
             }
 
+            let model = if let Some(model) = configured_model
+                .as_deref()
+                .map(str::trim)
+                .filter(|model| !model.is_empty())
+            {
+                model.to_string()
+            } else {
+                let local_models = get_models(http_client.as_ref(), &api_url, None).await?;
+                let available_model_names = local_models.iter().map(|model| model.name.as_str());
+
+                match pick_recommended_edit_prediction_model(available_model_names) {
+                    Some(recommended) => recommended.to_string(),
+                    None => {
+                        log::debug!(
+                            "Ollama: No model configured and no recommended local model found; skipping edit prediction"
+                        );
+                        this.update(cx, |this, cx| {
+                            this.pending_request = None;
+                            cx.notify();
+                        })?;
+                        return Ok(());
+                    }
+                }
+            };
+
             let cursor_offset = cursor_position.to_offset(&snapshot);
             let cursor_point = cursor_offset.to_point(&snapshot);
             let excerpt = EditPredictionExcerpt::select_from_buffer(
diff --git a/crates/settings/src/settings_content/language.rs b/crates/settings/src/settings_content/language.rs
index a84d156d897a74ae5852e3ebaef3e9909d20ece6..7f78391c5201adbedad589a8e1ab1d0a17018ea0 100644
--- a/crates/settings/src/settings_content/language.rs
+++ b/crates/settings/src/settings_content/language.rs
@@ -214,7 +214,7 @@ pub struct CodestralSettingsContent {
 pub struct OllamaEditPredictionSettingsContent {
     /// Model to use for completions.
     ///
-    /// Default: "qwen2.5-coder:1.5b"
+    /// Default: none
     pub model: Option<String>,
     /// Api URL to use for completions.
     ///
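
Note (illustration only, not part of the patch): the sketch below shows how the helper
added to crates/ollama/src/ollama.rs resolves the default model. The constant and the
function are copied from the hunk above so the sketch compiles on its own; the installed
model names in main() are hypothetical example data, not output from a real Ollama
instance. The ordering of RECOMMENDED_EDIT_PREDICTION_MODELS is taken as-is from the
patch; the sketch only demonstrates that the first installed match wins.

    // Standalone sketch of the model-selection fallback introduced by this patch.
    pub const RECOMMENDED_EDIT_PREDICTION_MODELS: [&str; 4] = [
        "qwen2.5-coder:3b-base",
        "qwen2.5-coder:7b-base",
        "qwen2.5-coder:3b",
        "qwen2.5-coder:7b",
    ];

    pub fn pick_recommended_edit_prediction_model<'a>(
        available_models: impl IntoIterator<Item = &'a str>,
    ) -> Option<&'static str> {
        let available: std::collections::HashSet<&str> = available_models.into_iter().collect();

        RECOMMENDED_EDIT_PREDICTION_MODELS
            .into_iter()
            .find(|recommended| available.contains(recommended))
    }

    fn main() {
        // Hypothetical list of locally downloaded models (the names Ollama's /api/tags reports).
        let installed = ["llama3:8b", "qwen2.5-coder:7b", "qwen2.5-coder:3b-base"];

        // The order of RECOMMENDED_EDIT_PREDICTION_MODELS decides the winner, so the
        // 3b base tag is picked even though a 7b tag is also installed.
        assert_eq!(
            pick_recommended_edit_prediction_model(installed),
            Some("qwen2.5-coder:3b-base")
        );

        // With no recommended model downloaded, the helper returns None; per the delegate
        // change above, edit prediction is then skipped with a debug log instead of
        // falling back to a hard-coded default model.
        assert_eq!(pick_recommended_edit_prediction_model(["llama3:8b"]), None);
    }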