Enable Ollama provider only if Ollama service is running

Created by Oleksiy Syvokon

Change summary

Cargo.lock                                              |  3 ++
crates/edit_prediction_ui/Cargo.toml                    |  2 +
crates/edit_prediction_ui/src/edit_prediction_button.rs |  6 +++-
crates/ollama/Cargo.toml                                |  1 +
crates/ollama/src/ollama_edit_prediction_delegate.rs    | 12 +++++++++-
5 files changed, 20 insertions(+), 4 deletions(-)

Detailed changes

Cargo.lock 🔗

@@ -5344,11 +5344,13 @@ dependencies = [
  "gpui",
  "indoc",
  "language",
+ "language_model",
  "log",
  "lsp",
  "markdown",
  "menu",
  "multi_buffer",
+ "ollama",
  "paths",
  "project",
  "regex",
@@ -10886,6 +10888,7 @@ dependencies = [
  "gpui",
  "http_client",
  "language",
+ "language_model",
  "log",
  "schemars",
  "serde",

crates/edit_prediction_ui/Cargo.toml 🔗

@@ -32,9 +32,11 @@ futures.workspace = true
 gpui.workspace = true
 indoc.workspace = true
 language.workspace = true
+language_model.workspace = true
 markdown.workspace = true
 menu.workspace = true
 multi_buffer.workspace = true
+ollama.workspace = true
 paths.workspace = true
 project.workspace = true
 regex.workspace = true

crates/edit_prediction_ui/src/edit_prediction_button.rs 🔗

@@ -22,6 +22,7 @@ use language::{
     EditPredictionsMode, File, Language,
     language_settings::{self, AllLanguageSettings, EditPredictionProvider, all_language_settings},
 };
+use ollama::OllamaEditPredictionDelegate;
 use project::DisableAiSettings;
 use regex::Regex;
 use settings::{
@@ -581,8 +582,9 @@ impl EditPredictionButton {
             providers.push(EditPredictionProvider::Codestral);
         }
 
-        // Ollama is always available as it runs locally
-        providers.push(EditPredictionProvider::Ollama);
+        if OllamaEditPredictionDelegate::is_available(cx) {
+            providers.push(EditPredictionProvider::Ollama);
+        }
 
         if cx.has_flag::<SweepFeatureFlag>()
             && edit_prediction::sweep_ai::sweep_api_token(cx)

crates/ollama/Cargo.toml 🔗

@@ -23,6 +23,7 @@ futures.workspace = true
 gpui.workspace = true
 http_client.workspace = true
 language.workspace = true
+language_model.workspace = true
 log.workspace = true
 schemars = { workspace = true, optional = true }
 serde.workspace = true

crates/ollama/src/ollama_edit_prediction_delegate.rs 🔗

@@ -7,6 +7,7 @@ use http_client::HttpClient;
 use language::{
     Anchor, Buffer, BufferSnapshot, EditPreview, ToPoint, language_settings::all_language_settings,
 };
+use language_model::{LanguageModelProviderId, LanguageModelRegistry};
 use serde::{Deserialize, Serialize};
 use std::{
     ops::Range,
@@ -53,6 +54,13 @@ impl OllamaEditPredictionDelegate {
         }
     }
 
+    pub fn is_available(cx: &App) -> bool {
+        let ollama_provider_id = LanguageModelProviderId::new("ollama");
+        LanguageModelRegistry::read_global(cx)
+            .provider(&ollama_provider_id)
+            .is_some_and(|provider| provider.is_authenticated(cx))
+    }
+
     async fn fetch_completion(
         http_client: Arc<dyn HttpClient>,
         prompt: String,
@@ -130,8 +138,8 @@ impl EditPredictionDelegate for OllamaEditPredictionDelegate {
         true
     }
 
-    fn is_enabled(&self, _buffer: &Entity<Buffer>, _cursor_position: Anchor, _cx: &App) -> bool {
-        true
+    fn is_enabled(&self, _buffer: &Entity<Buffer>, _cursor_position: Anchor, cx: &App) -> bool {
+        Self::is_available(cx)
     }
 
     fn is_refreshing(&self, _cx: &App) -> bool {