Cargo.lock 🔗
@@ -15215,7 +15215,6 @@ dependencies = [
"heck 0.5.0",
"itertools 0.14.0",
"language",
- "language_model",
"language_models",
"log",
"menu",
Max Brunsfeld created
Previously, Ollama would not show up as available until you opened some
UI that prompted connecting to Ollama, like the agent panel or the edit
prediction settings.
Release Notes:
- N/A
Cargo.lock | 1
crates/edit_prediction/src/ollama.rs | 27 ++++++++-
crates/edit_prediction_ui/src/edit_prediction_button.rs | 1
crates/settings_ui/Cargo.toml | 1
crates/settings_ui/src/components/ollama_model_picker.rs | 25 --------
5 files changed, 26 insertions(+), 29 deletions(-)
@@ -15215,7 +15215,6 @@ dependencies = [
"heck 0.5.0",
"itertools 0.14.0",
"language",
- "language_model",
"language_models",
"log",
"menu",
@@ -8,7 +8,7 @@ use crate::{
};
use anyhow::{Context as _, Result};
use futures::AsyncReadExt as _;
-use gpui::{App, AppContext as _, Entity, Task, http_client};
+use gpui::{App, AppContext as _, Entity, SharedString, Task, http_client};
use language::{
Anchor, Buffer, BufferSnapshot, OffsetRangeExt as _, ToOffset, ToPoint as _,
language_settings::all_language_settings,
@@ -51,13 +51,34 @@ struct OllamaGenerateResponse {
response: String,
}
+const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("ollama");
+
pub fn is_available(cx: &App) -> bool {
- let ollama_provider_id = LanguageModelProviderId::new("ollama");
LanguageModelRegistry::read_global(cx)
- .provider(&ollama_provider_id)
+ .provider(&PROVIDER_ID)
.is_some_and(|provider| provider.is_authenticated(cx))
}
+pub fn ensure_authenticated(cx: &mut App) {
+ if let Some(provider) = LanguageModelRegistry::read_global(cx).provider(&PROVIDER_ID) {
+ provider.authenticate(cx).detach_and_log_err(cx);
+ }
+}
+
+pub fn fetch_models(cx: &mut App) -> Vec<SharedString> {
+ let Some(provider) = LanguageModelRegistry::read_global(cx).provider(&PROVIDER_ID) else {
+ return Vec::new();
+ };
+ provider.authenticate(cx).detach_and_log_err(cx);
+ let mut models: Vec<SharedString> = provider
+ .provided_models(cx)
+ .into_iter()
+ .map(|model| SharedString::from(model.id().0.to_string()))
+ .collect();
+ models.sort();
+ models
+}
+
/// Output from the Ollama HTTP request, containing all data needed to create the prediction result.
struct OllamaRequestOutput {
created_at: String,
@@ -560,6 +560,7 @@ impl EditPredictionButton {
cx.observe_global::<EditPredictionStore>(move |_, cx| cx.notify())
.detach();
+ edit_prediction::ollama::ensure_authenticated(cx);
let sweep_api_token_task = edit_prediction::sweep_ai::load_sweep_api_token(cx);
let mercury_api_token_task = edit_prediction::mercury::load_mercury_api_token(cx);
@@ -31,7 +31,6 @@ fuzzy.workspace = true
gpui.workspace = true
heck.workspace = true
itertools.workspace = true
-language_model.workspace = true
language_models.workspace = true
language.workspace = true
log.workspace = true
@@ -2,7 +2,6 @@ use std::sync::Arc;
use fuzzy::StringMatch;
use gpui::{AnyElement, App, Context, DismissEvent, ReadGlobal, SharedString, Task, Window, px};
-use language_model::{LanguageModelProviderId, LanguageModelRegistry};
use picker::{Picker, PickerDelegate};
use settings::SettingsStore;
use ui::{ListItem, ListItemSpacing, PopoverMenu, prelude::*};
@@ -28,7 +27,7 @@ impl OllamaModelPickerDelegate {
on_model_changed: impl Fn(SharedString, &mut Window, &mut App) + 'static,
cx: &mut Context<OllamaModelPicker>,
) -> Self {
- let mut models = Self::fetch_ollama_models(cx);
+ let mut models = edit_prediction::ollama::fetch_models(cx);
    let current_in_list = models.contains(&current_model);
if !current_model.is_empty() && !current_in_list {
@@ -58,28 +57,6 @@ impl OllamaModelPickerDelegate {
on_model_changed: Arc::new(on_model_changed),
}
}
-
- fn fetch_ollama_models(cx: &mut App) -> Vec<SharedString> {
- let ollama_provider_id = LanguageModelProviderId::new("ollama");
-
- let Some(provider) = LanguageModelRegistry::read_global(cx).provider(&ollama_provider_id)
- else {
- return Vec::new();
- };
-
- // Re-fetch models in case ollama has been started or updated since
- // Zed was launched.
- provider.authenticate(cx).detach_and_log_err(cx);
-
- let mut models: Vec<SharedString> = provider
- .provided_models(cx)
- .into_iter()
- .map(|model| SharedString::from(model.id().0.to_string()))
- .collect();
-
- models.sort();
- models
- }
}
impl PickerDelegate for OllamaModelPickerDelegate {