ollama.rs

  1use anyhow::{Result, anyhow};
  2use fs::Fs;
  3use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream};
  4use futures::{Stream, TryFutureExt, stream};
  5use gpui::{AnyView, App, AsyncApp, Context, CursorStyle, Entity, Task};
  6use http_client::HttpClient;
  7use language_model::{
  8    AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
  9    LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
 10    LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
 11    LanguageModelRequestTool, LanguageModelToolChoice, LanguageModelToolUse,
 12    LanguageModelToolUseId, MessageContent, RateLimiter, Role, StopReason, TokenUsage,
 13};
 14use menu;
 15use ollama::{
 16    ChatMessage, ChatOptions, ChatRequest, ChatResponseDelta, OLLAMA_API_URL, OllamaFunctionCall,
 17    OllamaFunctionTool, OllamaToolCall, get_models, show_model, stream_chat_completion,
 18};
 19pub use settings::OllamaAvailableModel as AvailableModel;
 20use settings::{Settings, SettingsStore, update_settings_file};
 21use std::pin::Pin;
 22use std::sync::LazyLock;
 23use std::sync::atomic::{AtomicU64, Ordering};
 24use std::{collections::HashMap, sync::Arc};
 25use ui::{ButtonLike, ElevationIndex, List, Tooltip, prelude::*};
 26use ui_input::InputField;
 27use zed_env_vars::{EnvVar, env_var};
 28
 29use crate::AllLanguageModelSettings;
 30use crate::api_key::ApiKeyState;
 31use crate::ui::InstructionListItem;
 32
/// Landing page for downloading the Ollama installer.
const OLLAMA_DOWNLOAD_URL: &str = "https://ollama.com/download";
/// Ollama's public model library (browsable model catalog).
const OLLAMA_LIBRARY_URL: &str = "https://ollama.com/library";
/// Ollama project home page.
const OLLAMA_SITE: &str = "https://ollama.com/";

/// Stable machine-readable identifier for this provider.
const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("ollama");
/// Human-readable provider name shown in the UI.
const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Ollama");

/// Name of the environment variable that can supply the Ollama API key.
const API_KEY_ENV_VAR_NAME: &str = "OLLAMA_API_KEY";
/// Lazily-read `OLLAMA_API_KEY` value from the process environment.
static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);
 42
/// User-configurable settings for the Ollama provider.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct OllamaSettings {
    // Base URL of the Ollama server; when empty, `OLLAMA_API_URL` is used.
    pub api_url: String,
    // Models declared in settings; merged over (and overriding) the list
    // fetched from the server in `provided_models`.
    pub available_models: Vec<AvailableModel>,
}
 48
/// Language-model provider backed by a local or remote Ollama server.
pub struct OllamaLanguageModelProvider {
    // HTTP client shared with every model instance this provider creates.
    http_client: Arc<dyn HttpClient>,
    // Shared, observable provider state (API key, fetched models).
    state: Entity<State>,
}
 53
/// Shared provider state, observed by the configuration UI and read by each
/// model instance when issuing requests.
pub struct State {
    // API key storage, keyed by the API URL it was stored for.
    api_key_state: ApiKeyState,
    http_client: Arc<dyn HttpClient>,
    // Models discovered from the server (embedding models filtered out).
    fetched_models: Vec<ollama::Model>,
    // Handle to the in-flight model-fetch task; replaced on every restart.
    fetch_model_task: Option<Task<Result<()>>>,
}
 60
impl State {
    /// A non-empty model list is used as the proxy for "authenticated":
    /// it proves the server is reachable and accepted our credentials (if any).
    fn is_authenticated(&self) -> bool {
        !self.fetched_models.is_empty()
    }

    /// Stores (or clears, when `None`) the API key for the current API URL,
    /// then re-fetches the model list with the new credentials.
    fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
        let api_url = OllamaLanguageModelProvider::api_url(cx);
        let task = self
            .api_key_state
            .store(api_url, api_key, |this| &mut this.api_key_state, cx);

        // Drop stale models immediately; the restarted fetch repopulates them.
        self.fetched_models.clear();
        cx.spawn(async move |this, cx| {
            let result = task.await;
            // Restart the fetch regardless of whether storing the key
            // succeeded; `.ok()` swallows the entity-dropped case only.
            this.update(cx, |this, cx| this.restart_fetch_models_task(cx))
                .ok();
            result
        })
    }

    /// Loads the API key (from storage or the env var) if needed, then kicks
    /// off a model fetch to probe the server.
    fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        let api_url = OllamaLanguageModelProvider::api_url(cx);
        let task = self.api_key_state.load_if_needed(
            api_url,
            &API_KEY_ENV_VAR,
            |this| &mut this.api_key_state,
            cx,
        );

        // Always try to fetch models - if no API key is needed (local Ollama), it will work
        // If API key is needed and provided, it will work
        // If API key is needed and not provided, it will fail gracefully
        cx.spawn(async move |this, cx| {
            let result = task.await;
            this.update(cx, |this, cx| this.restart_fetch_models_task(cx))
                .ok();
            result
        })
    }

    /// Fetches the model list from the server, then queries each model's
    /// capabilities (tools/vision/thinking) with bounded concurrency, and
    /// stores the sorted result in `fetched_models`.
    fn fetch_models(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
        let http_client = Arc::clone(&self.http_client);
        let api_url = OllamaLanguageModelProvider::api_url(cx);
        let api_key = self.api_key_state.key(&api_url);

        // As a proxy for the server being "authenticated", we'll check if its up by fetching the models
        cx.spawn(async move |this, cx| {
            let models = get_models(http_client.as_ref(), &api_url, api_key.as_deref()).await?;

            let tasks = models
                .into_iter()
                // Since there is no metadata from the Ollama API
                // indicating which models are embedding models,
                // simply filter out models with "-embed" in their name
                .filter(|model| !model.name.contains("-embed"))
                .map(|model| {
                    let http_client = Arc::clone(&http_client);
                    let api_url = api_url.clone();
                    let api_key = api_key.clone();
                    async move {
                        let name = model.name.as_str();
                        // `show_model` returns per-model capability metadata.
                        let model =
                            show_model(http_client.as_ref(), &api_url, api_key.as_deref(), name)
                                .await?;
                        let ollama_model = ollama::Model::new(
                            name,
                            None,
                            model.context_length,
                            Some(model.supports_tools()),
                            Some(model.supports_vision()),
                            Some(model.supports_thinking()),
                        );
                        Ok(ollama_model)
                    }
                });

            // Rate-limit capability fetches
            // since there is an arbitrary number of models available
            // NOTE: collecting into `Result<Vec<_>>` means one failed
            // `show_model` call fails the whole fetch.
            let mut ollama_models: Vec<_> = futures::stream::iter(tasks)
                .buffer_unordered(5)
                .collect::<Vec<Result<_>>>()
                .await
                .into_iter()
                .collect::<Result<Vec<_>>>()?;

            ollama_models.sort_by(|a, b| a.name.cmp(&b.name));

            this.update(cx, |this, cx| {
                this.fetched_models = ollama_models;
                cx.notify();
            })
        })
    }

    /// Replaces any in-flight fetch with a fresh one; the previous task is
    /// dropped (which cancels it under gpui's task semantics).
    fn restart_fetch_models_task(&mut self, cx: &mut Context<Self>) {
        let task = self.fetch_models(cx);
        self.fetch_model_task.replace(task);
    }
}
160
161impl OllamaLanguageModelProvider {
162    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
163        let this = Self {
164            http_client: http_client.clone(),
165            state: cx.new(|cx| {
166                cx.observe_global::<SettingsStore>({
167                    let mut last_settings = OllamaLanguageModelProvider::settings(cx).clone();
168                    move |this: &mut State, cx| {
169                        let current_settings = OllamaLanguageModelProvider::settings(cx);
170                        let settings_changed = current_settings != &last_settings;
171                        if settings_changed {
172                            let url_changed = last_settings.api_url != current_settings.api_url;
173                            last_settings = current_settings.clone();
174                            if url_changed {
175                                this.fetched_models.clear();
176                                this.authenticate(cx).detach();
177                            }
178                            cx.notify();
179                        }
180                    }
181                })
182                .detach();
183
184                State {
185                    http_client,
186                    fetched_models: Default::default(),
187                    fetch_model_task: None,
188                    api_key_state: ApiKeyState::new(Self::api_url(cx)),
189                }
190            }),
191        };
192        this
193    }
194
195    fn settings(cx: &App) -> &OllamaSettings {
196        &AllLanguageModelSettings::get_global(cx).ollama
197    }
198
199    fn api_url(cx: &App) -> SharedString {
200        let api_url = &Self::settings(cx).api_url;
201        if api_url.is_empty() {
202            OLLAMA_API_URL.into()
203        } else {
204            SharedString::new(api_url.as_str())
205        }
206    }
207}
208
209impl LanguageModelProviderState for OllamaLanguageModelProvider {
210    type ObservableEntity = State;
211
212    fn observable_entity(&self) -> Option<Entity<Self::ObservableEntity>> {
213        Some(self.state.clone())
214    }
215}
216
impl LanguageModelProvider for OllamaLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn icon(&self) -> IconName {
        IconName::AiOllama
    }

    fn default_model(&self, _: &App) -> Option<Arc<dyn LanguageModel>> {
        // We shouldn't try to select default model, because it might lead to a load call for an unloaded model.
        // In a constrained environment where user might not have enough resources it'll be a bad UX to select something
        // to load by default.
        None
    }

    fn default_fast_model(&self, _: &App) -> Option<Arc<dyn LanguageModel>> {
        // See explanation for default_model.
        None
    }

    /// Merges models fetched from the server with models declared in settings.
    /// A settings entry matches a fetched model by base name (the part before
    /// the first `:` tag) and overrides its metadata; entries with no match
    /// are added as standalone models.
    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        let mut models: HashMap<String, ollama::Model> = HashMap::new();

        // Add models from the Ollama API
        for model in self.state.read(cx).fetched_models.iter() {
            models.insert(model.name.clone(), model.clone());
        }

        // Override with available models from settings
        for setting_model in &OllamaLanguageModelProvider::settings(cx).available_models {
            // NOTE(review): `find` stops at the first base-name match, so if
            // several tags of the same base model were fetched, only one of
            // them receives this settings override — confirm that's intended.
            let setting_base = setting_model.name.split(':').next().unwrap();
            if let Some(model) = models
                .values_mut()
                .find(|m| m.name.split(':').next().unwrap() == setting_base)
            {
                model.max_tokens = setting_model.max_tokens;
                model.display_name = setting_model.display_name.clone();
                model.keep_alive = setting_model.keep_alive.clone();
                model.supports_tools = setting_model.supports_tools;
                model.supports_vision = setting_model.supports_images;
                model.supports_thinking = setting_model.supports_thinking;
            } else {
                models.insert(
                    setting_model.name.clone(),
                    ollama::Model {
                        name: setting_model.name.clone(),
                        display_name: setting_model.display_name.clone(),
                        max_tokens: setting_model.max_tokens,
                        keep_alive: setting_model.keep_alive.clone(),
                        supports_tools: setting_model.supports_tools,
                        supports_vision: setting_model.supports_images,
                        supports_thinking: setting_model.supports_thinking,
                    },
                );
            }
        }

        // Wrap each merged model in an `OllamaLanguageModel`, sorted by name
        // for a stable picker order.
        let mut models = models
            .into_values()
            .map(|model| {
                Arc::new(OllamaLanguageModel {
                    id: LanguageModelId::from(model.name.clone()),
                    model,
                    http_client: self.http_client.clone(),
                    request_limiter: RateLimiter::new(4),
                    state: self.state.clone(),
                }) as Arc<dyn LanguageModel>
            })
            .collect::<Vec<_>>();
        models.sort_by_key(|model| model.name());
        models
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(
        &self,
        _target_agent: language_model::ConfigurationViewTargetAgent,
        window: &mut Window,
        cx: &mut App,
    ) -> AnyView {
        let state = self.state.clone();
        cx.new(|cx| ConfigurationView::new(state, window, cx))
            .into()
    }

    /// Clearing the stored key doubles as "reset credentials".
    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state
            .update(cx, |state, cx| state.set_api_key(None, cx))
    }
}
319
/// A single Ollama model exposed through Zed's language-model interface.
pub struct OllamaLanguageModel {
    // Model id derived from the Ollama model name.
    id: LanguageModelId,
    model: ollama::Model,
    http_client: Arc<dyn HttpClient>,
    // Caps concurrent completion requests (4 at a time).
    request_limiter: RateLimiter,
    // Shared provider state, used to look up the API key at request time.
    state: Entity<State>,
}
327
impl OllamaLanguageModel {
    /// Translates Zed's provider-agnostic request into an Ollama [`ChatRequest`].
    ///
    /// Tool results embedded in user messages are split out into dedicated
    /// `Tool` messages; images are forwarded only when the model supports
    /// vision; thinking text and tool uses are folded into assistant messages.
    fn to_ollama_request(&self, request: LanguageModelRequest) -> ChatRequest {
        let supports_vision = self.model.supports_vision.unwrap_or(false);

        let mut messages = Vec::with_capacity(request.messages.len());

        for mut msg in request.messages.into_iter() {
            // Collect image payloads only for vision models; otherwise none.
            let images = if supports_vision {
                msg.content
                    .iter()
                    .filter_map(|content| match content {
                        MessageContent::Image(image) => Some(image.source.to_string()),
                        _ => None,
                    })
                    .collect::<Vec<String>>()
            } else {
                vec![]
            };

            match msg.role {
                Role::User => {
                    // Ollama expects tool results as separate `Tool` messages,
                    // so pull them out of the user content first.
                    for tool_result in msg
                        .content
                        .extract_if(.., |x| matches!(x, MessageContent::ToolResult(..)))
                    {
                        match tool_result {
                            MessageContent::ToolResult(tool_result) => {
                                messages.push(ChatMessage::Tool {
                                    tool_name: tool_result.tool_name.to_string(),
                                    content: tool_result.content.to_str().unwrap_or("").to_string(),
                                })
                            }
                            _ => unreachable!("Only tool result should be extracted"),
                        }
                    }
                    // Whatever remains (text, images) becomes the user message.
                    if !msg.content.is_empty() {
                        messages.push(ChatMessage::User {
                            content: msg.string_contents(),
                            images: if images.is_empty() {
                                None
                            } else {
                                Some(images)
                            },
                        })
                    }
                }
                Role::Assistant => {
                    let content = msg.string_contents();
                    let mut thinking = None;
                    let mut tool_calls = Vec::new();
                    for content in msg.content.into_iter() {
                        match content {
                            // Only the last non-empty thinking chunk is kept.
                            MessageContent::Thinking { text, .. } if !text.is_empty() => {
                                thinking = Some(text)
                            }
                            MessageContent::ToolUse(tool_use) => {
                                tool_calls.push(OllamaToolCall::Function(OllamaFunctionCall {
                                    name: tool_use.name.to_string(),
                                    arguments: tool_use.input,
                                }));
                            }
                            _ => (),
                        }
                    }
                    messages.push(ChatMessage::Assistant {
                        content,
                        tool_calls: Some(tool_calls),
                        images: if images.is_empty() {
                            None
                        } else {
                            Some(images)
                        },
                        thinking,
                    })
                }
                Role::System => messages.push(ChatMessage::System {
                    content: msg.string_contents(),
                }),
            }
        }
        ChatRequest {
            model: self.model.name.clone(),
            messages,
            keep_alive: self.model.keep_alive.clone().unwrap_or_default(),
            stream: true,
            options: Some(ChatOptions {
                num_ctx: Some(self.model.max_tokens),
                stop: Some(request.stop),
                // Default to temperature 1.0 when the request doesn't set one.
                temperature: request.temperature.or(Some(1.0)),
                ..Default::default()
            }),
            // Request thinking only when the model supports it AND the caller
            // allows it; `None` means support is unknown.
            think: self
                .model
                .supports_thinking
                .map(|supports_thinking| supports_thinking && request.thinking_allowed),
            tools: if self.model.supports_tools.unwrap_or(false) {
                request.tools.into_iter().map(tool_into_ollama).collect()
            } else {
                vec![]
            },
        }
    }
}
431
impl LanguageModel for OllamaLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn supports_tools(&self) -> bool {
        self.model.supports_tools.unwrap_or(false)
    }

    fn supports_images(&self) -> bool {
        self.model.supports_vision.unwrap_or(false)
    }

    /// No tool-choice mode is supported for Ollama models.
    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto => false,
            LanguageModelToolChoice::Any => false,
            LanguageModelToolChoice::None => false,
        }
    }

    fn telemetry_id(&self) -> String {
        format!("ollama/{}", self.model.id())
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_token_count()
    }

    /// Estimates token usage as total characters divided by four, since
    /// Ollama exposes no tokenize endpoint.
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        _cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        // There is no endpoint for this _yet_ in Ollama
        // see: https://github.com/ollama/ollama/issues/1716 and https://github.com/ollama/ollama/issues/3582
        let token_count = request
            .messages
            .iter()
            .map(|msg| msg.string_contents().chars().count())
            .sum::<usize>()
            / 4;

        async move { Ok(token_count as u64) }.boxed()
    }

    /// Streams a chat completion: reads the API key/URL from shared state,
    /// gates the request through the rate limiter, and maps Ollama deltas to
    /// completion events.
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>,
            LanguageModelCompletionError,
        >,
    > {
        let request = self.to_ollama_request(request);

        let http_client = self.http_client.clone();
        // `read_with` fails only if the app has shut down underneath us.
        let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
            let api_url = OllamaLanguageModelProvider::api_url(cx);
            (state.api_key_state.key(&api_url), api_url)
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped").into())).boxed();
        };

        let future = self.request_limiter.stream(async move {
            let stream =
                stream_chat_completion(http_client.as_ref(), &api_url, api_key.as_deref(), request)
                    .await?;
            let stream = map_to_language_model_completion_events(stream);
            Ok(stream)
        });

        future.map_ok(|f| f.boxed()).boxed()
    }
}
522
523fn map_to_language_model_completion_events(
524    stream: Pin<Box<dyn Stream<Item = anyhow::Result<ChatResponseDelta>> + Send>>,
525) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
526    // Used for creating unique tool use ids
527    static TOOL_CALL_COUNTER: AtomicU64 = AtomicU64::new(0);
528
529    struct State {
530        stream: Pin<Box<dyn Stream<Item = anyhow::Result<ChatResponseDelta>> + Send>>,
531        used_tools: bool,
532    }
533
534    // We need to create a ToolUse and Stop event from a single
535    // response from the original stream
536    let stream = stream::unfold(
537        State {
538            stream,
539            used_tools: false,
540        },
541        async move |mut state| {
542            let response = state.stream.next().await?;
543
544            let delta = match response {
545                Ok(delta) => delta,
546                Err(e) => {
547                    let event = Err(LanguageModelCompletionError::from(anyhow!(e)));
548                    return Some((vec![event], state));
549                }
550            };
551
552            let mut events = Vec::new();
553
554            match delta.message {
555                ChatMessage::User { content, images: _ } => {
556                    events.push(Ok(LanguageModelCompletionEvent::Text(content)));
557                }
558                ChatMessage::System { content } => {
559                    events.push(Ok(LanguageModelCompletionEvent::Text(content)));
560                }
561                ChatMessage::Tool { content, .. } => {
562                    events.push(Ok(LanguageModelCompletionEvent::Text(content)));
563                }
564                ChatMessage::Assistant {
565                    content,
566                    tool_calls,
567                    images: _,
568                    thinking,
569                } => {
570                    if let Some(text) = thinking {
571                        events.push(Ok(LanguageModelCompletionEvent::Thinking {
572                            text,
573                            signature: None,
574                        }));
575                    }
576
577                    if let Some(tool_call) = tool_calls.and_then(|v| v.into_iter().next()) {
578                        match tool_call {
579                            OllamaToolCall::Function(function) => {
580                                let tool_id = format!(
581                                    "{}-{}",
582                                    &function.name,
583                                    TOOL_CALL_COUNTER.fetch_add(1, Ordering::Relaxed)
584                                );
585                                let event =
586                                    LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
587                                        id: LanguageModelToolUseId::from(tool_id),
588                                        name: Arc::from(function.name),
589                                        raw_input: function.arguments.to_string(),
590                                        input: function.arguments,
591                                        is_input_complete: true,
592                                    });
593                                events.push(Ok(event));
594                                state.used_tools = true;
595                            }
596                        }
597                    } else if !content.is_empty() {
598                        events.push(Ok(LanguageModelCompletionEvent::Text(content)));
599                    }
600                }
601            };
602
603            if delta.done {
604                events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
605                    input_tokens: delta.prompt_eval_count.unwrap_or(0),
606                    output_tokens: delta.eval_count.unwrap_or(0),
607                    cache_creation_input_tokens: 0,
608                    cache_read_input_tokens: 0,
609                })));
610                if state.used_tools {
611                    state.used_tools = false;
612                    events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)));
613                } else {
614                    events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
615                }
616            }
617
618            Some((events, state))
619        },
620    );
621
622    stream.flat_map(futures::stream::iter)
623}
624
/// Settings-panel view for configuring the Ollama API key and URL.
struct ConfigurationView {
    api_key_editor: Entity<InputField>,
    api_url_editor: Entity<InputField>,
    // Shared provider state; observed so the view re-renders on changes.
    state: Entity<State>,
}
630
631impl ConfigurationView {
    /// Builds the configuration UI, pre-filling the URL field with the
    /// currently effective API URL and re-rendering whenever `state` changes.
    pub fn new(state: Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        // Placeholder illustrates the shape of a key; it is not a real secret.
        let api_key_editor = cx.new(|cx| InputField::new(window, cx, "63e02e...").label("API key"));

        let api_url_editor = cx.new(|cx| {
            let input = InputField::new(window, cx, OLLAMA_API_URL).label("API URL");
            input.set_text(OllamaLanguageModelProvider::api_url(cx), window, cx);
            input
        });

        // Re-render when provider state changes (e.g. model fetch finished).
        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        Self {
            api_key_editor,
            api_url_editor,
            state,
        }
    }
652
653    fn retry_connection(&self, cx: &mut App) {
654        self.state
655            .update(cx, |state, cx| state.restart_fetch_models_task(cx));
656    }
657
    /// Saves the API key typed into the editor (triggered by `menu::Confirm`),
    /// ignoring empty input.
    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
        if api_key.is_empty() {
            return;
        }

        // url changes can cause the editor to be displayed again
        self.api_key_editor
            .update(cx, |input, cx| input.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
                .await
        })
        .detach_and_log_err(cx);
    }
676
    /// Clears the stored API key and empties the key editor.
    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor
            .update(cx, |input, cx| input.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(None, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }
691
    /// Persists the API URL from the editor into the settings file, skipping
    /// empty input and no-op changes.
    fn save_api_url(&mut self, cx: &mut Context<Self>) {
        let api_url = self.api_url_editor.read(cx).text(cx).trim().to_string();
        let current_url = OllamaLanguageModelProvider::api_url(cx);
        // NOTE(review): comparison is through references (`String` vs
        // `SharedString`); presumably relies on a cross-type PartialEq impl.
        if !api_url.is_empty() && &api_url != &current_url {
            let fs = <dyn Fs>::global(cx);
            update_settings_file(fs, cx, move |settings, _| {
                settings
                    .language_models
                    .get_or_insert_default()
                    .ollama
                    .get_or_insert_default()
                    .api_url = Some(api_url);
            });
        }
    }
707
    /// Resets the API URL setting back to the default and clears the editor.
    fn reset_api_url(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_url_editor
            .update(cx, |input, cx| input.set_text("", window, cx));
        let fs = <dyn Fs>::global(cx);
        update_settings_file(fs, cx, |settings, _cx| {
            // Only touch the setting if an Ollama section already exists.
            if let Some(settings) = settings
                .language_models
                .as_mut()
                .and_then(|models| models.ollama.as_mut())
            {
                settings.api_url = Some(OLLAMA_API_URL.into());
            }
        });
        cx.notify();
    }
723
724    fn render_instructions() -> Div {
725        v_flex()
726            .gap_2()
727            .child(Label::new(
728                "Run LLMs locally on your machine with Ollama, or connect to an Ollama server. \
729                Can provide access to Llama, Mistral, Gemma, and hundreds of other models.",
730            ))
731            .child(Label::new("To use local Ollama:"))
732            .child(
733                List::new()
734                    .child(InstructionListItem::new(
735                        "Download and install Ollama from",
736                        Some("ollama.com"),
737                        Some("https://ollama.com/download"),
738                    ))
739                    .child(InstructionListItem::text_only(
740                        "Start Ollama and download a model: `ollama run gpt-oss:20b`",
741                    ))
742                    .child(InstructionListItem::text_only(
743                        "Click 'Connect' below to start using Ollama in Zed",
744                    )),
745            )
746            .child(Label::new(
747                "Alternatively, you can connect to an Ollama server by specifying its \
748                URL and API key (may not be required):",
749            ))
750    }
751
    /// Renders either the key-entry field (no key yet) or a "key configured"
    /// banner with a reset button (key present).
    fn render_api_key_editor(&self, cx: &Context<Self>) -> Div {
        let state = self.state.read(cx);
        // Keys from the environment can't be reset from the UI.
        let env_var_set = state.api_key_state.is_from_env_var();

        if !state.api_key_state.has_key() {
            v_flex()
              .on_action(cx.listener(Self::save_api_key))
              .child(self.api_key_editor.clone())
              .child(
                  Label::new(
                      format!("You can also assign the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed.")
                  )
                  .size(LabelSize::Small)
                  .color(Color::Muted),
              )
        } else {
            h_flex()
                .p_3()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().elevated_surface_background)
                .child(
                    h_flex()
                        .gap_2()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(
                            Label::new(
                                if env_var_set {
                                    format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable.")
                                } else {
                                    "API key configured".to_string()
                                }
                            )
                        )
                )
                .child(
                    Button::new("reset-api-key", "Reset API Key")
                        .label_size(LabelSize::Small)
                        .icon(IconName::Undo)
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .layer(ElevationIndex::ModalSurface)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
        }
    }
803
804    fn render_api_url_editor(&self, cx: &Context<Self>) -> Div {
805        let api_url = OllamaLanguageModelProvider::api_url(cx);
806        let custom_api_url_set = api_url != OLLAMA_API_URL;
807
808        if custom_api_url_set {
809            h_flex()
810                .p_3()
811                .justify_between()
812                .rounded_md()
813                .border_1()
814                .border_color(cx.theme().colors().border)
815                .bg(cx.theme().colors().elevated_surface_background)
816                .child(
817                    h_flex()
818                        .gap_2()
819                        .child(Icon::new(IconName::Check).color(Color::Success))
820                        .child(v_flex().gap_1().child(Label::new(api_url))),
821                )
822                .child(
823                    Button::new("reset-api-url", "Reset API URL")
824                        .label_size(LabelSize::Small)
825                        .icon(IconName::Undo)
826                        .icon_size(IconSize::Small)
827                        .icon_position(IconPosition::Start)
828                        .layer(ElevationIndex::ModalSurface)
829                        .on_click(
830                            cx.listener(|this, _, window, cx| this.reset_api_url(window, cx)),
831                        ),
832                )
833        } else {
834            v_flex()
835                .on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| {
836                    this.save_api_url(cx);
837                    cx.notify();
838                }))
839                .gap_2()
840                .child(self.api_url_editor.clone())
841        }
842    }
843}
844
impl Render for ConfigurationView {
    // Top-level layout of the Ollama configuration panel: instructions,
    // URL editor, key editor, then a footer row whose contents depend on
    // whether the provider is currently connected/authenticated.
    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let is_authenticated = self.state.read(cx).is_authenticated();

        v_flex()
            .gap_2()
            .child(Self::render_instructions())
            .child(self.render_api_url_editor(cx))
            .child(self.render_api_key_editor(cx))
            .child(
                h_flex()
                    .w_full()
                    .justify_between()
                    .gap_2()
                    .child(
                        h_flex()
                            .w_full()
                            .gap_2()
                            // Left side: link to the Ollama site when connected,
                            // otherwise a download link for first-time setup.
                            .map(|this| {
                                if is_authenticated {
                                    this.child(
                                        Button::new("ollama-site", "Ollama")
                                            .style(ButtonStyle::Subtle)
                                            .icon(IconName::ArrowUpRight)
                                            .icon_size(IconSize::XSmall)
                                            .icon_color(Color::Muted)
                                            .on_click(move |_, _, cx| cx.open_url(OLLAMA_SITE))
                                            .into_any_element(),
                                    )
                                } else {
                                    this.child(
                                        Button::new("download_ollama_button", "Download Ollama")
                                            .style(ButtonStyle::Subtle)
                                            .icon(IconName::ArrowUpRight)
                                            .icon_size(IconSize::XSmall)
                                            .icon_color(Color::Muted)
                                            .on_click(move |_, _, cx| {
                                                cx.open_url(OLLAMA_DOWNLOAD_URL)
                                            })
                                            .into_any_element(),
                                    )
                                }
                            })
                            // Always available: browse the model library.
                            .child(
                                Button::new("view-models", "View All Models")
                                    .style(ButtonStyle::Subtle)
                                    .icon(IconName::ArrowUpRight)
                                    .icon_size(IconSize::XSmall)
                                    .icon_color(Color::Muted)
                                    .on_click(move |_, _, cx| cx.open_url(OLLAMA_LIBRARY_URL)),
                            ),
                    )
                    // Right side: connection status (with a refresh action) when
                    // authenticated, otherwise a "Connect" button to retry.
                    .map(|this| {
                        if is_authenticated {
                            this.child(
                                ButtonLike::new("connected")
                                    .disabled(true)
                                    .cursor_style(CursorStyle::Arrow)
                                    .child(
                                        h_flex()
                                            .gap_2()
                                            .child(Icon::new(IconName::Check).color(Color::Success))
                                            .child(Label::new("Connected"))
                                            .into_any_element(),
                                    )
                                    .child(
                                        IconButton::new("refresh-models", IconName::RotateCcw)
                                            .tooltip(Tooltip::text("Refresh models"))
                                            .on_click(cx.listener(|this, _, _, cx| {
                                                // Drop the cached model list before
                                                // reconnecting so it is re-fetched.
                                                this.state.update(cx, |state, _| {
                                                    state.fetched_models.clear();
                                                });
                                                this.retry_connection(cx);
                                            })),
                                    ),
                            )
                        } else {
                            this.child(
                                Button::new("retry_ollama_models", "Connect")
                                    .icon_position(IconPosition::Start)
                                    .icon_size(IconSize::XSmall)
                                    .icon(IconName::PlayOutlined)
                                    .on_click(
                                        cx.listener(move |this, _, _, cx| {
                                            this.retry_connection(cx)
                                        }),
                                    ),
                            )
                        }
                    }),
            )
    }
}
938
939fn tool_into_ollama(tool: LanguageModelRequestTool) -> ollama::OllamaTool {
940    ollama::OllamaTool::Function {
941        function: OllamaFunctionTool {
942            name: tool.name,
943            description: Some(tool.description),
944            parameters: Some(tool.input_schema),
945        },
946    }
947}