ollama.rs

  1use anyhow::{Result, anyhow};
  2use fs::Fs;
  3use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream};
  4use futures::{Stream, TryFutureExt, stream};
  5use gpui::{AnyView, App, AsyncApp, Context, Task};
  6use http_client::HttpClient;
  7use language_model::{
  8    AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
  9    LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
 10    LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
 11    LanguageModelRequestTool, LanguageModelToolChoice, LanguageModelToolUse,
 12    LanguageModelToolUseId, MessageContent, RateLimiter, Role, StopReason, TokenUsage,
 13};
 14use menu;
 15use ollama::{
 16    ChatMessage, ChatOptions, ChatRequest, ChatResponseDelta, OLLAMA_API_URL, OllamaFunctionCall,
 17    OllamaFunctionTool, OllamaToolCall, get_models, show_model, stream_chat_completion,
 18};
 19pub use settings::OllamaAvailableModel as AvailableModel;
 20use settings::{Settings, SettingsStore, update_settings_file};
 21use std::pin::Pin;
 22use std::sync::LazyLock;
 23use std::sync::atomic::{AtomicU64, Ordering};
 24use std::{collections::HashMap, sync::Arc};
 25use ui::{ButtonLike, ElevationIndex, List, Tooltip, prelude::*};
 26use ui_input::SingleLineInput;
 27use zed_env_vars::{EnvVar, env_var};
 28
 29use crate::AllLanguageModelSettings;
 30use crate::api_key::ApiKeyState;
 31use crate::ui::InstructionListItem;
 32
// Links surfaced in the configuration UI.
const OLLAMA_DOWNLOAD_URL: &str = "https://ollama.com/download";
const OLLAMA_LIBRARY_URL: &str = "https://ollama.com/library";
const OLLAMA_SITE: &str = "https://ollama.com/";

const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("ollama");
const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Ollama");

// Environment variable consulted for an API key when one is required
// (e.g. a remote Ollama server); a local server may need no key.
const API_KEY_ENV_VAR_NAME: &str = "OLLAMA_API_KEY";
static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);
 42
/// User-configurable settings for the Ollama provider.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct OllamaSettings {
    /// Base URL of the Ollama server; when empty, `OLLAMA_API_URL` is used.
    pub api_url: String,
    /// Models declared in settings; these override same-named models
    /// fetched from the server (see `provided_models`).
    pub available_models: Vec<AvailableModel>,
}
 48
/// Language-model provider backed by an Ollama server (local or remote).
pub struct OllamaLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    // Shared, observable provider state (API key, fetched model list).
    state: gpui::Entity<State>,
}
 53
/// Observable state shared between the provider, its models, and the
/// configuration view.
pub struct State {
    // API key scoped to the current API URL (may come from the env var).
    api_key_state: ApiKeyState,
    http_client: Arc<dyn HttpClient>,
    // Models discovered from the server via `get_models`/`show_model`.
    fetched_models: Vec<ollama::Model>,
    // Holds the in-flight model fetch; replaced whenever a refetch starts.
    fetch_model_task: Option<Task<Result<()>>>,
}
 60
impl State {
    /// "Authenticated" is proxied by a successful model fetch: if we could
    /// list models, the server is reachable and any required key worked.
    fn is_authenticated(&self) -> bool {
        !self.fetched_models.is_empty()
    }

    /// Stores (or clears, when `None`) the API key for the current API URL,
    /// then restarts the model fetch under the new credentials.
    fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
        let api_url = OllamaLanguageModelProvider::api_url(cx);
        let task = self
            .api_key_state
            .store(api_url, api_key, |this| &mut this.api_key_state, cx);

        // Previously fetched models were tied to the old credentials.
        self.fetched_models.clear();
        cx.spawn(async move |this, cx| {
            let result = task.await;
            // Refetch regardless of whether storing the key succeeded;
            // a local server may not need a key at all.
            this.update(cx, |this, cx| this.restart_fetch_models_task(cx))
                .ok();
            result
        })
    }

    /// Loads the API key (from storage or the `OLLAMA_API_KEY` env var) if
    /// needed, then kicks off a model fetch.
    fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        let api_url = OllamaLanguageModelProvider::api_url(cx);
        let task = self.api_key_state.load_if_needed(
            api_url,
            &API_KEY_ENV_VAR,
            |this| &mut this.api_key_state,
            cx,
        );

        // Always try to fetch models - if no API key is needed (local Ollama), it will work
        // If API key is needed and provided, it will work
        // If API key is needed and not provided, it will fail gracefully
        cx.spawn(async move |this, cx| {
            let result = task.await;
            this.update(cx, |this, cx| this.restart_fetch_models_task(cx))
                .ok();
            result
        })
    }

    /// Lists models from the server, probes each one's capabilities
    /// (tools/vision/thinking) via `show_model`, and stores the sorted
    /// result in `fetched_models`.
    fn fetch_models(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
        let http_client = Arc::clone(&self.http_client);
        let api_url = OllamaLanguageModelProvider::api_url(cx);
        let api_key = self.api_key_state.key(&api_url);

        // As a proxy for the server being "authenticated", we'll check if its up by fetching the models
        cx.spawn(async move |this, cx| {
            let models =
                get_models(http_client.as_ref(), &api_url, api_key.as_deref(), None).await?;

            let tasks = models
                .into_iter()
                // Since there is no metadata from the Ollama API
                // indicating which models are embedding models,
                // simply filter out models with "-embed" in their name
                .filter(|model| !model.name.contains("-embed"))
                .map(|model| {
                    let http_client = Arc::clone(&http_client);
                    let api_url = api_url.clone();
                    let api_key = api_key.clone();
                    async move {
                        let name = model.name.as_str();
                        let capabilities =
                            show_model(http_client.as_ref(), &api_url, api_key.as_deref(), name)
                                .await?;
                        let ollama_model = ollama::Model::new(
                            name,
                            None,
                            None,
                            Some(capabilities.supports_tools()),
                            Some(capabilities.supports_vision()),
                            Some(capabilities.supports_thinking()),
                        );
                        Ok(ollama_model)
                    }
                });

            // Rate-limit capability fetches
            // since there is an arbitrary number of models available
            // NOTE: a single failing `show_model` call fails the whole fetch
            // because the results are collected into one `Result`.
            let mut ollama_models: Vec<_> = futures::stream::iter(tasks)
                .buffer_unordered(5)
                .collect::<Vec<Result<_>>>()
                .await
                .into_iter()
                .collect::<Result<Vec<_>>>()?;

            ollama_models.sort_by(|a, b| a.name.cmp(&b.name));

            this.update(cx, |this, cx| {
                this.fetched_models = ollama_models;
                cx.notify();
            })
        })
    }

    /// Starts a fresh model fetch, replacing (and thereby dropping) any
    /// previously stored fetch task.
    fn restart_fetch_models_task(&mut self, cx: &mut Context<Self>) {
        let task = self.fetch_models(cx);
        self.fetch_model_task.replace(task);
    }
}
161
162impl OllamaLanguageModelProvider {
163    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
164        let this = Self {
165            http_client: http_client.clone(),
166            state: cx.new(|cx| {
167                cx.observe_global::<SettingsStore>({
168                    let mut last_settings = OllamaLanguageModelProvider::settings(cx).clone();
169                    move |this: &mut State, cx| {
170                        let current_settings = OllamaLanguageModelProvider::settings(cx);
171                        let settings_changed = current_settings != &last_settings;
172                        if settings_changed {
173                            let url_changed = last_settings.api_url != current_settings.api_url;
174                            last_settings = current_settings.clone();
175                            if url_changed {
176                                this.fetched_models.clear();
177                                this.authenticate(cx).detach();
178                            }
179                            cx.notify();
180                        }
181                    }
182                })
183                .detach();
184
185                State {
186                    http_client,
187                    fetched_models: Default::default(),
188                    fetch_model_task: None,
189                    api_key_state: ApiKeyState::new(Self::api_url(cx)),
190                }
191            }),
192        };
193        this
194    }
195
196    fn settings(cx: &App) -> &OllamaSettings {
197        &AllLanguageModelSettings::get_global(cx).ollama
198    }
199
200    fn api_url(cx: &App) -> SharedString {
201        let api_url = &Self::settings(cx).api_url;
202        if api_url.is_empty() {
203            OLLAMA_API_URL.into()
204        } else {
205            SharedString::new(api_url.as_str())
206        }
207    }
208}
209
impl LanguageModelProviderState for OllamaLanguageModelProvider {
    type ObservableEntity = State;

    /// Exposes the state entity so callers can observe authentication and
    /// model-list changes.
    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}
217
impl LanguageModelProvider for OllamaLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn icon(&self) -> IconName {
        IconName::AiOllama
    }

    fn default_model(&self, _: &App) -> Option<Arc<dyn LanguageModel>> {
        // We shouldn't try to select default model, because it might lead to a load call for an unloaded model.
        // In a constrained environment where user might not have enough resources it'll be a bad UX to select something
        // to load by default.
        None
    }

    fn default_fast_model(&self, _: &App) -> Option<Arc<dyn LanguageModel>> {
        // See explanation for default_model.
        None
    }

    /// Union of server-fetched models and settings-declared models, keyed
    /// by name; settings entries win on collision. Result is sorted by
    /// display name.
    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        let mut models: HashMap<String, ollama::Model> = HashMap::new();

        // Add models from the Ollama API
        for model in self.state.read(cx).fetched_models.iter() {
            models.insert(model.name.clone(), model.clone());
        }

        // Override with available models from settings
        for model in &OllamaLanguageModelProvider::settings(cx).available_models {
            models.insert(
                model.name.clone(),
                ollama::Model {
                    name: model.name.clone(),
                    display_name: model.display_name.clone(),
                    max_tokens: model.max_tokens,
                    keep_alive: model.keep_alive.clone(),
                    supports_tools: model.supports_tools,
                    supports_vision: model.supports_images,
                    supports_thinking: model.supports_thinking,
                },
            );
        }

        let mut models = models
            .into_values()
            .map(|model| {
                Arc::new(OllamaLanguageModel {
                    id: LanguageModelId::from(model.name.clone()),
                    model,
                    http_client: self.http_client.clone(),
                    // Each model instance gets its own limiter of 4
                    // concurrent requests.
                    request_limiter: RateLimiter::new(4),
                    state: self.state.clone(),
                }) as Arc<dyn LanguageModel>
            })
            .collect::<Vec<_>>();
        models.sort_by_key(|model| model.name());
        models
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(
        &self,
        _target_agent: language_model::ConfigurationViewTargetAgent,
        window: &mut Window,
        cx: &mut App,
    ) -> AnyView {
        let state = self.state.clone();
        cx.new(|cx| ConfigurationView::new(state, window, cx))
            .into()
    }

    /// Clears the stored API key (which also restarts the model fetch).
    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state
            .update(cx, |state, cx| state.set_api_key(None, cx))
    }
}
307
/// A single Ollama-served model exposed through the `LanguageModel` trait.
pub struct OllamaLanguageModel {
    id: LanguageModelId,
    model: ollama::Model,
    http_client: Arc<dyn HttpClient>,
    // Caps concurrent completion requests for this model.
    request_limiter: RateLimiter,
    // Provider state; consulted for the API URL and key at request time.
    state: gpui::Entity<State>,
}
315
impl OllamaLanguageModel {
    /// Translates a provider-agnostic request into Ollama's `ChatRequest`.
    ///
    /// Role mapping: tool results embedded in user messages become
    /// dedicated `Tool` messages; remaining user content becomes a `User`
    /// message; assistant messages carry text, tool calls, and optional
    /// thinking; system messages pass through unchanged.
    fn to_ollama_request(&self, request: LanguageModelRequest) -> ChatRequest {
        let supports_vision = self.model.supports_vision.unwrap_or(false);

        let mut messages = Vec::with_capacity(request.messages.len());

        for mut msg in request.messages.into_iter() {
            // Images are only forwarded when the model supports vision.
            let images = if supports_vision {
                msg.content
                    .iter()
                    .filter_map(|content| match content {
                        MessageContent::Image(image) => Some(image.source.to_string()),
                        _ => None,
                    })
                    .collect::<Vec<String>>()
            } else {
                vec![]
            };

            match msg.role {
                Role::User => {
                    // Remove tool results from the message in place and emit
                    // them as separate `Tool` messages ahead of the user text.
                    for tool_result in msg
                        .content
                        .extract_if(.., |x| matches!(x, MessageContent::ToolResult(..)))
                    {
                        match tool_result {
                            MessageContent::ToolResult(tool_result) => {
                                messages.push(ChatMessage::Tool {
                                    tool_name: tool_result.tool_name.to_string(),
                                    // Non-text tool output is dropped here.
                                    content: tool_result.content.to_str().unwrap_or("").to_string(),
                                })
                            }
                            _ => unreachable!("Only tool result should be extracted"),
                        }
                    }
                    if !msg.content.is_empty() {
                        messages.push(ChatMessage::User {
                            content: msg.string_contents(),
                            images: if images.is_empty() {
                                None
                            } else {
                                Some(images)
                            },
                        })
                    }
                }
                Role::Assistant => {
                    let content = msg.string_contents();
                    // Only the last non-empty Thinking chunk survives; each
                    // assignment below overwrites the previous one.
                    let mut thinking = None;
                    let mut tool_calls = Vec::new();
                    for content in msg.content.into_iter() {
                        match content {
                            MessageContent::Thinking { text, .. } if !text.is_empty() => {
                                thinking = Some(text)
                            }
                            MessageContent::ToolUse(tool_use) => {
                                tool_calls.push(OllamaToolCall::Function(OllamaFunctionCall {
                                    name: tool_use.name.to_string(),
                                    arguments: tool_use.input,
                                }));
                            }
                            _ => (),
                        }
                    }
                    messages.push(ChatMessage::Assistant {
                        content,
                        tool_calls: Some(tool_calls),
                        images: if images.is_empty() {
                            None
                        } else {
                            Some(images)
                        },
                        thinking,
                    })
                }
                Role::System => messages.push(ChatMessage::System {
                    content: msg.string_contents(),
                }),
            }
        }
        ChatRequest {
            model: self.model.name.clone(),
            messages,
            keep_alive: self.model.keep_alive.clone().unwrap_or_default(),
            stream: true,
            options: Some(ChatOptions {
                num_ctx: Some(self.model.max_tokens),
                stop: Some(request.stop),
                // Default to temperature 1.0 when the request omits it.
                temperature: request.temperature.or(Some(1.0)),
                ..Default::default()
            }),
            // Request thinking only when the model supports it AND the
            // request permits it; `None` when support is unknown.
            think: self
                .model
                .supports_thinking
                .map(|supports_thinking| supports_thinking && request.thinking_allowed),
            tools: if self.model.supports_tools.unwrap_or(false) {
                request.tools.into_iter().map(tool_into_ollama).collect()
            } else {
                vec![]
            },
        }
    }
}
419
impl LanguageModel for OllamaLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn supports_tools(&self) -> bool {
        self.model.supports_tools.unwrap_or(false)
    }

    fn supports_images(&self) -> bool {
        self.model.supports_vision.unwrap_or(false)
    }

    /// No tool-choice mode is supported: the `ChatRequest` built in
    /// `to_ollama_request` has no field equivalent to a tool_choice.
    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto => false,
            LanguageModelToolChoice::Any => false,
            LanguageModelToolChoice::None => false,
        }
    }

    fn telemetry_id(&self) -> String {
        format!("ollama/{}", self.model.id())
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_token_count()
    }

    /// Rough estimate only: total characters across messages divided by 4
    /// (a common chars-per-token heuristic).
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        _cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        // There is no endpoint for this _yet_ in Ollama
        // see: https://github.com/ollama/ollama/issues/1716 and https://github.com/ollama/ollama/issues/3582
        let token_count = request
            .messages
            .iter()
            .map(|msg| msg.string_contents().chars().count())
            .sum::<usize>()
            / 4;

        async move { Ok(token_count as u64) }.boxed()
    }

    /// Streams a chat completion, mapping Ollama deltas into completion
    /// events. The request is rate-limited via `request_limiter`.
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>,
            LanguageModelCompletionError,
        >,
    > {
        let request = self.to_ollama_request(request);

        let http_client = self.http_client.clone();
        // Read the current URL and its scoped key; fails only if the app
        // state entity has been dropped.
        let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
            let api_url = OllamaLanguageModelProvider::api_url(cx);
            (state.api_key_state.key(&api_url), api_url)
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped").into())).boxed();
        };

        let future = self.request_limiter.stream(async move {
            let stream =
                stream_chat_completion(http_client.as_ref(), &api_url, api_key.as_deref(), request)
                    .await?;
            let stream = map_to_language_model_completion_events(stream);
            Ok(stream)
        });

        future.map_ok(|f| f.boxed()).boxed()
    }
}
510
511fn map_to_language_model_completion_events(
512    stream: Pin<Box<dyn Stream<Item = anyhow::Result<ChatResponseDelta>> + Send>>,
513) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
514    // Used for creating unique tool use ids
515    static TOOL_CALL_COUNTER: AtomicU64 = AtomicU64::new(0);
516
517    struct State {
518        stream: Pin<Box<dyn Stream<Item = anyhow::Result<ChatResponseDelta>> + Send>>,
519        used_tools: bool,
520    }
521
522    // We need to create a ToolUse and Stop event from a single
523    // response from the original stream
524    let stream = stream::unfold(
525        State {
526            stream,
527            used_tools: false,
528        },
529        async move |mut state| {
530            let response = state.stream.next().await?;
531
532            let delta = match response {
533                Ok(delta) => delta,
534                Err(e) => {
535                    let event = Err(LanguageModelCompletionError::from(anyhow!(e)));
536                    return Some((vec![event], state));
537                }
538            };
539
540            let mut events = Vec::new();
541
542            match delta.message {
543                ChatMessage::User { content, images: _ } => {
544                    events.push(Ok(LanguageModelCompletionEvent::Text(content)));
545                }
546                ChatMessage::System { content } => {
547                    events.push(Ok(LanguageModelCompletionEvent::Text(content)));
548                }
549                ChatMessage::Tool { content, .. } => {
550                    events.push(Ok(LanguageModelCompletionEvent::Text(content)));
551                }
552                ChatMessage::Assistant {
553                    content,
554                    tool_calls,
555                    images: _,
556                    thinking,
557                } => {
558                    if let Some(text) = thinking {
559                        events.push(Ok(LanguageModelCompletionEvent::Thinking {
560                            text,
561                            signature: None,
562                        }));
563                    }
564
565                    if let Some(tool_call) = tool_calls.and_then(|v| v.into_iter().next()) {
566                        match tool_call {
567                            OllamaToolCall::Function(function) => {
568                                let tool_id = format!(
569                                    "{}-{}",
570                                    &function.name,
571                                    TOOL_CALL_COUNTER.fetch_add(1, Ordering::Relaxed)
572                                );
573                                let event =
574                                    LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
575                                        id: LanguageModelToolUseId::from(tool_id),
576                                        name: Arc::from(function.name),
577                                        raw_input: function.arguments.to_string(),
578                                        input: function.arguments,
579                                        is_input_complete: true,
580                                    });
581                                events.push(Ok(event));
582                                state.used_tools = true;
583                            }
584                        }
585                    } else if !content.is_empty() {
586                        events.push(Ok(LanguageModelCompletionEvent::Text(content)));
587                    }
588                }
589            };
590
591            if delta.done {
592                events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
593                    input_tokens: delta.prompt_eval_count.unwrap_or(0),
594                    output_tokens: delta.eval_count.unwrap_or(0),
595                    cache_creation_input_tokens: 0,
596                    cache_read_input_tokens: 0,
597                })));
598                if state.used_tools {
599                    state.used_tools = false;
600                    events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)));
601                } else {
602                    events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
603                }
604            }
605
606            Some((events, state))
607        },
608    );
609
610    stream.flat_map(futures::stream::iter)
611}
612
/// Settings UI for the Ollama provider: API key and API URL editors plus
/// connection instructions and status.
struct ConfigurationView {
    api_key_editor: gpui::Entity<SingleLineInput>,
    api_url_editor: gpui::Entity<SingleLineInput>,
    state: gpui::Entity<State>,
}
618
impl ConfigurationView {
    /// Builds the view, pre-filling the URL editor with the effective API
    /// URL and re-rendering whenever the provider state changes.
    pub fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        // "63e02e..." is placeholder text illustrating an API key's shape.
        let api_key_editor =
            cx.new(|cx| SingleLineInput::new(window, cx, "63e02e...").label("API key"));

        let api_url_editor = cx.new(|cx| {
            let input = SingleLineInput::new(window, cx, OLLAMA_API_URL).label("API URL");
            input.set_text(OllamaLanguageModelProvider::api_url(cx), window, cx);
            input
        });

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        Self {
            api_key_editor,
            api_url_editor,
            state,
        }
    }

    /// Re-runs the model fetch to re-test connectivity to the server.
    fn retry_connection(&self, cx: &mut App) {
        self.state
            .update(cx, |state, cx| state.restart_fetch_models_task(cx));
    }

    /// Saves the trimmed key on Confirm; a blank editor is a no-op.
    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
        if api_key.is_empty() {
            return;
        }

        // url changes can cause the editor to be displayed again
        self.api_key_editor
            .update(cx, |input, cx| input.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
                .await
        })
        .detach_and_log_err(cx);
    }

    /// Clears both the editor contents and the stored key.
    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor
            .update(cx, |input, cx| input.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn_in(window, async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(None, cx))?
                .await
        })
        .detach_and_log_err(cx);

        cx.notify();
    }

    /// Persists a non-empty, changed URL into the settings file; the
    /// global settings observer then triggers re-authentication.
    fn save_api_url(&mut self, cx: &mut Context<Self>) {
        let api_url = self.api_url_editor.read(cx).text(cx).trim().to_string();
        let current_url = OllamaLanguageModelProvider::api_url(cx);
        if !api_url.is_empty() && &api_url != &current_url {
            let fs = <dyn Fs>::global(cx);
            update_settings_file(fs, cx, move |settings, _| {
                settings
                    .language_models
                    .get_or_insert_default()
                    .ollama
                    .get_or_insert_default()
                    .api_url = Some(api_url);
            });
        }
    }

    /// Clears the editor and writes the default URL back into settings.
    /// Note: this sets `api_url` to the default rather than removing the
    /// key, and only if an Ollama settings section already exists.
    fn reset_api_url(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_url_editor
            .update(cx, |input, cx| input.set_text("", window, cx));
        let fs = <dyn Fs>::global(cx);
        update_settings_file(fs, cx, |settings, _cx| {
            if let Some(settings) = settings
                .language_models
                .as_mut()
                .and_then(|models| models.ollama.as_mut())
            {
                settings.api_url = Some(OLLAMA_API_URL.into());
            }
        });
        cx.notify();
    }

    /// Static onboarding copy shown above the editors.
    fn render_instructions() -> Div {
        v_flex()
            .gap_2()
            .child(Label::new(
                "Run LLMs locally on your machine with Ollama, or connect to an Ollama server. \
                Can provide access to Llama, Mistral, Gemma, and hundreds of other models.",
            ))
            .child(Label::new("To use local Ollama:"))
            .child(
                List::new()
                    .child(InstructionListItem::new(
                        "Download and install Ollama from",
                        Some("ollama.com"),
                        Some("https://ollama.com/download"),
                    ))
                    .child(InstructionListItem::text_only(
                        "Start Ollama and download a model: `ollama run gpt-oss:20b`",
                    ))
                    .child(InstructionListItem::text_only(
                        "Click 'Connect' below to start using Ollama in Zed",
                    )),
            )
            .child(Label::new(
                "Alternatively, you can connect to an Ollama server by specifying its \
                URL and API key (may not be required):",
            ))
    }

    /// Shows either the key-entry editor (no key stored) or a status row
    /// with a reset button (key present; reset disabled-in-spirit when the
    /// key came from the environment variable).
    fn render_api_key_editor(&self, cx: &Context<Self>) -> Div {
        let state = self.state.read(cx);
        let env_var_set = state.api_key_state.is_from_env_var();

        if !state.api_key_state.has_key() {
            v_flex()
              .on_action(cx.listener(Self::save_api_key))
              .child(self.api_key_editor.clone())
              .child(
                  Label::new(
                      format!("You can also assign the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed.")
                  )
                  .size(LabelSize::Small)
                  .color(Color::Muted),
              )
        } else {
            h_flex()
                .p_3()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().elevated_surface_background)
                .child(
                    h_flex()
                        .gap_2()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(
                            Label::new(
                                if env_var_set {
                                    format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable.")
                                } else {
                                    "API key configured".to_string()
                                }
                            )
                        )
                )
                .child(
                    Button::new("reset-api-key", "Reset API Key")
                        .label_size(LabelSize::Small)
                        .icon(IconName::Undo)
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .layer(ElevationIndex::ModalSurface)
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
        }
    }

    /// Shows either a status row for a custom URL (with a reset button) or
    /// the URL editor when the default URL is in effect.
    fn render_api_url_editor(&self, cx: &Context<Self>) -> Div {
        let api_url = OllamaLanguageModelProvider::api_url(cx);
        let custom_api_url_set = api_url != OLLAMA_API_URL;

        if custom_api_url_set {
            h_flex()
                .p_3()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().elevated_surface_background)
                .child(
                    h_flex()
                        .gap_2()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(v_flex().gap_1().child(Label::new(api_url))),
                )
                .child(
                    Button::new("reset-api-url", "Reset API URL")
                        .label_size(LabelSize::Small)
                        .icon(IconName::Undo)
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .layer(ElevationIndex::ModalSurface)
                        .on_click(
                            cx.listener(|this, _, window, cx| this.reset_api_url(window, cx)),
                        ),
                )
        } else {
            v_flex()
                .on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| {
                    this.save_api_url(cx);
                    cx.notify();
                }))
                .gap_2()
                .child(self.api_url_editor.clone())
        }
    }
}
833
834impl Render for ConfigurationView {
835    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
836        let is_authenticated = self.state.read(cx).is_authenticated();
837
838        v_flex()
839            .gap_2()
840            .child(Self::render_instructions())
841            .child(self.render_api_url_editor(cx))
842            .child(self.render_api_key_editor(cx))
843            .child(
844                h_flex()
845                    .w_full()
846                    .justify_between()
847                    .gap_2()
848                    .child(
849                        h_flex()
850                            .w_full()
851                            .gap_2()
852                            .map(|this| {
853                                if is_authenticated {
854                                    this.child(
855                                        Button::new("ollama-site", "Ollama")
856                                            .style(ButtonStyle::Subtle)
857                                            .icon(IconName::ArrowUpRight)
858                                            .icon_size(IconSize::XSmall)
859                                            .icon_color(Color::Muted)
860                                            .on_click(move |_, _, cx| cx.open_url(OLLAMA_SITE))
861                                            .into_any_element(),
862                                    )
863                                } else {
864                                    this.child(
865                                        Button::new("download_ollama_button", "Download Ollama")
866                                            .style(ButtonStyle::Subtle)
867                                            .icon(IconName::ArrowUpRight)
868                                            .icon_size(IconSize::XSmall)
869                                            .icon_color(Color::Muted)
870                                            .on_click(move |_, _, cx| {
871                                                cx.open_url(OLLAMA_DOWNLOAD_URL)
872                                            })
873                                            .into_any_element(),
874                                    )
875                                }
876                            })
877                            .child(
878                                Button::new("view-models", "View All Models")
879                                    .style(ButtonStyle::Subtle)
880                                    .icon(IconName::ArrowUpRight)
881                                    .icon_size(IconSize::XSmall)
882                                    .icon_color(Color::Muted)
883                                    .on_click(move |_, _, cx| cx.open_url(OLLAMA_LIBRARY_URL)),
884                            ),
885                    )
886                    .map(|this| {
887                        if is_authenticated {
888                            this.child(
889                                ButtonLike::new("connected")
890                                    .disabled(true)
891                                    .cursor_style(gpui::CursorStyle::Arrow)
892                                    .child(
893                                        h_flex()
894                                            .gap_2()
895                                            .child(Icon::new(IconName::Check).color(Color::Success))
896                                            .child(Label::new("Connected"))
897                                            .into_any_element(),
898                                    ),
899                            )
900                        } else {
901                            this.child(
902                                Button::new("retry_ollama_models", "Connect")
903                                    .icon_position(IconPosition::Start)
904                                    .icon_size(IconSize::XSmall)
905                                    .icon(IconName::PlayOutlined)
906                                    .on_click(
907                                        cx.listener(move |this, _, _, cx| {
908                                            this.retry_connection(cx)
909                                        }),
910                                    ),
911                            )
912                        }
913                    }),
914            )
915    }
916}
917
918fn tool_into_ollama(tool: LanguageModelRequestTool) -> ollama::OllamaTool {
919    ollama::OllamaTool::Function {
920        function: OllamaFunctionTool {
921            name: tool.name,
922            description: Some(tool.description),
923            parameters: Some(tool.input_schema),
924        },
925    }
926}