1use anyhow::{Result, anyhow};
  2use fs::Fs;
  3use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream};
  4use futures::{Stream, TryFutureExt, stream};
  5use gpui::{AnyView, App, AsyncApp, Context, CursorStyle, Entity, Task};
  6use http_client::HttpClient;
  7use language_model::{
  8    AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
  9    LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
 10    LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
 11    LanguageModelRequestTool, LanguageModelToolChoice, LanguageModelToolUse,
 12    LanguageModelToolUseId, MessageContent, OLLAMA_PROVIDER_ID, OLLAMA_PROVIDER_NAME, RateLimiter,
 13    Role, StopReason, TokenUsage,
 14};
 15use menu;
 16use ollama::{
 17    ChatMessage, ChatOptions, ChatRequest, ChatResponseDelta, OLLAMA_API_URL, OllamaFunctionCall,
 18    OllamaFunctionTool, OllamaToolCall, get_models, show_model, stream_chat_completion,
 19};
 20pub use settings::OllamaAvailableModel as AvailableModel;
 21use settings::{Settings, SettingsStore, update_settings_file};
 22use std::pin::Pin;
 23use std::sync::LazyLock;
 24use std::sync::atomic::{AtomicU64, Ordering};
 25use std::{collections::HashMap, sync::Arc};
 26use ui::{ButtonLike, ElevationIndex, List, Tooltip, prelude::*};
 27use ui_input::InputField;
 28use zed_env_vars::{EnvVar, env_var};
 29
 30use crate::AllLanguageModelSettings;
 31use crate::api_key::ApiKeyState;
 32use crate::ui::InstructionListItem;
 33
// External Ollama links surfaced in the configuration/help UI.
const OLLAMA_DOWNLOAD_URL: &str = "https://ollama.com/download";
const OLLAMA_LIBRARY_URL: &str = "https://ollama.com/library";
const OLLAMA_SITE: &str = "https://ollama.com/";

// Environment variable consulted as a fallback source for the API key
// (see `State::authenticate`, which passes it to `load_if_needed`).
const API_KEY_ENV_VAR_NAME: &str = "OLLAMA_API_KEY";
static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);
 40
/// User-configurable settings for the Ollama provider.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct OllamaSettings {
    /// Base URL of the Ollama HTTP API; an empty string means
    /// "use the default `OLLAMA_API_URL`" (see `OllamaLanguageModelProvider::api_url`).
    pub api_url: String,
    /// Models declared manually in settings; merged over the fetched list
    /// in `provided_models`.
    pub available_models: Vec<AvailableModel>,
}
 46
/// Language-model provider backed by an Ollama server (local or remote).
pub struct OllamaLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    // Shared, observable state: API key, fetched models, fetch task.
    state: Entity<State>,
}
 51
/// Mutable provider state shared between the provider, its models, and the
/// configuration view.
pub struct State {
    api_key_state: ApiKeyState,
    http_client: Arc<dyn HttpClient>,
    // Models reported by the server, capability-probed and sorted by name.
    fetched_models: Vec<ollama::Model>,
    // Keeps the most recent fetch task alive; replaced on each restart.
    fetch_model_task: Option<Task<Result<()>>>,
}
 58
impl State {
    /// "Authenticated" is approximated by having successfully listed at
    /// least one model from the server.
    fn is_authenticated(&self) -> bool {
        !self.fetched_models.is_empty()
    }

    /// Store (or clear, when `api_key` is `None`) the API key for the
    /// current URL, then refresh the model list under the new credentials.
    fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
        let api_url = OllamaLanguageModelProvider::api_url(cx);
        let task = self
            .api_key_state
            .store(api_url, api_key, |this| &mut this.api_key_state, cx);

        // Previously fetched models may not be valid for the new key.
        self.fetched_models.clear();
        cx.spawn(async move |this, cx| {
            let result = task.await;
            // Refetch regardless of whether storing the key succeeded;
            // `.ok()` drops the error if the entity was released.
            this.update(cx, |this, cx| this.restart_fetch_models_task(cx))
                .ok();
            result
        })
    }

    /// Load the API key (from the keychain or the env var) if needed, then
    /// kick off a model fetch.
    fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        let api_url = OllamaLanguageModelProvider::api_url(cx);
        let task = self.api_key_state.load_if_needed(
            api_url,
            &API_KEY_ENV_VAR,
            |this| &mut this.api_key_state,
            cx,
        );

        // Always try to fetch models - if no API key is needed (local Ollama), it will work
        // If API key is needed and provided, it will work
        // If API key is needed and not provided, it will fail gracefully
        cx.spawn(async move |this, cx| {
            let result = task.await;
            this.update(cx, |this, cx| this.restart_fetch_models_task(cx))
                .ok();
            result
        })
    }

    /// List models from the server, probe each one's capabilities via
    /// `show_model`, and store the sorted result in `fetched_models`.
    fn fetch_models(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
        let http_client = Arc::clone(&self.http_client);
        let api_url = OllamaLanguageModelProvider::api_url(cx);
        let api_key = self.api_key_state.key(&api_url);

        // As a proxy for the server being "authenticated", we'll check if its up by fetching the models
        cx.spawn(async move |this, cx| {
            let models = get_models(http_client.as_ref(), &api_url, api_key.as_deref()).await?;

            let tasks = models
                .into_iter()
                // Since there is no metadata from the Ollama API
                // indicating which models are embedding models,
                // simply filter out models with "-embed" in their name
                .filter(|model| !model.name.contains("-embed"))
                .map(|model| {
                    let http_client = Arc::clone(&http_client);
                    let api_url = api_url.clone();
                    let api_key = api_key.clone();
                    async move {
                        let name = model.name.as_str();
                        // One `show_model` round-trip per model to learn
                        // context length and tool/vision/thinking support.
                        let model =
                            show_model(http_client.as_ref(), &api_url, api_key.as_deref(), name)
                                .await?;
                        let ollama_model = ollama::Model::new(
                            name,
                            None,
                            model.context_length,
                            Some(model.supports_tools()),
                            Some(model.supports_vision()),
                            Some(model.supports_thinking()),
                        );
                        Ok(ollama_model)
                    }
                });

            // Rate-limit capability fetches
            // since there is an arbitrary number of models available
            let mut ollama_models: Vec<_> = futures::stream::iter(tasks)
                .buffer_unordered(5)
                .collect::<Vec<Result<_>>>()
                .await
                .into_iter()
                .collect::<Result<Vec<_>>>()?;

            ollama_models.sort_by(|a, b| a.name.cmp(&b.name));

            this.update(cx, |this, cx| {
                this.fetched_models = ollama_models;
                cx.notify();
            })
        })
    }

    /// Replace any in-flight fetch with a fresh one (dropping the old task
    /// cancels it).
    fn restart_fetch_models_task(&mut self, cx: &mut Context<Self>) {
        let task = self.fetch_models(cx);
        self.fetch_model_task.replace(task);
    }
}
158
159impl OllamaLanguageModelProvider {
160    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
161        let this = Self {
162            http_client: http_client.clone(),
163            state: cx.new(|cx| {
164                cx.observe_global::<SettingsStore>({
165                    let mut last_settings = OllamaLanguageModelProvider::settings(cx).clone();
166                    move |this: &mut State, cx| {
167                        let current_settings = OllamaLanguageModelProvider::settings(cx);
168                        let settings_changed = current_settings != &last_settings;
169                        if settings_changed {
170                            let url_changed = last_settings.api_url != current_settings.api_url;
171                            last_settings = current_settings.clone();
172                            if url_changed {
173                                this.fetched_models.clear();
174                                this.authenticate(cx).detach();
175                            }
176                            cx.notify();
177                        }
178                    }
179                })
180                .detach();
181
182                State {
183                    http_client,
184                    fetched_models: Default::default(),
185                    fetch_model_task: None,
186                    api_key_state: ApiKeyState::new(Self::api_url(cx)),
187                }
188            }),
189        };
190        this
191    }
192
193    fn settings(cx: &App) -> &OllamaSettings {
194        &AllLanguageModelSettings::get_global(cx).ollama
195    }
196
197    fn api_url(cx: &App) -> SharedString {
198        let api_url = &Self::settings(cx).api_url;
199        if api_url.is_empty() {
200            OLLAMA_API_URL.into()
201        } else {
202            SharedString::new(api_url.as_str())
203        }
204    }
205}
206
207impl LanguageModelProviderState for OllamaLanguageModelProvider {
208    type ObservableEntity = State;
209
210    fn observable_entity(&self) -> Option<Entity<Self::ObservableEntity>> {
211        Some(self.state.clone())
212    }
213}
214
impl LanguageModelProvider for OllamaLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        OLLAMA_PROVIDER_ID
    }

    fn name(&self) -> LanguageModelProviderName {
        OLLAMA_PROVIDER_NAME
    }

    fn icon(&self) -> IconName {
        IconName::AiOllama
    }

    fn default_model(&self, _: &App) -> Option<Arc<dyn LanguageModel>> {
        // We shouldn't try to select default model, because it might lead to a load call for an unloaded model.
        // In a constrained environment where user might not have enough resources it'll be a bad UX to select something
        // to load by default.
        None
    }

    fn default_fast_model(&self, _: &App) -> Option<Arc<dyn LanguageModel>> {
        // See explanation for default_model.
        None
    }

    /// Merge server-fetched models with the user's settings overrides and
    /// wrap each in an [`OllamaLanguageModel`], sorted by display name.
    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        let mut models: HashMap<String, ollama::Model> = HashMap::new();

        // Add models from the Ollama API
        for model in self.state.read(cx).fetched_models.iter() {
            models.insert(model.name.clone(), model.clone());
        }

        // Override with available models from settings
        for setting_model in &OllamaLanguageModelProvider::settings(cx).available_models {
            // Settings entries match fetched models by base name (the text
            // before ':'), so e.g. "llama3" patches a fetched "llama3:8b".
            // Only the first matching fetched model is patched.
            let setting_base = setting_model.name.split(':').next().unwrap();
            if let Some(model) = models
                .values_mut()
                .find(|m| m.name.split(':').next().unwrap() == setting_base)
            {
                model.max_tokens = setting_model.max_tokens;
                model.display_name = setting_model.display_name.clone();
                model.keep_alive = setting_model.keep_alive.clone();
                model.supports_tools = setting_model.supports_tools;
                model.supports_vision = setting_model.supports_images;
                model.supports_thinking = setting_model.supports_thinking;
            } else {
                // No fetched counterpart: expose the settings entry as-is.
                models.insert(
                    setting_model.name.clone(),
                    ollama::Model {
                        name: setting_model.name.clone(),
                        display_name: setting_model.display_name.clone(),
                        max_tokens: setting_model.max_tokens,
                        keep_alive: setting_model.keep_alive.clone(),
                        supports_tools: setting_model.supports_tools,
                        supports_vision: setting_model.supports_images,
                        supports_thinking: setting_model.supports_thinking,
                    },
                );
            }
        }

        let mut models = models
            .into_values()
            .map(|model| {
                Arc::new(OllamaLanguageModel {
                    id: LanguageModelId::from(model.name.clone()),
                    model,
                    http_client: self.http_client.clone(),
                    // At most 4 concurrent completion requests per model.
                    request_limiter: RateLimiter::new(4),
                    state: self.state.clone(),
                }) as Arc<dyn LanguageModel>
            })
            .collect::<Vec<_>>();
        models.sort_by_key(|model| model.name());
        models
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(
        &self,
        _target_agent: language_model::ConfigurationViewTargetAgent,
        window: &mut Window,
        cx: &mut App,
    ) -> AnyView {
        let state = self.state.clone();
        cx.new(|cx| ConfigurationView::new(state, window, cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state
            .update(cx, |state, cx| state.set_api_key(None, cx))
    }
}
317
/// A single Ollama model exposed through Zed's [`LanguageModel`] interface.
pub struct OllamaLanguageModel {
    id: LanguageModelId,
    model: ollama::Model,
    http_client: Arc<dyn HttpClient>,
    // Caps concurrent completion requests (constructed with limit 4).
    request_limiter: RateLimiter,
    // Read at request time for the current API key and URL.
    state: Entity<State>,
}
325
impl OllamaLanguageModel {
    /// Translate Zed's provider-agnostic request into an Ollama `ChatRequest`.
    ///
    /// Per message: image parts are collected only when the model supports
    /// vision; tool results are lifted out of user messages into separate
    /// `Tool` messages; assistant tool uses become Ollama tool calls.
    fn to_ollama_request(&self, request: LanguageModelRequest) -> ChatRequest {
        let supports_vision = self.model.supports_vision.unwrap_or(false);

        let mut messages = Vec::with_capacity(request.messages.len());

        for mut msg in request.messages.into_iter() {
            let images = if supports_vision {
                msg.content
                    .iter()
                    .filter_map(|content| match content {
                        MessageContent::Image(image) => Some(image.source.to_string()),
                        _ => None,
                    })
                    .collect::<Vec<String>>()
            } else {
                vec![]
            };

            match msg.role {
                Role::User => {
                    // Tool results are removed from the user message and
                    // emitted as standalone `Tool` messages first.
                    for tool_result in msg
                        .content
                        .extract_if(.., |x| matches!(x, MessageContent::ToolResult(..)))
                    {
                        match tool_result {
                            MessageContent::ToolResult(tool_result) => {
                                messages.push(ChatMessage::Tool {
                                    tool_name: tool_result.tool_name.to_string(),
                                    content: tool_result.content.to_str().unwrap_or("").to_string(),
                                })
                            }
                            _ => unreachable!("Only tool result should be extracted"),
                        }
                    }
                    // Anything left (text etc.) becomes the user message.
                    if !msg.content.is_empty() {
                        messages.push(ChatMessage::User {
                            content: msg.string_contents(),
                            images: if images.is_empty() {
                                None
                            } else {
                                Some(images)
                            },
                        })
                    }
                }
                Role::Assistant => {
                    let content = msg.string_contents();
                    // Last non-empty thinking part wins; each ToolUse is
                    // forwarded as an Ollama function call.
                    let mut thinking = None;
                    let mut tool_calls = Vec::new();
                    for content in msg.content.into_iter() {
                        match content {
                            MessageContent::Thinking { text, .. } if !text.is_empty() => {
                                thinking = Some(text)
                            }
                            MessageContent::ToolUse(tool_use) => {
                                tool_calls.push(OllamaToolCall::Function(OllamaFunctionCall {
                                    name: tool_use.name.to_string(),
                                    arguments: tool_use.input,
                                }));
                            }
                            _ => (),
                        }
                    }
                    messages.push(ChatMessage::Assistant {
                        content,
                        tool_calls: Some(tool_calls),
                        images: if images.is_empty() {
                            None
                        } else {
                            Some(images)
                        },
                        thinking,
                    })
                }
                Role::System => messages.push(ChatMessage::System {
                    content: msg.string_contents(),
                }),
            }
        }
        ChatRequest {
            model: self.model.name.clone(),
            messages,
            keep_alive: self.model.keep_alive.clone().unwrap_or_default(),
            stream: true,
            options: Some(ChatOptions {
                num_ctx: Some(self.model.max_tokens),
                stop: Some(request.stop),
                // Default the temperature to 1.0 when the request has none.
                temperature: request.temperature.or(Some(1.0)),
                ..Default::default()
            }),
            // `None` when the model's thinking support is unknown.
            think: self
                .model
                .supports_thinking
                .map(|supports_thinking| supports_thinking && request.thinking_allowed),
            tools: if self.model.supports_tools.unwrap_or(false) {
                request.tools.into_iter().map(tool_into_ollama).collect()
            } else {
                vec![]
            },
        }
    }
}
429
impl LanguageModel for OllamaLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        OLLAMA_PROVIDER_ID
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        OLLAMA_PROVIDER_NAME
    }

    fn supports_tools(&self) -> bool {
        self.model.supports_tools.unwrap_or(false)
    }

    fn supports_images(&self) -> bool {
        self.model.supports_vision.unwrap_or(false)
    }

    // No tool-choice mode is supported for Ollama models.
    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto => false,
            LanguageModelToolChoice::Any => false,
            LanguageModelToolChoice::None => false,
        }
    }

    fn telemetry_id(&self) -> String {
        format!("ollama/{}", self.model.id())
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_token_count()
    }

    /// Rough estimate: total characters across messages divided by 4.
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        _cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        // There is no endpoint for this _yet_ in Ollama
        // see: https://github.com/ollama/ollama/issues/1716 and https://github.com/ollama/ollama/issues/3582
        let token_count = request
            .messages
            .iter()
            .map(|msg| msg.string_contents().chars().count())
            .sum::<usize>()
            / 4;

        async move { Ok(token_count as u64) }.boxed()
    }

    /// Start a streaming chat completion, mapping Ollama deltas into Zed
    /// completion events. Concurrency is bounded by `request_limiter`.
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>,
            LanguageModelCompletionError,
        >,
    > {
        let request = self.to_ollama_request(request);

        let http_client = self.http_client.clone();
        // Snapshot the key and URL from state; fails only if the app
        // (and therefore the state entity) has been dropped.
        let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
            let api_url = OllamaLanguageModelProvider::api_url(cx);
            (state.api_key_state.key(&api_url), api_url)
        }) else {
            return futures::future::ready(Err(anyhow!("App state dropped").into())).boxed();
        };

        let future = self.request_limiter.stream(async move {
            let stream =
                stream_chat_completion(http_client.as_ref(), &api_url, api_key.as_deref(), request)
                    .await?;
            let stream = map_to_language_model_completion_events(stream);
            Ok(stream)
        });

        future.map_ok(|f| f.boxed()).boxed()
    }
}
520
/// Adapt the raw Ollama delta stream into Zed's completion-event stream.
///
/// A single `ChatResponseDelta` can expand into several events (thinking,
/// text, tool use, usage, stop), so the unfold yields a `Vec` per delta
/// and the result is flattened at the end.
fn map_to_language_model_completion_events(
    stream: Pin<Box<dyn Stream<Item = anyhow::Result<ChatResponseDelta>> + Send>>,
) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
    // Used for creating unique tool use ids
    static TOOL_CALL_COUNTER: AtomicU64 = AtomicU64::new(0);

    struct State {
        stream: Pin<Box<dyn Stream<Item = anyhow::Result<ChatResponseDelta>> + Send>>,
        // Whether any tool call was seen; decides the final StopReason.
        used_tools: bool,
    }

    // We need to create a ToolUse and Stop event from a single
    // response from the original stream
    let stream = stream::unfold(
        State {
            stream,
            used_tools: false,
        },
        async move |mut state| {
            // `None` from the inner stream ends the unfold.
            let response = state.stream.next().await?;

            let delta = match response {
                Ok(delta) => delta,
                Err(e) => {
                    // Surface the error as a single event; the stream itself
                    // continues until the inner stream ends.
                    let event = Err(LanguageModelCompletionError::from(anyhow!(e)));
                    return Some((vec![event], state));
                }
            };

            let mut events = Vec::new();

            match delta.message {
                ChatMessage::User { content, images: _ } => {
                    events.push(Ok(LanguageModelCompletionEvent::Text(content)));
                }
                ChatMessage::System { content } => {
                    events.push(Ok(LanguageModelCompletionEvent::Text(content)));
                }
                ChatMessage::Tool { content, .. } => {
                    events.push(Ok(LanguageModelCompletionEvent::Text(content)));
                }
                ChatMessage::Assistant {
                    content,
                    tool_calls,
                    images: _,
                    thinking,
                } => {
                    if let Some(text) = thinking {
                        events.push(Ok(LanguageModelCompletionEvent::Thinking {
                            text,
                            signature: None,
                        }));
                    }

                    // Only the first tool call in a delta is surfaced.
                    if let Some(tool_call) = tool_calls.and_then(|v| v.into_iter().next()) {
                        match tool_call {
                            OllamaToolCall::Function(function) => {
                                // Synthesize a unique id: function name plus
                                // a process-wide counter.
                                let tool_id = format!(
                                    "{}-{}",
                                    &function.name,
                                    TOOL_CALL_COUNTER.fetch_add(1, Ordering::Relaxed)
                                );
                                let event =
                                    LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
                                        id: LanguageModelToolUseId::from(tool_id),
                                        name: Arc::from(function.name),
                                        raw_input: function.arguments.to_string(),
                                        input: function.arguments,
                                        is_input_complete: true,
                                    });
                                events.push(Ok(event));
                                state.used_tools = true;
                            }
                        }
                    } else if !content.is_empty() {
                        events.push(Ok(LanguageModelCompletionEvent::Text(content)));
                    }
                }
            };

            if delta.done {
                // Final delta: report token usage, then the stop reason.
                events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
                    input_tokens: delta.prompt_eval_count.unwrap_or(0),
                    output_tokens: delta.eval_count.unwrap_or(0),
                    cache_creation_input_tokens: 0,
                    cache_read_input_tokens: 0,
                })));
                if state.used_tools {
                    state.used_tools = false;
                    events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)));
                } else {
                    events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
                }
            }

            Some((events, state))
        },
    );

    stream.flat_map(futures::stream::iter)
}
622
/// Settings UI for the Ollama provider: API key and API URL inputs plus
/// connection instructions/status.
struct ConfigurationView {
    api_key_editor: Entity<InputField>,
    api_url_editor: Entity<InputField>,
    state: Entity<State>,
}
628
629impl ConfigurationView {
630    pub fn new(state: Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
631        let api_key_editor = cx.new(|cx| InputField::new(window, cx, "63e02e...").label("API key"));
632
633        let api_url_editor = cx.new(|cx| {
634            let input = InputField::new(window, cx, OLLAMA_API_URL).label("API URL");
635            input.set_text(OllamaLanguageModelProvider::api_url(cx), window, cx);
636            input
637        });
638
639        cx.observe(&state, |_, _, cx| {
640            cx.notify();
641        })
642        .detach();
643
644        Self {
645            api_key_editor,
646            api_url_editor,
647            state,
648        }
649    }
650
651    fn retry_connection(&self, cx: &mut App) {
652        self.state
653            .update(cx, |state, cx| state.restart_fetch_models_task(cx));
654    }
655
656    fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
657        let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
658        if api_key.is_empty() {
659            return;
660        }
661
662        // url changes can cause the editor to be displayed again
663        self.api_key_editor
664            .update(cx, |input, cx| input.set_text("", window, cx));
665
666        let state = self.state.clone();
667        cx.spawn_in(window, async move |_, cx| {
668            state
669                .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
670                .await
671        })
672        .detach_and_log_err(cx);
673    }
674
675    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
676        self.api_key_editor
677            .update(cx, |input, cx| input.set_text("", window, cx));
678
679        let state = self.state.clone();
680        cx.spawn_in(window, async move |_, cx| {
681            state
682                .update(cx, |state, cx| state.set_api_key(None, cx))?
683                .await
684        })
685        .detach_and_log_err(cx);
686
687        cx.notify();
688    }
689
690    fn save_api_url(&mut self, cx: &mut Context<Self>) {
691        let api_url = self.api_url_editor.read(cx).text(cx).trim().to_string();
692        let current_url = OllamaLanguageModelProvider::api_url(cx);
693        if !api_url.is_empty() && &api_url != ¤t_url {
694            let fs = <dyn Fs>::global(cx);
695            update_settings_file(fs, cx, move |settings, _| {
696                settings
697                    .language_models
698                    .get_or_insert_default()
699                    .ollama
700                    .get_or_insert_default()
701                    .api_url = Some(api_url);
702            });
703        }
704    }
705
706    fn reset_api_url(&mut self, window: &mut Window, cx: &mut Context<Self>) {
707        self.api_url_editor
708            .update(cx, |input, cx| input.set_text("", window, cx));
709        let fs = <dyn Fs>::global(cx);
710        update_settings_file(fs, cx, |settings, _cx| {
711            if let Some(settings) = settings
712                .language_models
713                .as_mut()
714                .and_then(|models| models.ollama.as_mut())
715            {
716                settings.api_url = Some(OLLAMA_API_URL.into());
717            }
718        });
719        cx.notify();
720    }
721
722    fn render_instructions() -> Div {
723        v_flex()
724            .gap_2()
725            .child(Label::new(
726                "Run LLMs locally on your machine with Ollama, or connect to an Ollama server. \
727                Can provide access to Llama, Mistral, Gemma, and hundreds of other models.",
728            ))
729            .child(Label::new("To use local Ollama:"))
730            .child(
731                List::new()
732                    .child(InstructionListItem::new(
733                        "Download and install Ollama from",
734                        Some("ollama.com"),
735                        Some("https://ollama.com/download"),
736                    ))
737                    .child(InstructionListItem::text_only(
738                        "Start Ollama and download a model: `ollama run gpt-oss:20b`",
739                    ))
740                    .child(InstructionListItem::text_only(
741                        "Click 'Connect' below to start using Ollama in Zed",
742                    )),
743            )
744            .child(Label::new(
745                "Alternatively, you can connect to an Ollama server by specifying its \
746                URL and API key (may not be required):",
747            ))
748    }
749
    /// Render the API-key section: an input field when no key is set,
    /// otherwise a "configured" banner with a reset button.
    fn render_api_key_editor(&self, cx: &Context<Self>) -> Div {
        let state = self.state.read(cx);
        let env_var_set = state.api_key_state.is_from_env_var();

        if !state.api_key_state.has_key() {
            // No key yet: show the editor plus a hint about the env var.
            v_flex()
              .on_action(cx.listener(Self::save_api_key))
              .child(self.api_key_editor.clone())
              .child(
                  Label::new(
                      format!("You can also assign the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed.")
                  )
                  .size(LabelSize::Small)
                  .color(Color::Muted),
              )
        } else {
            // Key present: confirmation banner with a reset affordance.
            h_flex()
                .p_3()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().elevated_surface_background)
                .child(
                    h_flex()
                        .gap_2()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(
                            Label::new(
                                if env_var_set {
                                    format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable.")
                                } else {
                                    "API key configured".to_string()
                                }
                            )
                        )
                )
                .child(
                    Button::new("reset-api-key", "Reset API Key")
                        .label_size(LabelSize::Small)
                        .icon(IconName::Undo)
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .layer(ElevationIndex::ModalSurface)
                        // An env-var key can't be reset from the UI; explain why.
                        .when(env_var_set, |this| {
                            this.tooltip(Tooltip::text(format!("To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable.")))
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))),
                )
        }
    }
801
802    fn render_api_url_editor(&self, cx: &Context<Self>) -> Div {
803        let api_url = OllamaLanguageModelProvider::api_url(cx);
804        let custom_api_url_set = api_url != OLLAMA_API_URL;
805
806        if custom_api_url_set {
807            h_flex()
808                .p_3()
809                .justify_between()
810                .rounded_md()
811                .border_1()
812                .border_color(cx.theme().colors().border)
813                .bg(cx.theme().colors().elevated_surface_background)
814                .child(
815                    h_flex()
816                        .gap_2()
817                        .child(Icon::new(IconName::Check).color(Color::Success))
818                        .child(v_flex().gap_1().child(Label::new(api_url))),
819                )
820                .child(
821                    Button::new("reset-api-url", "Reset API URL")
822                        .label_size(LabelSize::Small)
823                        .icon(IconName::Undo)
824                        .icon_size(IconSize::Small)
825                        .icon_position(IconPosition::Start)
826                        .layer(ElevationIndex::ModalSurface)
827                        .on_click(
828                            cx.listener(|this, _, window, cx| this.reset_api_url(window, cx)),
829                        ),
830                )
831        } else {
832            v_flex()
833                .on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| {
834                    this.save_api_url(cx);
835                    cx.notify();
836                }))
837                .gap_2()
838                .child(self.api_url_editor.clone())
839        }
840    }
841}
842
843impl Render for ConfigurationView {
844    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
845        let is_authenticated = self.state.read(cx).is_authenticated();
846
847        v_flex()
848            .gap_2()
849            .child(Self::render_instructions())
850            .child(self.render_api_url_editor(cx))
851            .child(self.render_api_key_editor(cx))
852            .child(
853                h_flex()
854                    .w_full()
855                    .justify_between()
856                    .gap_2()
857                    .child(
858                        h_flex()
859                            .w_full()
860                            .gap_2()
861                            .map(|this| {
862                                if is_authenticated {
863                                    this.child(
864                                        Button::new("ollama-site", "Ollama")
865                                            .style(ButtonStyle::Subtle)
866                                            .icon(IconName::ArrowUpRight)
867                                            .icon_size(IconSize::XSmall)
868                                            .icon_color(Color::Muted)
869                                            .on_click(move |_, _, cx| cx.open_url(OLLAMA_SITE))
870                                            .into_any_element(),
871                                    )
872                                } else {
873                                    this.child(
874                                        Button::new("download_ollama_button", "Download Ollama")
875                                            .style(ButtonStyle::Subtle)
876                                            .icon(IconName::ArrowUpRight)
877                                            .icon_size(IconSize::XSmall)
878                                            .icon_color(Color::Muted)
879                                            .on_click(move |_, _, cx| {
880                                                cx.open_url(OLLAMA_DOWNLOAD_URL)
881                                            })
882                                            .into_any_element(),
883                                    )
884                                }
885                            })
886                            .child(
887                                Button::new("view-models", "View All Models")
888                                    .style(ButtonStyle::Subtle)
889                                    .icon(IconName::ArrowUpRight)
890                                    .icon_size(IconSize::XSmall)
891                                    .icon_color(Color::Muted)
892                                    .on_click(move |_, _, cx| cx.open_url(OLLAMA_LIBRARY_URL)),
893                            ),
894                    )
895                    .map(|this| {
896                        if is_authenticated {
897                            this.child(
898                                ButtonLike::new("connected")
899                                    .disabled(true)
900                                    .cursor_style(CursorStyle::Arrow)
901                                    .child(
902                                        h_flex()
903                                            .gap_2()
904                                            .child(Icon::new(IconName::Check).color(Color::Success))
905                                            .child(Label::new("Connected"))
906                                            .into_any_element(),
907                                    ),
908                            )
909                        } else {
910                            this.child(
911                                Button::new("retry_ollama_models", "Connect")
912                                    .icon_position(IconPosition::Start)
913                                    .icon_size(IconSize::XSmall)
914                                    .icon(IconName::PlayOutlined)
915                                    .on_click(
916                                        cx.listener(move |this, _, _, cx| {
917                                            this.retry_connection(cx)
918                                        }),
919                                    ),
920                            )
921                        }
922                    }),
923            )
924    }
925}
926
927fn tool_into_ollama(tool: LanguageModelRequestTool) -> ollama::OllamaTool {
928    ollama::OllamaTool::Function {
929        function: OllamaFunctionTool {
930            name: tool.name,
931            description: Some(tool.description),
932            parameters: Some(tool.input_schema),
933        },
934    }
935}