language_model.rs

use crate::merge_from::MergeFrom;
use collections::HashMap;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings_macros::{MergeFrom, with_fallible_options};

use std::sync::Arc;

#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub bedrock: Option<AmazonBedrockSettingsContent>,
    pub deepseek: Option<DeepseekSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub lmstudio: Option<LmStudioSettingsContent>,
    pub mistral: Option<MistralSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub opencode: Option<OpenCodeSettingsContent>,
    pub open_router: Option<OpenRouterSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    pub openai_compatible: Option<HashMap<Arc<str>, OpenAiCompatibleSettingsContent>>,
    pub vercel: Option<VercelSettingsContent>,
    pub vercel_ai_gateway: Option<VercelAiGatewaySettingsContent>,
    pub x_ai: Option<XAiSettingsContent>,
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
}

#[with_fallible_options]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct AnthropicSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<AnthropicAvailableModel>>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct AnthropicAvailableModel {
    /// The model's name in the Anthropic API. e.g. claude-3-5-sonnet-latest, claude-3-opus-20240229, etc
    pub name: String,
    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// A model `name` to substitute when calling tools, in case the primary model doesn't support tool calling.
    pub tool_override: Option<String>,
    /// Configuration of Anthropic's caching API.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    pub max_output_tokens: Option<u64>,
    #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
    pub default_temperature: Option<f32>,
    #[serde(default)]
    pub extra_beta_headers: Vec<String>,
    /// The model's mode (e.g. thinking)
    pub mode: Option<ModelMode>,
}

#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct AmazonBedrockSettingsContent {
    pub available_models: Option<Vec<BedrockAvailableModel>>,
    pub endpoint_url: Option<String>,
    pub region: Option<String>,
    pub profile: Option<String>,
    pub authentication_method: Option<BedrockAuthMethodContent>,
    pub allow_global: Option<bool>,
    /// Enable the 1M token extended context window beta for supported Anthropic models.
    pub allow_extended_context: Option<bool>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct BedrockAvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    pub max_output_tokens: Option<u64>,
    #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
    pub default_temperature: Option<f32>,
    pub mode: Option<ModelMode>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub enum BedrockAuthMethodContent {
    #[serde(rename = "named_profile")]
    NamedProfile,
    #[serde(rename = "sso")]
    SingleSignOn,
    #[serde(rename = "api_key")]
    ApiKey,
    /// IMDSv2, PodIdentity, env vars, etc.
    #[serde(rename = "default")]
    Automatic,
}

#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub auto_discover: Option<bool>,
    pub available_models: Option<Vec<OllamaAvailableModel>>,
    pub context_window: Option<u64>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OllamaAvailableModel {
    /// The model name in the Ollama API (e.g. "llama3.2:latest")
    pub name: String,
    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel.
    pub display_name: Option<String>,
    /// The Context Length parameter to the model (aka num_ctx or n_ctx)
    pub max_tokens: u64,
    /// The number of seconds to keep the connection open after the last request
    pub keep_alive: Option<KeepAlive>,
    /// Whether the model supports tools
    pub supports_tools: Option<bool>,
    /// Whether the model supports vision
    pub supports_images: Option<bool>,
    /// Whether to enable think mode
    pub supports_thinking: Option<bool>,
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq, JsonSchema, MergeFrom)]
#[serde(untagged)]
pub enum KeepAlive {
    /// Keep model alive for N seconds
    Seconds(isize),
    /// Keep model alive for a fixed duration. Accepts durations like "5m", "10m", "1h", "1d", etc.
    Duration(String),
}

impl KeepAlive {
    /// Keep model alive until a new model is loaded or until Ollama shuts down
    pub fn indefinite() -> Self {
        Self::Seconds(-1)
    }
}

impl Default for KeepAlive {
    fn default() -> Self {
        Self::indefinite()
    }
}
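
// Because `KeepAlive` is `#[serde(untagged)]`, a `keep_alive` value in settings JSON can be
// written either as a number of seconds (e.g. `"keep_alive": 300`, or `-1` to keep the model
// loaded indefinitely, matching `KeepAlive::indefinite`) or as a duration string such as
// `"keep_alive": "10m"`.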

#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct OpenCodeSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<OpenCodeAvailableModel>>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenCodeAvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub max_output_tokens: Option<u64>,
    /// The API protocol to use for this model: "anthropic", "openai_responses", "openai_chat", or "google".
    pub protocol: String,
}

#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct LmStudioSettingsContent {
    pub api_url: Option<String>,
    pub api_key: Option<String>,
    pub available_models: Option<Vec<LmStudioAvailableModel>>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct LmStudioAvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub supports_tool_calls: bool,
    pub supports_images: bool,
}

#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct DeepseekSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<DeepseekAvailableModel>>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct DeepseekAvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub max_output_tokens: Option<u64>,
}

#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct MistralSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<MistralAvailableModel>>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct MistralAvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub max_output_tokens: Option<u64>,
    pub max_completion_tokens: Option<u64>,
    pub supports_tools: Option<bool>,
    pub supports_images: Option<bool>,
    pub supports_thinking: Option<bool>,
}

#[with_fallible_options]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct OpenAiSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<OpenAiAvailableModel>>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenAiAvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub max_output_tokens: Option<u64>,
    pub max_completion_tokens: Option<u64>,
    pub reasoning_effort: Option<OpenAiReasoningEffort>,
    #[serde(default)]
    pub capabilities: OpenAiModelCapabilities,
}

pub use language_model_core::ReasoningEffort as OpenAiReasoningEffort;

impl MergeFrom for OpenAiReasoningEffort {
    fn merge_from(&mut self, other: &Self) {
        *self = *other;
    }
}

#[with_fallible_options]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct OpenAiCompatibleSettingsContent {
    pub api_url: String,
    pub available_models: Vec<OpenAiCompatibleAvailableModel>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenAiModelCapabilities {
    #[serde(default = "default_true")]
    pub chat_completions: bool,
}

impl Default for OpenAiModelCapabilities {
    fn default() -> Self {
        Self {
            chat_completions: default_true(),
        }
    }
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenAiCompatibleAvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub max_output_tokens: Option<u64>,
    pub max_completion_tokens: Option<u64>,
    pub reasoning_effort: Option<OpenAiReasoningEffort>,
    #[serde(default)]
    pub capabilities: OpenAiCompatibleModelCapabilities,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenAiCompatibleModelCapabilities {
    pub tools: bool,
    pub images: bool,
    pub parallel_tool_calls: bool,
    pub prompt_cache_key: bool,
    #[serde(default = "default_true")]
    pub chat_completions: bool,
}

impl Default for OpenAiCompatibleModelCapabilities {
    fn default() -> Self {
        Self {
            tools: true,
            images: false,
            parallel_tool_calls: false,
            prompt_cache_key: false,
            chat_completions: default_true(),
        }
    }
}
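
// Note: the `capabilities` fields of this type are marked `#[serde(default)]`, so omitting the
// capabilities object in settings falls back to this impl: tool calls enabled, images, parallel
// tool calls, and prompt cache keys disabled, and chat completions on via `default_true`.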

#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct VercelSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<VercelAvailableModel>>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct VercelAvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub max_output_tokens: Option<u64>,
    pub max_completion_tokens: Option<u64>,
}

#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct VercelAiGatewaySettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<VercelAiGatewayAvailableModel>>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct VercelAiGatewayAvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub max_output_tokens: Option<u64>,
    pub max_completion_tokens: Option<u64>,
    #[serde(default)]
    pub capabilities: OpenAiCompatibleModelCapabilities,
}

#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<GoogleAvailableModel>>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct GoogleAvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub mode: Option<ModelMode>,
}

#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct XAiSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<XaiAvailableModel>>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct XaiAvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub max_output_tokens: Option<u64>,
    pub max_completion_tokens: Option<u64>,
    pub supports_images: Option<bool>,
    pub supports_tools: Option<bool>,
    pub parallel_tool_calls: Option<bool>,
}

#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct ZedDotDevSettingsContent {
    pub available_models: Option<Vec<ZedDotDevAvailableModel>>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct ZedDotDevAvailableModel {
    /// The provider of the language model.
    pub provider: ZedDotDevAvailableProvider,
    /// The model's name in the provider's API. e.g. claude-3-5-sonnet-20240620
    pub name: String,
    /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
    pub display_name: Option<String>,
    /// The size of the context window, indicating the maximum number of tokens the model can process.
    pub max_tokens: usize,
    /// The maximum number of output tokens allowed by the model.
    pub max_output_tokens: Option<u64>,
    /// The maximum number of completion tokens allowed by the model (o1-* only)
    pub max_completion_tokens: Option<u64>,
    /// Override this model with a different Anthropic model for tool calls.
    pub tool_override: Option<String>,
    /// Indicates whether this custom model supports caching.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    /// The default temperature to use for this model.
    #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
    pub default_temperature: Option<f32>,
    /// Any extra beta headers to provide when using the model.
    #[serde(default)]
    pub extra_beta_headers: Vec<String>,
    /// The model's mode (e.g. thinking)
    pub mode: Option<ModelMode>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
#[serde(rename_all = "lowercase")]
pub enum ZedDotDevAvailableProvider {
    Anthropic,
    OpenAi,
    Google,
}

#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct OpenRouterSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<OpenRouterAvailableModel>>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenRouterAvailableModel {
    pub name: String,
    pub display_name: Option<String>,
    pub max_tokens: u64,
    pub max_output_tokens: Option<u64>,
    pub max_completion_tokens: Option<u64>,
    pub supports_tools: Option<bool>,
    pub supports_images: Option<bool>,
    pub mode: Option<ModelMode>,
    pub provider: Option<OpenRouterProvider>,
}

#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenRouterProvider {
    order: Option<Vec<String>>,
    #[serde(default = "default_true")]
    allow_fallbacks: bool,
    #[serde(default)]
    require_parameters: bool,
    #[serde(default)]
    data_collection: DataCollection,
    only: Option<Vec<String>>,
    ignore: Option<Vec<String>>,
    quantizations: Option<Vec<String>>,
    sort: Option<String>,
}

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
#[serde(rename_all = "lowercase")]
pub enum DataCollection {
    #[default]
    Allow,
    Disallow,
}

fn default_true() -> bool {
    true
}

/// Configuration for caching language model messages.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct LanguageModelCacheConfiguration {
    pub max_cache_anchors: usize,
    pub should_speculate: bool,
    pub min_total_token: u64,
}

pub use language_model_core::ModelMode;

impl MergeFrom for ModelMode {
    fn merge_from(&mut self, other: &Self) {
        *self = *other;
    }
}
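
// A minimal deserialization sketch showing how a provider settings fragment maps onto
// `AllLanguageModelSettingsContent`, and how untagged `KeepAlive` values parse. Assumes
// `serde_json` is available as a dev-dependency and that `#[with_fallible_options]` leaves the
// serde derives above intact; the field values are illustrative, not defaults.
#[cfg(test)]
mod example_tests {
    use super::*;

    #[test]
    fn deserializes_minimal_settings_fragment() {
        let json = r#"{
            "ollama": {
                "api_url": "http://localhost:11434",
                "available_models": [
                    {
                        "name": "llama3.2:latest",
                        "display_name": "Llama 3.2",
                        "max_tokens": 8192
                    }
                ]
            },
            "zed.dev": {}
        }"#;

        let settings: AllLanguageModelSettingsContent =
            serde_json::from_str(json).expect("settings fragment should deserialize");

        let ollama = settings.ollama.expect("ollama settings present");
        assert_eq!(ollama.api_url.as_deref(), Some("http://localhost:11434"));
        assert_eq!(ollama.available_models.unwrap()[0].max_tokens, 8192);
    }

    #[test]
    fn keep_alive_accepts_seconds_or_duration_strings() {
        // `#[serde(untagged)]` means either representation is accepted.
        let seconds: KeepAlive = serde_json::from_str("300").unwrap();
        assert_eq!(seconds, KeepAlive::Seconds(300));

        let duration: KeepAlive = serde_json::from_str("\"10m\"").unwrap();
        assert_eq!(duration, KeepAlive::Duration("10m".into()));
    }
}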