1use collections::HashMap;
2use schemars::JsonSchema;
3use serde::{Deserialize, Serialize};
4use settings_macros::{MergeFrom, with_fallible_options};
5use strum::EnumString;
6
7use std::sync::Arc;
8
/// Top-level settings content covering every supported language model provider.
///
/// Each field carries the user-supplied configuration for one provider;
/// `None` means the user provided no overrides for that provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub bedrock: Option<AmazonBedrockSettingsContent>,
    pub deepseek: Option<DeepseekSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub lmstudio: Option<LmStudioSettingsContent>,
    pub mistral: Option<MistralSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub open_router: Option<OpenRouterSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    /// OpenAI-compatible providers, keyed by provider name
    /// (presumably a user-chosen identifier — confirm against the settings schema).
    pub openai_compatible: Option<HashMap<Arc<str>, OpenAiCompatibleSettingsContent>>,
    pub vercel: Option<VercelSettingsContent>,
    pub x_ai: Option<XAiSettingsContent>,
    /// Settings for Zed's hosted models; appears under the "zed.dev" key in JSON.
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
}
27
/// User-configurable settings for the Anthropic provider.
#[with_fallible_options]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct AnthropicSettingsContent {
    /// The Anthropic API endpoint URL (overrides the built-in default — confirm in provider code).
    pub api_url: Option<String>,
    /// Extra models to expose beyond the built-in list.
    pub available_models: Option<Vec<AnthropicAvailableModel>>,
}
34
/// Description of one user-configured Anthropic model.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct AnthropicAvailableModel {
    /// The model's name in the Anthropic API. e.g. claude-3-5-sonnet-latest, claude-3-opus-20240229, etc
    pub name: String,
    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// A model `name` to substitute when calling tools, in case the primary model doesn't support tool calling.
    pub tool_override: Option<String>,
    /// Configuration of Anthropic's caching API.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    /// The maximum number of output tokens the model may produce.
    pub max_output_tokens: Option<u64>,
    /// The default sampling temperature; serialized with two decimal places.
    #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
    pub default_temperature: Option<f32>,
    /// Additional `anthropic-beta` headers to send with requests (empty when omitted).
    #[serde(default)]
    pub extra_beta_headers: Vec<String>,
    /// The model's mode (e.g. thinking)
    pub mode: Option<ModelMode>,
}
56
/// User-configurable settings for the Amazon Bedrock provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct AmazonBedrockSettingsContent {
    /// Extra models to expose beyond the built-in list.
    pub available_models: Option<Vec<BedrockAvailableModel>>,
    /// Custom Bedrock endpoint URL.
    pub endpoint_url: Option<String>,
    /// AWS region to use.
    pub region: Option<String>,
    /// AWS named profile to authenticate with (see `BedrockAuthMethodContent::NamedProfile`).
    pub profile: Option<String>,
    /// How to authenticate against AWS.
    pub authentication_method: Option<BedrockAuthMethodContent>,
    // NOTE(review): semantics not visible here — presumably allows cross-region/global
    // inference; confirm against the Bedrock provider implementation.
    pub allow_global: Option<bool>,
    /// Enable the 1M token extended context window beta for supported Anthropic models.
    pub allow_extended_context: Option<bool>,
}
69
/// Description of one user-configured Amazon Bedrock model.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct BedrockAvailableModel {
    /// The model's identifier in the Bedrock API.
    pub name: String,
    /// The name shown in Zed's UI; falls back to `name` when absent — TODO confirm.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// Prompt-caching configuration, if the model supports it.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    /// The maximum number of output tokens the model may produce.
    pub max_output_tokens: Option<u64>,
    /// The default sampling temperature; serialized with two decimal places.
    #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
    pub default_temperature: Option<f32>,
    /// The model's mode (e.g. thinking).
    pub mode: Option<ModelMode>,
}
82
/// How Zed authenticates against AWS for Bedrock requests.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub enum BedrockAuthMethodContent {
    /// Use a named AWS profile (serialized as "named_profile").
    #[serde(rename = "named_profile")]
    NamedProfile,
    /// Use AWS IAM Identity Center / SSO (serialized as "sso").
    #[serde(rename = "sso")]
    SingleSignOn,
    /// Use a static API key (serialized as "api_key").
    #[serde(rename = "api_key")]
    ApiKey,
    /// IMDSv2, PodIdentity, env vars, etc.
    #[serde(rename = "default")]
    Automatic,
}
95
/// User-configurable settings for the Ollama provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct OllamaSettingsContent {
    /// The Ollama server's API URL.
    pub api_url: Option<String>,
    // NOTE(review): presumably controls automatic discovery of locally
    // installed models; confirm against the Ollama provider implementation.
    pub auto_discover: Option<bool>,
    /// Extra models to expose beyond what the server reports.
    pub available_models: Option<Vec<OllamaAvailableModel>>,
    /// Default context window size applied to models — TODO confirm precedence
    /// versus per-model `max_tokens`.
    pub context_window: Option<u64>,
}
104
/// Description of one user-configured Ollama model.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OllamaAvailableModel {
    /// The model name in the Ollama API (e.g. "llama3.2:latest")
    pub name: String,
    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel.
    pub display_name: Option<String>,
    /// The Context Length parameter to the model (aka num_ctx or n_ctx)
    pub max_tokens: u64,
    /// The number of seconds to keep the connection open after the last request
    pub keep_alive: Option<KeepAlive>,
    /// Whether the model supports tools
    pub supports_tools: Option<bool>,
    /// Whether the model supports vision
    pub supports_images: Option<bool>,
    /// Whether to enable think mode
    pub supports_thinking: Option<bool>,
}
123
/// How long Ollama keeps a model loaded after the last request.
///
/// Untagged: a JSON number deserializes as `Seconds`, a JSON string as
/// `Duration`. Negative `Seconds` means "never unload" (see `indefinite`).
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq, JsonSchema, MergeFrom)]
#[serde(untagged)]
pub enum KeepAlive {
    /// Keep model alive for N seconds
    Seconds(isize),
    /// Keep model alive for a fixed duration. Accepts durations like "5m", "10m", "1h", "1d", etc.
    Duration(String),
}
132
133impl KeepAlive {
134 /// Keep model alive until a new model is loaded or until Ollama shuts down
135 pub fn indefinite() -> Self {
136 Self::Seconds(-1)
137 }
138}
139
140impl Default for KeepAlive {
141 fn default() -> Self {
142 Self::indefinite()
143 }
144}
145
/// User-configurable settings for the LM Studio provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct LmStudioSettingsContent {
    /// The LM Studio server's API URL.
    pub api_url: Option<String>,
    /// Extra models to expose beyond what the server reports.
    pub available_models: Option<Vec<LmStudioAvailableModel>>,
}
152
/// Description of one user-configured LM Studio model.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct LmStudioAvailableModel {
    /// The model's identifier in the LM Studio API.
    pub name: String,
    /// The name shown in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// Whether the model supports tool calls. Required (not optional),
    /// unlike the flags on most other providers in this file.
    pub supports_tool_calls: bool,
    /// Whether the model accepts image input. Required, like `supports_tool_calls`.
    pub supports_images: bool,
}
162
/// User-configurable settings for the DeepSeek provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct DeepseekSettingsContent {
    /// The DeepSeek API endpoint URL.
    pub api_url: Option<String>,
    /// Extra models to expose beyond the built-in list.
    pub available_models: Option<Vec<DeepseekAvailableModel>>,
}
169
/// Description of one user-configured DeepSeek model.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct DeepseekAvailableModel {
    /// The model's identifier in the DeepSeek API.
    pub name: String,
    /// The name shown in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The maximum number of output tokens the model may produce.
    pub max_output_tokens: Option<u64>,
}
178
/// User-configurable settings for the Mistral provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct MistralSettingsContent {
    /// The Mistral API endpoint URL.
    pub api_url: Option<String>,
    /// Extra models to expose beyond the built-in list.
    pub available_models: Option<Vec<MistralAvailableModel>>,
}
185
/// Description of one user-configured Mistral model.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct MistralAvailableModel {
    /// The model's identifier in the Mistral API.
    pub name: String,
    /// The name shown in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The maximum number of output tokens the model may produce.
    pub max_output_tokens: Option<u64>,
    /// Maximum completion tokens — TODO confirm how this differs from
    /// `max_output_tokens` in the Mistral provider implementation.
    pub max_completion_tokens: Option<u64>,
    /// Whether the model supports tool calls.
    pub supports_tools: Option<bool>,
    /// Whether the model accepts image input.
    pub supports_images: Option<bool>,
    /// Whether the model supports a thinking/reasoning mode.
    pub supports_thinking: Option<bool>,
}
198
/// User-configurable settings for the OpenAI provider.
// NOTE(review): unlike most sibling settings structs this one does not derive
// `Default` — confirm whether that is intentional.
#[with_fallible_options]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct OpenAiSettingsContent {
    /// The OpenAI API endpoint URL.
    pub api_url: Option<String>,
    /// Extra models to expose beyond the built-in list.
    pub available_models: Option<Vec<OpenAiAvailableModel>>,
}
205
/// Description of one user-configured OpenAI model.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenAiAvailableModel {
    /// The model's identifier in the OpenAI API.
    pub name: String,
    /// The name shown in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The maximum number of output tokens the model may produce.
    pub max_output_tokens: Option<u64>,
    /// Maximum completion tokens — TODO confirm how this differs from
    /// `max_output_tokens` in the OpenAI provider implementation.
    pub max_completion_tokens: Option<u64>,
    /// Reasoning effort hint for reasoning-capable models.
    pub reasoning_effort: Option<OpenAiReasoningEffort>,
    /// Endpoint capabilities; defaults via `OpenAiModelCapabilities::default` when omitted.
    #[serde(default)]
    pub capabilities: OpenAiModelCapabilities,
}
218
/// Reasoning effort levels for OpenAI reasoning models.
///
/// Serialized (serde) and parsed (strum `FromStr`) in lowercase, so e.g.
/// `XHigh` round-trips as "xhigh".
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone, EnumString, JsonSchema, MergeFrom)]
#[serde(rename_all = "lowercase")]
#[strum(serialize_all = "lowercase")]
pub enum OpenAiReasoningEffort {
    Minimal,
    Low,
    Medium,
    High,
    XHigh,
}
229
/// Settings for one OpenAI-compatible provider.
///
/// Both fields are required (not `Option`): a compatible endpoint is useless
/// without a URL and an explicit model list.
#[with_fallible_options]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct OpenAiCompatibleSettingsContent {
    /// The provider's API endpoint URL.
    pub api_url: String,
    /// The models this endpoint serves.
    pub available_models: Vec<OpenAiCompatibleAvailableModel>,
}
236
/// Capabilities of an OpenAI model endpoint.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenAiModelCapabilities {
    /// Whether the endpoint supports the chat completions API; defaults to true.
    #[serde(default = "default_true")]
    pub chat_completions: bool,
}
243
244impl Default for OpenAiModelCapabilities {
245 fn default() -> Self {
246 Self {
247 chat_completions: default_true(),
248 }
249 }
250}
251
/// Description of one model served by an OpenAI-compatible provider.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenAiCompatibleAvailableModel {
    /// The model's identifier on the endpoint.
    pub name: String,
    /// The name shown in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The maximum number of output tokens the model may produce.
    pub max_output_tokens: Option<u64>,
    /// Maximum completion tokens — TODO confirm how this differs from
    /// `max_output_tokens` in the provider implementation.
    pub max_completion_tokens: Option<u64>,
    /// Model capabilities; defaults via `OpenAiCompatibleModelCapabilities::default` when omitted.
    #[serde(default)]
    pub capabilities: OpenAiCompatibleModelCapabilities,
}
263
/// Capabilities of an OpenAI-compatible model.
///
/// Note: only `chat_completions` has a serde default; the other flags are
/// required when the `capabilities` object is present in settings.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenAiCompatibleModelCapabilities {
    /// Whether the model supports tool calls.
    pub tools: bool,
    /// Whether the model accepts image input.
    pub images: bool,
    /// Whether the model supports parallel tool calls.
    pub parallel_tool_calls: bool,
    /// Whether the endpoint accepts a prompt cache key — TODO confirm exact
    /// semantics against the provider implementation.
    pub prompt_cache_key: bool,
    /// Whether the endpoint supports the chat completions API; defaults to true.
    #[serde(default = "default_true")]
    pub chat_completions: bool,
}
274
275impl Default for OpenAiCompatibleModelCapabilities {
276 fn default() -> Self {
277 Self {
278 tools: true,
279 images: false,
280 parallel_tool_calls: false,
281 prompt_cache_key: false,
282 chat_completions: default_true(),
283 }
284 }
285}
286
/// User-configurable settings for the Vercel provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct VercelSettingsContent {
    /// The Vercel API endpoint URL.
    pub api_url: Option<String>,
    /// Extra models to expose beyond the built-in list.
    pub available_models: Option<Vec<VercelAvailableModel>>,
}
293
/// Description of one user-configured Vercel model.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct VercelAvailableModel {
    /// The model's identifier in the Vercel API.
    pub name: String,
    /// The name shown in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The maximum number of output tokens the model may produce.
    pub max_output_tokens: Option<u64>,
    /// Maximum completion tokens — TODO confirm how this differs from
    /// `max_output_tokens` in the provider implementation.
    pub max_completion_tokens: Option<u64>,
}
303
/// User-configurable settings for the Google provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct GoogleSettingsContent {
    /// The Google API endpoint URL.
    pub api_url: Option<String>,
    /// Extra models to expose beyond the built-in list.
    pub available_models: Option<Vec<GoogleAvailableModel>>,
}
310
/// Description of one user-configured Google model.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct GoogleAvailableModel {
    /// The model's identifier in the Google API.
    pub name: String,
    /// The name shown in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The model's mode (e.g. thinking).
    pub mode: Option<ModelMode>,
}
319
/// User-configurable settings for the xAI provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct XAiSettingsContent {
    /// The xAI API endpoint URL.
    pub api_url: Option<String>,
    /// Extra models to expose beyond the built-in list.
    pub available_models: Option<Vec<XaiAvailableModel>>,
}
326
/// Description of one user-configured xAI model.
// NOTE(review): casing is inconsistent with the settings struct (`XaiAvailableModel`
// vs `XAiSettingsContent`); renaming would break callers, so it is left as-is.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct XaiAvailableModel {
    /// The model's identifier in the xAI API.
    pub name: String,
    /// The name shown in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The maximum number of output tokens the model may produce.
    pub max_output_tokens: Option<u64>,
    /// Maximum completion tokens — TODO confirm how this differs from
    /// `max_output_tokens` in the provider implementation.
    pub max_completion_tokens: Option<u64>,
    /// Whether the model accepts image input.
    pub supports_images: Option<bool>,
    /// Whether the model supports tool calls.
    pub supports_tools: Option<bool>,
    /// Whether the model supports parallel tool calls.
    pub parallel_tool_calls: Option<bool>,
}
339
/// User-configurable settings for Zed's hosted ("zed.dev") models.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct ZedDotDevSettingsContent {
    /// Extra models to expose beyond the built-in list.
    pub available_models: Option<Vec<ZedDotDevAvailableModel>>,
}
345
/// Description of one user-configured model served through zed.dev,
/// backed by one of the upstream providers in `ZedDotDevAvailableProvider`.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct ZedDotDevAvailableModel {
    /// The provider of the language model.
    pub provider: ZedDotDevAvailableProvider,
    /// The model's name in the provider's API. e.g. claude-3-5-sonnet-20240620
    pub name: String,
    /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
    pub display_name: Option<String>,
    /// The size of the context window, indicating the maximum number of tokens the model can process.
    // NOTE(review): `usize` here, while every other model struct in this file
    // uses `u64`; changing the type would break callers, so it is left as-is.
    pub max_tokens: usize,
    /// The maximum number of output tokens allowed by the model.
    pub max_output_tokens: Option<u64>,
    /// The maximum number of completion tokens allowed by the model (o1-* only)
    pub max_completion_tokens: Option<u64>,
    /// Override this model with a different Anthropic model for tool calls.
    pub tool_override: Option<String>,
    /// Indicates whether this custom model supports caching.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    /// The default temperature to use for this model.
    #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
    pub default_temperature: Option<f32>,
    /// Any extra beta headers to provide when using the model.
    #[serde(default)]
    pub extra_beta_headers: Vec<String>,
    /// The model's mode (e.g. thinking)
    pub mode: Option<ModelMode>,
}
374
/// Upstream providers available through zed.dev; serialized in lowercase
/// ("anthropic", "openai", "google").
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
#[serde(rename_all = "lowercase")]
pub enum ZedDotDevAvailableProvider {
    Anthropic,
    OpenAi,
    Google,
}
382
/// User-configurable settings for the OpenRouter provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct OpenRouterSettingsContent {
    /// The OpenRouter API endpoint URL.
    pub api_url: Option<String>,
    /// Extra models to expose beyond the built-in list.
    pub available_models: Option<Vec<OpenRouterAvailableModel>>,
}
389
/// Description of one user-configured OpenRouter model.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenRouterAvailableModel {
    /// The model's identifier in the OpenRouter API.
    pub name: String,
    /// The name shown in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The maximum number of output tokens the model may produce.
    pub max_output_tokens: Option<u64>,
    /// Maximum completion tokens — TODO confirm how this differs from
    /// `max_output_tokens` in the provider implementation.
    pub max_completion_tokens: Option<u64>,
    /// Whether the model supports tool calls.
    pub supports_tools: Option<bool>,
    /// Whether the model accepts image input.
    pub supports_images: Option<bool>,
    /// The model's mode (e.g. thinking).
    pub mode: Option<ModelMode>,
    /// Routing preferences forwarded to OpenRouter.
    pub provider: Option<OpenRouterProvider>,
}
403
404#[with_fallible_options]
405#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
406pub struct OpenRouterProvider {
407 order: Option<Vec<String>>,
408 #[serde(default = "default_true")]
409 allow_fallbacks: bool,
410 #[serde(default)]
411 require_parameters: bool,
412 #[serde(default)]
413 data_collection: DataCollection,
414 only: Option<Vec<String>>,
415 ignore: Option<Vec<String>>,
416 quantizations: Option<Vec<String>>,
417 sort: Option<String>,
418}
419
/// Whether OpenRouter may route to providers that collect request data.
/// Serialized in lowercase ("allow"/"disallow"); defaults to `Allow`.
#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
#[serde(rename_all = "lowercase")]
pub enum DataCollection {
    #[default]
    Allow,
    Disallow,
}
427
/// Serde default helper: fields annotated `#[serde(default = "default_true")]`
/// deserialize to `true` when absent.
fn default_true() -> bool {
    true
}
431
/// Configuration for caching language model messages.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct LanguageModelCacheConfiguration {
    /// Maximum number of cache anchors (breakpoints) to place — TODO confirm
    /// exact semantics against the provider's caching implementation.
    pub max_cache_anchors: usize,
    /// Whether to speculatively cache — TODO confirm semantics against the
    /// provider's caching implementation.
    pub should_speculate: bool,
    /// Minimum total token count before caching applies — TODO confirm
    /// (field name is singular "token" in the serialized settings).
    pub min_total_token: u64,
}
440
/// Operating mode of a model.
///
/// Internally tagged on "type" in lowercase, e.g. `{"type": "default"}` or
/// `{"type": "thinking", "budget_tokens": 4096}`.
#[derive(
    Copy, Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, MergeFrom,
)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ModelMode {
    /// Standard (non-reasoning) operation.
    #[default]
    Default,
    /// Extended-thinking / reasoning mode.
    Thinking {
        /// The maximum number of tokens to use for reasoning. Must be lower than the model's `max_output_tokens`.
        budget_tokens: Option<u32>,
    },
}