1use collections::HashMap;
2use schemars::JsonSchema;
3use serde::{Deserialize, Serialize};
4use settings_macros::{MergeFrom, with_fallible_options};
5use strum::EnumString;
6
7use std::sync::Arc;
8
/// Top-level settings for all built-in language-model providers, grouped by
/// provider. Each field is `None` when the user has not configured that
/// provider, in which case provider defaults apply.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub bedrock: Option<AmazonBedrockSettingsContent>,
    pub deepseek: Option<DeepseekSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub lmstudio: Option<LmStudioSettingsContent>,
    pub mistral: Option<MistralSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub open_router: Option<OpenRouterSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    /// OpenAI-compatible providers, keyed by a user-chosen provider id.
    pub openai_compatible: Option<HashMap<Arc<str>, OpenAiCompatibleSettingsContent>>,
    pub vercel: Option<VercelSettingsContent>,
    pub x_ai: Option<XAiSettingsContent>,
    // Serialized under the key "zed.dev", which is not a valid Rust identifier.
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
}
27
/// User settings for the Anthropic provider.
#[with_fallible_options]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct AnthropicSettingsContent {
    /// Custom API endpoint URL for the Anthropic provider, if overridden.
    pub api_url: Option<String>,
    /// Additional models to expose beyond the built-in list.
    pub available_models: Option<Vec<AnthropicAvailableModel>>,
}
34
/// A user-declared Anthropic model entry.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct AnthropicAvailableModel {
    /// The model's name in the Anthropic API. e.g. claude-3-5-sonnet-latest, claude-3-opus-20240229, etc
    pub name: String,
    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// A model `name` to substitute when calling tools, in case the primary model doesn't support tool calling.
    pub tool_override: Option<String>,
    /// Configuration of Anthropic's caching API.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    /// The maximum number of output tokens allowed by the model.
    pub max_output_tokens: Option<u64>,
    /// Default sampling temperature; serialized rounded to two decimal places.
    #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
    pub default_temperature: Option<f32>,
    /// Extra `anthropic-beta` headers to send with requests (empty when omitted).
    #[serde(default)]
    pub extra_beta_headers: Vec<String>,
    /// The model's mode (e.g. thinking)
    pub mode: Option<ModelMode>,
}
56
/// User settings for the Amazon Bedrock provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct AmazonBedrockSettingsContent {
    /// Additional models to expose beyond the built-in list.
    pub available_models: Option<Vec<BedrockAvailableModel>>,
    /// Custom Bedrock endpoint URL, if overridden.
    pub endpoint_url: Option<String>,
    /// AWS region to use for Bedrock requests.
    pub region: Option<String>,
    /// AWS profile name (used with the named-profile auth method).
    pub profile: Option<String>,
    /// How to authenticate with AWS; see `BedrockAuthMethodContent`.
    pub authentication_method: Option<BedrockAuthMethodContent>,
    // NOTE(review): presumably opts into Bedrock's cross-region/global
    // inference — confirm against the consuming provider code.
    pub allow_global: Option<bool>,
}
67
/// A user-declared Amazon Bedrock model entry.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct BedrockAvailableModel {
    /// The model's identifier in the Bedrock API.
    pub name: String,
    /// The model's name in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// Prompt-caching configuration, for models that support it.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    /// The maximum number of output tokens allowed by the model.
    pub max_output_tokens: Option<u64>,
    /// Default sampling temperature; serialized rounded to two decimal places.
    #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
    pub default_temperature: Option<f32>,
    /// The model's mode (e.g. thinking)
    pub mode: Option<ModelMode>,
}
80
/// How Zed authenticates with AWS for Bedrock. Serialized names (via
/// `serde(rename)`) are the strings users write in their settings file.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub enum BedrockAuthMethodContent {
    /// Use a named profile from the local AWS config ("named_profile").
    #[serde(rename = "named_profile")]
    NamedProfile,
    /// Use AWS IAM Identity Center single sign-on ("sso").
    #[serde(rename = "sso")]
    SingleSignOn,
    /// Use an explicit API key ("api_key").
    #[serde(rename = "api_key")]
    ApiKey,
    /// IMDSv2, PodIdentity, env vars, etc.
    #[serde(rename = "default")]
    Automatic,
}
93
/// User settings for the Ollama provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct OllamaSettingsContent {
    /// Custom Ollama server URL, if overridden.
    pub api_url: Option<String>,
    /// Whether to automatically discover models from the Ollama server.
    pub auto_discover: Option<bool>,
    /// Additional models to expose beyond any discovered ones.
    pub available_models: Option<Vec<OllamaAvailableModel>>,
}
101
/// A user-declared Ollama model entry.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OllamaAvailableModel {
    /// The model name in the Ollama API (e.g. "llama3.2:latest")
    pub name: String,
    /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel.
    pub display_name: Option<String>,
    /// The Context Length parameter to the model (aka num_ctx or n_ctx)
    pub max_tokens: u64,
    /// The number of seconds to keep the connection open after the last request
    pub keep_alive: Option<KeepAlive>,
    /// Whether the model supports tools
    pub supports_tools: Option<bool>,
    /// Whether the model supports vision
    pub supports_images: Option<bool>,
    /// Whether to enable think mode
    pub supports_thinking: Option<bool>,
}
120
/// How long Ollama keeps a model loaded after the last request.
///
/// Untagged: a JSON number deserializes as `Seconds`, a JSON string as
/// `Duration` — variant order matters for untagged deserialization.
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq, JsonSchema, MergeFrom)]
#[serde(untagged)]
pub enum KeepAlive {
    /// Keep model alive for N seconds
    Seconds(isize),
    /// Keep model alive for a fixed duration. Accepts durations like "5m", "10m", "1h", "1d", etc.
    Duration(String),
}
129
130impl KeepAlive {
131 /// Keep model alive until a new model is loaded or until Ollama shuts down
132 pub fn indefinite() -> Self {
133 Self::Seconds(-1)
134 }
135}
136
137impl Default for KeepAlive {
138 fn default() -> Self {
139 Self::indefinite()
140 }
141}
142
/// User settings for the LM Studio provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct LmStudioSettingsContent {
    /// Custom LM Studio server URL, if overridden.
    pub api_url: Option<String>,
    /// Additional models to expose.
    pub available_models: Option<Vec<LmStudioAvailableModel>>,
}
149
/// A user-declared LM Studio model entry.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct LmStudioAvailableModel {
    /// The model's identifier in the LM Studio API.
    pub name: String,
    /// The model's name in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// Whether the model supports tool calls (required, unlike the
    /// `Option<bool>` flags used by other providers in this file).
    pub supports_tool_calls: bool,
    /// Whether the model supports image input.
    pub supports_images: bool,
}
159
/// User settings for the DeepSeek provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct DeepseekSettingsContent {
    /// Custom API endpoint URL, if overridden.
    pub api_url: Option<String>,
    /// Additional models to expose beyond the built-in list.
    pub available_models: Option<Vec<DeepseekAvailableModel>>,
}
166
/// A user-declared DeepSeek model entry.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct DeepseekAvailableModel {
    /// The model's identifier in the DeepSeek API.
    pub name: String,
    /// The model's name in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The maximum number of output tokens allowed by the model.
    pub max_output_tokens: Option<u64>,
}
175
/// User settings for the Mistral provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct MistralSettingsContent {
    /// Custom API endpoint URL, if overridden.
    pub api_url: Option<String>,
    /// Additional models to expose beyond the built-in list.
    pub available_models: Option<Vec<MistralAvailableModel>>,
}
182
/// A user-declared Mistral model entry.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct MistralAvailableModel {
    /// The model's identifier in the Mistral API.
    pub name: String,
    /// The model's name in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The maximum number of output tokens allowed by the model.
    pub max_output_tokens: Option<u64>,
    /// The maximum number of completion tokens allowed by the model.
    pub max_completion_tokens: Option<u64>,
    /// Whether the model supports tool calling.
    pub supports_tools: Option<bool>,
    /// Whether the model supports image input.
    pub supports_images: Option<bool>,
    /// Whether the model supports thinking mode.
    pub supports_thinking: Option<bool>,
}
195
/// User settings for the OpenAI provider.
#[with_fallible_options]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct OpenAiSettingsContent {
    /// Custom API endpoint URL, if overridden.
    pub api_url: Option<String>,
    /// Additional models to expose beyond the built-in list.
    pub available_models: Option<Vec<OpenAiAvailableModel>>,
}
202
/// A user-declared OpenAI model entry.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenAiAvailableModel {
    /// The model's identifier in the OpenAI API.
    pub name: String,
    /// The model's name in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The maximum number of output tokens allowed by the model.
    pub max_output_tokens: Option<u64>,
    /// The maximum number of completion tokens allowed by the model.
    pub max_completion_tokens: Option<u64>,
    /// Reasoning effort to request, for models that accept it.
    pub reasoning_effort: Option<OpenAiReasoningEffort>,
    /// Capabilities default via `OpenAiModelCapabilities::default()` when omitted.
    #[serde(default)]
    pub capabilities: OpenAiModelCapabilities,
}
215
/// Reasoning-effort levels for OpenAI models. Serialized (and parsed via
/// `EnumString`) in lowercase: "minimal", "low", "medium", "high", "xhigh".
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone, EnumString, JsonSchema, MergeFrom)]
#[serde(rename_all = "lowercase")]
#[strum(serialize_all = "lowercase")]
pub enum OpenAiReasoningEffort {
    Minimal,
    Low,
    Medium,
    High,
    XHigh,
}
226
/// Settings for a single OpenAI-compatible provider. Unlike other providers,
/// both fields are required: there are no built-in defaults for a custom
/// endpoint.
#[with_fallible_options]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct OpenAiCompatibleSettingsContent {
    /// The provider's API endpoint URL.
    pub api_url: String,
    /// The models served by this provider.
    pub available_models: Vec<OpenAiCompatibleAvailableModel>,
}
233
/// Capability flags for an OpenAI model.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenAiModelCapabilities {
    /// Whether the model supports the Chat Completions API; defaults to `true`
    /// when omitted from the settings file.
    #[serde(default = "default_true")]
    pub chat_completions: bool,
}
240
241impl Default for OpenAiModelCapabilities {
242 fn default() -> Self {
243 Self {
244 chat_completions: default_true(),
245 }
246 }
247}
248
/// A model entry for an OpenAI-compatible provider.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenAiCompatibleAvailableModel {
    /// The model's identifier in the provider's API.
    pub name: String,
    /// The model's name in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The maximum number of output tokens allowed by the model.
    pub max_output_tokens: Option<u64>,
    /// The maximum number of completion tokens allowed by the model.
    pub max_completion_tokens: Option<u64>,
    /// Capabilities default via `OpenAiCompatibleModelCapabilities::default()`
    /// when omitted.
    #[serde(default)]
    pub capabilities: OpenAiCompatibleModelCapabilities,
}
260
/// Capability flags for a model on an OpenAI-compatible provider.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenAiCompatibleModelCapabilities {
    /// Whether the model supports tool calling.
    pub tools: bool,
    /// Whether the model supports image input.
    pub images: bool,
    /// Whether the model supports parallel tool calls.
    pub parallel_tool_calls: bool,
    /// Whether the provider accepts a prompt cache key.
    pub prompt_cache_key: bool,
    /// Whether the model supports the Chat Completions API; defaults to `true`
    /// when omitted from the settings file.
    #[serde(default = "default_true")]
    pub chat_completions: bool,
}
271
272impl Default for OpenAiCompatibleModelCapabilities {
273 fn default() -> Self {
274 Self {
275 tools: true,
276 images: false,
277 parallel_tool_calls: false,
278 prompt_cache_key: false,
279 chat_completions: default_true(),
280 }
281 }
282}
283
/// User settings for the Vercel provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct VercelSettingsContent {
    /// Custom API endpoint URL, if overridden.
    pub api_url: Option<String>,
    /// Additional models to expose beyond the built-in list.
    pub available_models: Option<Vec<VercelAvailableModel>>,
}
290
/// A user-declared Vercel model entry.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct VercelAvailableModel {
    /// The model's identifier in the Vercel API.
    pub name: String,
    /// The model's name in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The maximum number of output tokens allowed by the model.
    pub max_output_tokens: Option<u64>,
    /// The maximum number of completion tokens allowed by the model.
    pub max_completion_tokens: Option<u64>,
}
300
/// User settings for the Google provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct GoogleSettingsContent {
    /// Custom API endpoint URL, if overridden.
    pub api_url: Option<String>,
    /// Additional models to expose beyond the built-in list.
    pub available_models: Option<Vec<GoogleAvailableModel>>,
}
307
/// A user-declared Google model entry.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct GoogleAvailableModel {
    /// The model's identifier in the Google API.
    pub name: String,
    /// The model's name in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The model's mode (e.g. thinking).
    pub mode: Option<ModelMode>,
}
316
/// User settings for the xAI provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct XAiSettingsContent {
    /// Custom API endpoint URL, if overridden.
    pub api_url: Option<String>,
    /// Additional models to expose beyond the built-in list.
    pub available_models: Option<Vec<XaiAvailableModel>>,
}
323
/// A user-declared xAI model entry.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct XaiAvailableModel {
    /// The model's identifier in the xAI API.
    pub name: String,
    /// The model's name in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The maximum number of output tokens allowed by the model.
    pub max_output_tokens: Option<u64>,
    /// The maximum number of completion tokens allowed by the model.
    pub max_completion_tokens: Option<u64>,
    /// Whether the model supports image input.
    pub supports_images: Option<bool>,
    /// Whether the model supports tool calling.
    pub supports_tools: Option<bool>,
    /// Whether the model supports parallel tool calls.
    pub parallel_tool_calls: Option<bool>,
}
336
/// User settings for the zed.dev provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct ZedDotDevSettingsContent {
    /// Additional models to expose beyond the built-in list.
    pub available_models: Option<Vec<ZedDotDevAvailableModel>>,
}
342
/// A user-declared model entry for the zed.dev provider, which proxies to one
/// of several upstream providers (see `ZedDotDevAvailableProvider`).
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct ZedDotDevAvailableModel {
    /// The provider of the language model.
    pub provider: ZedDotDevAvailableProvider,
    /// The model's name in the provider's API. e.g. claude-3-5-sonnet-20240620
    pub name: String,
    /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
    pub display_name: Option<String>,
    /// The size of the context window, indicating the maximum number of tokens the model can process.
    // NOTE(review): `usize` here vs the `u64` used by every other model struct
    // in this file — changing it would break callers, so left as-is.
    pub max_tokens: usize,
    /// The maximum number of output tokens allowed by the model.
    pub max_output_tokens: Option<u64>,
    /// The maximum number of completion tokens allowed by the model (o1-* only)
    pub max_completion_tokens: Option<u64>,
    /// Override this model with a different Anthropic model for tool calls.
    pub tool_override: Option<String>,
    /// Indicates whether this custom model supports caching.
    pub cache_configuration: Option<LanguageModelCacheConfiguration>,
    /// The default temperature to use for this model.
    #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
    pub default_temperature: Option<f32>,
    /// Any extra beta headers to provide when using the model.
    #[serde(default)]
    pub extra_beta_headers: Vec<String>,
    /// The model's mode (e.g. thinking)
    pub mode: Option<ModelMode>,
}
371
/// Upstream providers selectable for zed.dev models. Serialized in lowercase
/// ("anthropic", "openai", "google").
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
#[serde(rename_all = "lowercase")]
pub enum ZedDotDevAvailableProvider {
    Anthropic,
    OpenAi,
    Google,
}
379
/// User settings for the OpenRouter provider.
#[with_fallible_options]
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
pub struct OpenRouterSettingsContent {
    /// Custom API endpoint URL, if overridden.
    pub api_url: Option<String>,
    /// Additional models to expose beyond the built-in list.
    pub available_models: Option<Vec<OpenRouterAvailableModel>>,
}
386
/// A user-declared OpenRouter model entry.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenRouterAvailableModel {
    /// The model's identifier in the OpenRouter API.
    pub name: String,
    /// The model's name in Zed's UI.
    pub display_name: Option<String>,
    /// The model's context window size.
    pub max_tokens: u64,
    /// The maximum number of output tokens allowed by the model.
    pub max_output_tokens: Option<u64>,
    /// The maximum number of completion tokens allowed by the model.
    pub max_completion_tokens: Option<u64>,
    /// Whether the model supports tool calling.
    pub supports_tools: Option<bool>,
    /// Whether the model supports image input.
    pub supports_images: Option<bool>,
    /// The model's mode (e.g. thinking).
    pub mode: Option<ModelMode>,
    /// Upstream provider routing preferences for this model.
    pub provider: Option<OpenRouterProvider>,
}
400
/// OpenRouter provider-routing preferences. Fields are intentionally private
/// (consumed only via serialization), unlike the other settings structs here.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct OpenRouterProvider {
    /// Ordered list of preferred upstream providers.
    order: Option<Vec<String>>,
    /// Whether to fall back to other providers when preferred ones fail;
    /// defaults to `true` when omitted.
    #[serde(default = "default_true")]
    allow_fallbacks: bool,
    /// Whether to only use providers that support all request parameters;
    /// defaults to `false`.
    #[serde(default)]
    require_parameters: bool,
    /// Data-collection policy; defaults to `DataCollection::Allow`.
    #[serde(default)]
    data_collection: DataCollection,
    /// Restrict routing to only these providers.
    only: Option<Vec<String>>,
    /// Providers to exclude from routing.
    ignore: Option<Vec<String>>,
    /// Acceptable model quantization levels.
    quantizations: Option<Vec<String>>,
    /// Provider sorting strategy.
    sort: Option<String>,
}
416
/// Whether OpenRouter upstream providers may collect request data.
/// Serialized in lowercase ("allow"/"disallow"); defaults to `Allow`.
#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
#[serde(rename_all = "lowercase")]
pub enum DataCollection {
    #[default]
    Allow,
    Disallow,
}
424
/// Helper for `#[serde(default = "default_true")]`: boolean flags that use it
/// are enabled unless the settings file explicitly sets them to `false`.
fn default_true() -> bool {
    true
}
428
/// Configuration for caching language model messages.
#[with_fallible_options]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct LanguageModelCacheConfiguration {
    /// Maximum number of cache anchors to place in a conversation.
    pub max_cache_anchors: usize,
    /// Whether to speculatively write cache entries.
    pub should_speculate: bool,
    /// Minimum total token count before caching is worthwhile.
    pub min_total_token: u64,
}
437
/// Inference mode for models that support extended reasoning. Internally
/// tagged: serialized as `{"type": "default"}` or
/// `{"type": "thinking", "budget_tokens": ...}`.
#[derive(
    Copy, Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, MergeFrom,
)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ModelMode {
    /// Standard generation without an explicit reasoning phase.
    #[default]
    Default,
    /// Extended-reasoning ("thinking") mode.
    Thinking {
        /// The maximum number of tokens to use for reasoning. Must be lower than the model's `max_output_tokens`.
        budget_tokens: Option<u32>,
    },
}