1use std::sync::Arc;
2
3use anyhow::Result;
4use gpui::App;
5use language_model::LanguageModelCacheConfiguration;
6use project::Fs;
7use schemars::JsonSchema;
8use serde::{Deserialize, Serialize};
9use settings::{update_settings_file, Settings, SettingsSources};
10
11use crate::provider::{
12 self,
13 anthropic::AnthropicSettings,
14 bedrock::AmazonBedrockSettings,
15 cloud::{self, ZedDotDevSettings},
16 copilot_chat::CopilotChatSettings,
17 deepseek::DeepSeekSettings,
18 google::GoogleSettings,
19 lmstudio::LmStudioSettings,
20 mistral::MistralSettings,
21 ollama::OllamaSettings,
22 open_ai::OpenAiSettings,
23};
24
25/// Initializes the language model settings.
26pub fn init(fs: Arc<dyn Fs>, cx: &mut App) {
27 AllLanguageModelSettings::register(cx);
28
29 if AllLanguageModelSettings::get_global(cx)
30 .openai
31 .needs_setting_migration
32 {
33 update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
34 if let Some(settings) = setting.openai.clone() {
35 let (newest_version, _) = settings.upgrade();
36 setting.openai = Some(OpenAiSettingsContent::Versioned(
37 VersionedOpenAiSettingsContent::V1(newest_version),
38 ));
39 }
40 });
41 }
42
43 if AllLanguageModelSettings::get_global(cx)
44 .anthropic
45 .needs_setting_migration
46 {
47 update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
48 if let Some(settings) = setting.anthropic.clone() {
49 let (newest_version, _) = settings.upgrade();
50 setting.anthropic = Some(AnthropicSettingsContent::Versioned(
51 VersionedAnthropicSettingsContent::V1(newest_version),
52 ));
53 }
54 });
55 }
56}
57
/// Resolved settings for every built-in language model provider, produced by
/// merging the default and user settings sources in `load` below.
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    // NOTE(review): `bedrock` has no counterpart in
    // `AllLanguageModelSettingsContent` and is never merged in `load`, so it
    // always stays at its default here — confirm it is configured elsewhere.
    pub bedrock: AmazonBedrockSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
    pub lmstudio: LmStudioSettings,
    pub deepseek: DeepSeekSettings,
    pub mistral: MistralSettings,
}
71
/// On-disk (settings file) representation of the `language_models` settings
/// section. Every provider entry is optional; absent entries leave the
/// corresponding defaults untouched when merged.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub lmstudio: Option<LmStudioSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    // Serialized under the literal key "zed.dev", which is not a valid Rust
    // identifier.
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub deepseek: Option<DeepseekSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
    pub mistral: Option<MistralSettingsContent>,
}
85
/// Anthropic settings as they may appear on disk: either the legacy
/// unversioned shape or the current versioned shape. `untagged` means serde
/// tries each variant in order when deserializing.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum AnthropicSettingsContent {
    Legacy(LegacyAnthropicSettingsContent),
    Versioned(VersionedAnthropicSettingsContent),
}
92
93impl AnthropicSettingsContent {
94 pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
95 match self {
96 AnthropicSettingsContent::Legacy(content) => (
97 AnthropicSettingsContentV1 {
98 api_url: content.api_url,
99 available_models: content.available_models.map(|models| {
100 models
101 .into_iter()
102 .filter_map(|model| match model {
103 anthropic::Model::Custom {
104 name,
105 display_name,
106 max_tokens,
107 tool_override,
108 cache_configuration,
109 max_output_tokens,
110 default_temperature,
111 extra_beta_headers,
112 } => Some(provider::anthropic::AvailableModel {
113 name,
114 display_name,
115 max_tokens,
116 tool_override,
117 cache_configuration: cache_configuration.as_ref().map(
118 |config| LanguageModelCacheConfiguration {
119 max_cache_anchors: config.max_cache_anchors,
120 should_speculate: config.should_speculate,
121 min_total_token: config.min_total_token,
122 },
123 ),
124 max_output_tokens,
125 default_temperature,
126 extra_beta_headers,
127 }),
128 _ => None,
129 })
130 .collect()
131 }),
132 },
133 true,
134 ),
135 AnthropicSettingsContent::Versioned(content) => match content {
136 VersionedAnthropicSettingsContent::V1(content) => (content, false),
137 },
138 }
139 }
140}
141
/// Legacy (pre-versioning) Anthropic settings shape, kept only so existing
/// settings files can still be parsed and migrated.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<anthropic::Model>>,
}
147
/// Versioned Anthropic settings; the `"version"` field in the settings file
/// selects the variant.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}
154
/// Version 1 of the Anthropic settings file content.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}
160
/// Ollama settings file content.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::ollama::AvailableModel>>,
}
166
/// LM Studio settings file content.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LmStudioSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::lmstudio::AvailableModel>>,
}
172
/// DeepSeek settings file content.
// NOTE(review): casing drifts from the resolved type (`DeepSeekSettings` vs
// `DeepseekSettingsContent`); renaming would break the public interface, so
// it is only flagged here.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct DeepseekSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::deepseek::AvailableModel>>,
}
178
/// Mistral settings file content.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct MistralSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::mistral::AvailableModel>>,
}
184
/// OpenAI settings as they may appear on disk: either the legacy unversioned
/// shape or the current versioned shape. `untagged` means serde tries each
/// variant in order when deserializing.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {
    Legacy(LegacyOpenAiSettingsContent),
    Versioned(VersionedOpenAiSettingsContent),
}
191
192impl OpenAiSettingsContent {
193 pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
194 match self {
195 OpenAiSettingsContent::Legacy(content) => (
196 OpenAiSettingsContentV1 {
197 api_url: content.api_url,
198 available_models: content.available_models.map(|models| {
199 models
200 .into_iter()
201 .filter_map(|model| match model {
202 open_ai::Model::Custom {
203 name,
204 display_name,
205 max_tokens,
206 max_output_tokens,
207 max_completion_tokens,
208 } => Some(provider::open_ai::AvailableModel {
209 name,
210 max_tokens,
211 max_output_tokens,
212 display_name,
213 max_completion_tokens,
214 }),
215 _ => None,
216 })
217 .collect()
218 }),
219 },
220 true,
221 ),
222 OpenAiSettingsContent::Versioned(content) => match content {
223 VersionedOpenAiSettingsContent::V1(content) => (content, false),
224 },
225 }
226 }
227}
228
/// Legacy (pre-versioning) OpenAI settings shape, kept only so existing
/// settings files can still be parsed and migrated.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<open_ai::Model>>,
}
234
/// Versioned OpenAI settings; the `"version"` field in the settings file
/// selects the variant.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}
241
/// Version 1 of the OpenAI settings file content.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}
247
/// Google AI settings file content.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}
253
/// zed.dev (Zed-hosted) settings file content. The field is intentionally
/// private; it is only read by `load` in this module.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    available_models: Option<Vec<cloud::AvailableModel>>,
}
258
/// Copilot Chat settings file content. Currently empty: the provider exposes
/// no user-configurable options, but the key is accepted in settings files.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {}
261
262impl settings::Settings for AllLanguageModelSettings {
263 const KEY: Option<&'static str> = Some("language_models");
264
265 const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);
266
267 type FileContent = AllLanguageModelSettingsContent;
268
269 fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
270 fn merge<T>(target: &mut T, value: Option<T>) {
271 if let Some(value) = value {
272 *target = value;
273 }
274 }
275
276 let mut settings = AllLanguageModelSettings::default();
277
278 for value in sources.defaults_and_customizations() {
279 // Anthropic
280 let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
281 Some((content, upgraded)) => (Some(content), upgraded),
282 None => (None, false),
283 };
284
285 if upgraded {
286 settings.anthropic.needs_setting_migration = true;
287 }
288
289 merge(
290 &mut settings.anthropic.api_url,
291 anthropic.as_ref().and_then(|s| s.api_url.clone()),
292 );
293 merge(
294 &mut settings.anthropic.available_models,
295 anthropic.as_ref().and_then(|s| s.available_models.clone()),
296 );
297
298 // Ollama
299 let ollama = value.ollama.clone();
300
301 merge(
302 &mut settings.ollama.api_url,
303 value.ollama.as_ref().and_then(|s| s.api_url.clone()),
304 );
305 merge(
306 &mut settings.ollama.available_models,
307 ollama.as_ref().and_then(|s| s.available_models.clone()),
308 );
309
310 // LM Studio
311 let lmstudio = value.lmstudio.clone();
312
313 merge(
314 &mut settings.lmstudio.api_url,
315 value.lmstudio.as_ref().and_then(|s| s.api_url.clone()),
316 );
317 merge(
318 &mut settings.lmstudio.available_models,
319 lmstudio.as_ref().and_then(|s| s.available_models.clone()),
320 );
321
322 // DeepSeek
323 let deepseek = value.deepseek.clone();
324
325 merge(
326 &mut settings.deepseek.api_url,
327 value.deepseek.as_ref().and_then(|s| s.api_url.clone()),
328 );
329 merge(
330 &mut settings.deepseek.available_models,
331 deepseek.as_ref().and_then(|s| s.available_models.clone()),
332 );
333
334 // OpenAI
335 let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
336 Some((content, upgraded)) => (Some(content), upgraded),
337 None => (None, false),
338 };
339
340 if upgraded {
341 settings.openai.needs_setting_migration = true;
342 }
343
344 merge(
345 &mut settings.openai.api_url,
346 openai.as_ref().and_then(|s| s.api_url.clone()),
347 );
348 merge(
349 &mut settings.openai.available_models,
350 openai.as_ref().and_then(|s| s.available_models.clone()),
351 );
352 merge(
353 &mut settings.zed_dot_dev.available_models,
354 value
355 .zed_dot_dev
356 .as_ref()
357 .and_then(|s| s.available_models.clone()),
358 );
359 merge(
360 &mut settings.google.api_url,
361 value.google.as_ref().and_then(|s| s.api_url.clone()),
362 );
363 merge(
364 &mut settings.google.available_models,
365 value
366 .google
367 .as_ref()
368 .and_then(|s| s.available_models.clone()),
369 );
370
371 // Mistral
372 let mistral = value.mistral.clone();
373 merge(
374 &mut settings.mistral.api_url,
375 mistral.as_ref().and_then(|s| s.api_url.clone()),
376 );
377 merge(
378 &mut settings.mistral.available_models,
379 mistral.as_ref().and_then(|s| s.available_models.clone()),
380 );
381 }
382
383 Ok(settings)
384 }
385}