use std::sync::Arc;

use anyhow::Result;
use gpui::App;
use language_model::LanguageModelCacheConfiguration;
use project::Fs;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources, update_settings_file};

use crate::provider::{
    self,
    anthropic::AnthropicSettings,
    bedrock::AmazonBedrockSettings,
    cloud::{self, ZedDotDevSettings},
    copilot_chat::CopilotChatSettings,
    deepseek::DeepSeekSettings,
    google::GoogleSettings,
    lmstudio::LmStudioSettings,
    mistral::MistralSettings,
    ollama::OllamaSettings,
    open_ai::OpenAiSettings,
};

/// Initializes the language model settings.
pub fn init(fs: Arc<dyn Fs>, cx: &mut App) {
    AllLanguageModelSettings::register(cx);

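    // If the OpenAI settings on disk are still in the legacy (unversioned)
    // format, rewrite them in the latest versioned format.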
    if AllLanguageModelSettings::get_global(cx)
        .openai
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
            if let Some(settings) = setting.openai.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.openai = Some(OpenAiSettingsContent::Versioned(
                    VersionedOpenAiSettingsContent::V1(newest_version),
                ));
            }
        });
    }

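    // Likewise rewrite legacy Anthropic settings in the latest versioned format.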
    if AllLanguageModelSettings::get_global(cx)
        .anthropic
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
            if let Some(settings) = setting.anthropic.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.anthropic = Some(AnthropicSettingsContent::Versioned(
                    VersionedAnthropicSettingsContent::V1(newest_version),
                ));
            }
        });
    }
}

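/// Resolved settings for every supported language model provider.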
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub bedrock: AmazonBedrockSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
    pub lmstudio: LmStudioSettings,
    pub deepseek: DeepSeekSettings,
    pub mistral: MistralSettings,
}

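/// The serialized form of the provider settings, as they appear under the
/// `language_models` key in the settings file.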
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub bedrock: Option<AmazonBedrockSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub lmstudio: Option<LmStudioSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub deepseek: Option<DeepseekSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
    pub mistral: Option<MistralSettingsContent>,
}

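/// Anthropic settings as written in the settings file. Because the enum is
/// `#[serde(untagged)]`, both shapes deserialize: an entry carrying
/// `"version": "1"` parses as the versioned variant, anything else falls back
/// to the legacy shape. An illustrative versioned entry (values are
/// placeholders) might look like:
///
/// ```json
/// {
///     "language_models": {
///         "anthropic": {
///             "version": "1",
///             "api_url": "https://api.anthropic.com",
///             "available_models": []
///         }
///     }
/// }
/// ```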
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum AnthropicSettingsContent {
    Versioned(VersionedAnthropicSettingsContent),
    Legacy(LegacyAnthropicSettingsContent),
}

impl AnthropicSettingsContent {
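    /// Upgrades these settings to the latest versioned format, converting the
    /// legacy model list into `provider::anthropic::AvailableModel` entries.
    /// Returns the upgraded content and whether a migration took place.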
    pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
        match self {
            AnthropicSettingsContent::Legacy(content) => (
                AnthropicSettingsContentV1 {
                    api_url: content.api_url,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                anthropic::Model::Custom {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration,
                                    max_output_tokens,
                                    default_temperature,
                                    extra_beta_headers,
                                    mode,
                                } => Some(provider::anthropic::AvailableModel {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration: cache_configuration.as_ref().map(
                                        |config| LanguageModelCacheConfiguration {
                                            max_cache_anchors: config.max_cache_anchors,
                                            should_speculate: config.should_speculate,
                                            min_total_token: config.min_total_token,
                                        },
                                    ),
                                    max_output_tokens,
                                    default_temperature,
                                    extra_beta_headers,
                                    mode: Some(mode.into()),
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            AnthropicSettingsContent::Versioned(content) => match content {
                VersionedAnthropicSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<anthropic::Model>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AmazonBedrockSettingsContent {
    available_models: Option<Vec<provider::bedrock::AvailableModel>>,
    endpoint_url: Option<String>,
    region: Option<String>,
    profile: Option<String>,
    authentication_method: Option<provider::bedrock::BedrockAuthMethod>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::ollama::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LmStudioSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::lmstudio::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct DeepseekSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::deepseek::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct MistralSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::mistral::AvailableModel>>,
}

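/// OpenAI settings as written in the settings file, following the same
/// legacy-versus-versioned pattern as [`AnthropicSettingsContent`]: an entry
/// carrying `"version": "1"` parses as the versioned variant, anything else
/// falls back to the legacy shape.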
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {
    Versioned(VersionedOpenAiSettingsContent),
    Legacy(LegacyOpenAiSettingsContent),
}

impl OpenAiSettingsContent {
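    /// Upgrades these settings to the latest versioned format, converting the
    /// legacy model list into `provider::open_ai::AvailableModel` entries.
    /// Returns the upgraded content and whether a migration took place.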
    pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
        match self {
            OpenAiSettingsContent::Legacy(content) => (
                OpenAiSettingsContentV1 {
                    api_url: content.api_url,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                open_ai::Model::Custom {
                                    name,
                                    display_name,
                                    max_tokens,
                                    max_output_tokens,
                                    max_completion_tokens,
                                } => Some(provider::open_ai::AvailableModel {
                                    name,
                                    max_tokens,
                                    max_output_tokens,
                                    display_name,
                                    max_completion_tokens,
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            OpenAiSettingsContent::Versioned(content) => match content {
                VersionedOpenAiSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<open_ai::Model>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    available_models: Option<Vec<cloud::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {}

impl settings::Settings for AllLanguageModelSettings {
    const KEY: Option<&'static str> = Some("language_models");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AllLanguageModelSettingsContent;

    fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
        fn merge<T>(target: &mut T, value: Option<T>) {
            if let Some(value) = value {
                *target = value;
            }
        }

        let mut settings = AllLanguageModelSettings::default();

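        // Apply each source in order (defaults first, then user customizations);
        // for every field, a later `Some` value overrides the earlier one.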
        for value in sources.defaults_and_customizations() {
            // Anthropic
            let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.anthropic.needs_setting_migration = true;
            }

            merge(
                &mut settings.anthropic.api_url,
                anthropic.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.anthropic.available_models,
                anthropic.as_ref().and_then(|s| s.available_models.clone()),
            );

            // Bedrock
            let bedrock = value.bedrock.clone();
            merge(
                &mut settings.bedrock.profile_name,
                bedrock.as_ref().map(|s| s.profile.clone()),
            );
            merge(
                &mut settings.bedrock.authentication_method,
                bedrock.as_ref().map(|s| s.authentication_method.clone()),
            );
            merge(
                &mut settings.bedrock.region,
                bedrock.as_ref().map(|s| s.region.clone()),
            );
            merge(
                &mut settings.bedrock.endpoint,
                bedrock.as_ref().map(|s| s.endpoint_url.clone()),
            );

            // Ollama
            let ollama = value.ollama.clone();

            merge(
                &mut settings.ollama.api_url,
                ollama.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.ollama.available_models,
                ollama.as_ref().and_then(|s| s.available_models.clone()),
            );

            // LM Studio
            let lmstudio = value.lmstudio.clone();

            merge(
                &mut settings.lmstudio.api_url,
                lmstudio.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.lmstudio.available_models,
                lmstudio.as_ref().and_then(|s| s.available_models.clone()),
            );

            // DeepSeek
            let deepseek = value.deepseek.clone();

            merge(
                &mut settings.deepseek.api_url,
                deepseek.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.deepseek.available_models,
                deepseek.as_ref().and_then(|s| s.available_models.clone()),
            );

            // OpenAI
            let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.openai.needs_setting_migration = true;
            }

            merge(
                &mut settings.openai.api_url,
                openai.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.openai.available_models,
                openai.as_ref().and_then(|s| s.available_models.clone()),
            );

            // zed.dev
            merge(
                &mut settings.zed_dot_dev.available_models,
                value
                    .zed_dot_dev
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

            // Google
            merge(
                &mut settings.google.api_url,
                value.google.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.google.available_models,
                value
                    .google
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

            // Mistral
            let mistral = value.mistral.clone();
            merge(
                &mut settings.mistral.api_url,
                mistral.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.mistral.available_models,
                mistral.as_ref().and_then(|s| s.available_models.clone()),
            );
        }

        Ok(settings)
    }

    fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {}
}