1use std::sync::Arc;
2
3use anyhow::Result;
4use gpui::App;
5use language_model::LanguageModelCacheConfiguration;
6use project::Fs;
7use schemars::JsonSchema;
8use serde::{Deserialize, Serialize};
9use settings::{Settings, SettingsSources, update_settings_file};
10
11use crate::provider::{
12 self,
13 anthropic::AnthropicSettings,
14 bedrock::AmazonBedrockSettings,
15 cloud::{self, ZedDotDevSettings},
16 copilot_chat::CopilotChatSettings,
17 deepseek::DeepSeekSettings,
18 google::GoogleSettings,
19 lmstudio::LmStudioSettings,
20 mistral::MistralSettings,
21 ollama::OllamaSettings,
22 open_ai::OpenAiSettings,
23};
24
/// Initializes the language model settings.
///
/// Registers [`AllLanguageModelSettings`] as a global setting and, when the
/// currently loaded OpenAI or Anthropic section was parsed from the legacy
/// (unversioned) format, rewrites the user's settings file into the
/// versioned (`"version": "1"`) format.
pub fn init(fs: Arc<dyn Fs>, cx: &mut App) {
    AllLanguageModelSettings::register(cx);

    // `needs_setting_migration` is set by `load` when a legacy-format section
    // was upgraded in memory; persist the upgraded form back to disk so the
    // migration does not need to happen again.
    if AllLanguageModelSettings::get_global(cx)
        .openai
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
            if let Some(settings) = setting.openai.clone() {
                // Discard the `upgraded` flag: we only need the V1 content here.
                let (newest_version, _) = settings.upgrade();
                setting.openai = Some(OpenAiSettingsContent::Versioned(
                    VersionedOpenAiSettingsContent::V1(newest_version),
                ));
            }
        });
    }

    // Same migration for the Anthropic section.
    if AllLanguageModelSettings::get_global(cx)
        .anthropic
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
            if let Some(settings) = setting.anthropic.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.anthropic = Some(AnthropicSettingsContent::Versioned(
                    VersionedAnthropicSettingsContent::V1(newest_version),
                ));
            }
        });
    }
}
57
/// Resolved, in-memory settings for every supported language model provider.
///
/// Built by [`settings::Settings::load`] from the merged
/// [`AllLanguageModelSettingsContent`] sources.
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub bedrock: AmazonBedrockSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
    pub lmstudio: LmStudioSettings,
    pub deepseek: DeepSeekSettings,
    pub mistral: MistralSettings,
}
71
/// Serialized form of the `language_models` settings section.
///
/// Each provider entry is optional; an absent entry leaves that provider's
/// defaults untouched when sources are merged in `load`.
///
/// NOTE(review): `AllLanguageModelSettings` also has a `bedrock` field, but
/// there is no matching content field here — confirm whether Bedrock is
/// configured through some other mechanism.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub lmstudio: Option<LmStudioSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    // Stored under the "zed.dev" key, which is not a legal Rust identifier.
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub deepseek: Option<DeepseekSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
    pub mistral: Option<MistralSettingsContent>,
}
85
86#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
87#[serde(untagged)]
88pub enum AnthropicSettingsContent {
89 Legacy(LegacyAnthropicSettingsContent),
90 Versioned(VersionedAnthropicSettingsContent),
91}
92
93impl AnthropicSettingsContent {
94 pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
95 match self {
96 AnthropicSettingsContent::Legacy(content) => (
97 AnthropicSettingsContentV1 {
98 api_url: content.api_url,
99 available_models: content.available_models.map(|models| {
100 models
101 .into_iter()
102 .filter_map(|model| match model {
103 anthropic::Model::Custom {
104 name,
105 display_name,
106 max_tokens,
107 tool_override,
108 cache_configuration,
109 max_output_tokens,
110 default_temperature,
111 extra_beta_headers,
112 mode,
113 } => Some(provider::anthropic::AvailableModel {
114 name,
115 display_name,
116 max_tokens,
117 tool_override,
118 cache_configuration: cache_configuration.as_ref().map(
119 |config| LanguageModelCacheConfiguration {
120 max_cache_anchors: config.max_cache_anchors,
121 should_speculate: config.should_speculate,
122 min_total_token: config.min_total_token,
123 },
124 ),
125 max_output_tokens,
126 default_temperature,
127 extra_beta_headers,
128 mode: Some(mode.into()),
129 }),
130 _ => None,
131 })
132 .collect()
133 }),
134 },
135 true,
136 ),
137 AnthropicSettingsContent::Versioned(content) => match content {
138 VersionedAnthropicSettingsContent::V1(content) => (content, false),
139 },
140 }
141 }
142}
143
/// Pre-versioning Anthropic settings format; converted to the current schema
/// by [`AnthropicSettingsContent::upgrade`].
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    // Legacy files store models as `anthropic::Model` values.
    pub available_models: Option<Vec<anthropic::Model>>,
}
149
/// Versioned Anthropic settings, tagged in the settings file with a
/// `"version"` key (currently only `"1"`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}
156
/// Version 1 of the Anthropic settings schema.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}
162
/// Ollama provider settings as stored in the settings file.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::ollama::AvailableModel>>,
}
168
/// LM Studio provider settings as stored in the settings file.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LmStudioSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::lmstudio::AvailableModel>>,
}
174
/// DeepSeek provider settings as stored in the settings file.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct DeepseekSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::deepseek::AvailableModel>>,
}
180
/// Mistral provider settings as stored in the settings file.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct MistralSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::mistral::AvailableModel>>,
}
186
187#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
188#[serde(untagged)]
189pub enum OpenAiSettingsContent {
190 Legacy(LegacyOpenAiSettingsContent),
191 Versioned(VersionedOpenAiSettingsContent),
192}
193
194impl OpenAiSettingsContent {
195 pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
196 match self {
197 OpenAiSettingsContent::Legacy(content) => (
198 OpenAiSettingsContentV1 {
199 api_url: content.api_url,
200 available_models: content.available_models.map(|models| {
201 models
202 .into_iter()
203 .filter_map(|model| match model {
204 open_ai::Model::Custom {
205 name,
206 display_name,
207 max_tokens,
208 max_output_tokens,
209 max_completion_tokens,
210 } => Some(provider::open_ai::AvailableModel {
211 name,
212 max_tokens,
213 max_output_tokens,
214 display_name,
215 max_completion_tokens,
216 }),
217 _ => None,
218 })
219 .collect()
220 }),
221 },
222 true,
223 ),
224 OpenAiSettingsContent::Versioned(content) => match content {
225 VersionedOpenAiSettingsContent::V1(content) => (content, false),
226 },
227 }
228 }
229}
230
/// Pre-versioning OpenAI settings format; converted to the current schema by
/// [`OpenAiSettingsContent::upgrade`].
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    // Legacy files store models as `open_ai::Model` values.
    pub available_models: Option<Vec<open_ai::Model>>,
}
236
/// Versioned OpenAI settings, tagged in the settings file with a
/// `"version"` key (currently only `"1"`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}
243
/// Version 1 of the OpenAI settings schema.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}
249
/// Google provider settings as stored in the settings file.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}
255
/// Settings for the zed.dev-hosted provider (serialized under the
/// `"zed.dev"` key).
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    // Intentionally private: only read within this module during `load`.
    available_models: Option<Vec<cloud::AvailableModel>>,
}
260
/// Copilot Chat currently exposes no configurable settings; this empty struct
/// reserves the `copilot_chat` key in the settings schema.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {}
263
264impl settings::Settings for AllLanguageModelSettings {
265 const KEY: Option<&'static str> = Some("language_models");
266
267 const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);
268
269 type FileContent = AllLanguageModelSettingsContent;
270
271 fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
272 fn merge<T>(target: &mut T, value: Option<T>) {
273 if let Some(value) = value {
274 *target = value;
275 }
276 }
277
278 let mut settings = AllLanguageModelSettings::default();
279
280 for value in sources.defaults_and_customizations() {
281 // Anthropic
282 let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
283 Some((content, upgraded)) => (Some(content), upgraded),
284 None => (None, false),
285 };
286
287 if upgraded {
288 settings.anthropic.needs_setting_migration = true;
289 }
290
291 merge(
292 &mut settings.anthropic.api_url,
293 anthropic.as_ref().and_then(|s| s.api_url.clone()),
294 );
295 merge(
296 &mut settings.anthropic.available_models,
297 anthropic.as_ref().and_then(|s| s.available_models.clone()),
298 );
299
300 // Ollama
301 let ollama = value.ollama.clone();
302
303 merge(
304 &mut settings.ollama.api_url,
305 value.ollama.as_ref().and_then(|s| s.api_url.clone()),
306 );
307 merge(
308 &mut settings.ollama.available_models,
309 ollama.as_ref().and_then(|s| s.available_models.clone()),
310 );
311
312 // LM Studio
313 let lmstudio = value.lmstudio.clone();
314
315 merge(
316 &mut settings.lmstudio.api_url,
317 value.lmstudio.as_ref().and_then(|s| s.api_url.clone()),
318 );
319 merge(
320 &mut settings.lmstudio.available_models,
321 lmstudio.as_ref().and_then(|s| s.available_models.clone()),
322 );
323
324 // DeepSeek
325 let deepseek = value.deepseek.clone();
326
327 merge(
328 &mut settings.deepseek.api_url,
329 value.deepseek.as_ref().and_then(|s| s.api_url.clone()),
330 );
331 merge(
332 &mut settings.deepseek.available_models,
333 deepseek.as_ref().and_then(|s| s.available_models.clone()),
334 );
335
336 // OpenAI
337 let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
338 Some((content, upgraded)) => (Some(content), upgraded),
339 None => (None, false),
340 };
341
342 if upgraded {
343 settings.openai.needs_setting_migration = true;
344 }
345
346 merge(
347 &mut settings.openai.api_url,
348 openai.as_ref().and_then(|s| s.api_url.clone()),
349 );
350 merge(
351 &mut settings.openai.available_models,
352 openai.as_ref().and_then(|s| s.available_models.clone()),
353 );
354 merge(
355 &mut settings.zed_dot_dev.available_models,
356 value
357 .zed_dot_dev
358 .as_ref()
359 .and_then(|s| s.available_models.clone()),
360 );
361 merge(
362 &mut settings.google.api_url,
363 value.google.as_ref().and_then(|s| s.api_url.clone()),
364 );
365 merge(
366 &mut settings.google.available_models,
367 value
368 .google
369 .as_ref()
370 .and_then(|s| s.available_models.clone()),
371 );
372
373 // Mistral
374 let mistral = value.mistral.clone();
375 merge(
376 &mut settings.mistral.api_url,
377 mistral.as_ref().and_then(|s| s.api_url.clone()),
378 );
379 merge(
380 &mut settings.mistral.available_models,
381 mistral.as_ref().and_then(|s| s.available_models.clone()),
382 );
383 }
384
385 Ok(settings)
386 }
387}