1use std::{sync::Arc, time::Duration};
2
3use anyhow::Result;
4use gpui::AppContext;
5use project::Fs;
6use schemars::JsonSchema;
7use serde::{Deserialize, Serialize};
8use settings::{update_settings_file, Settings, SettingsSources};
9
10use crate::{
11 provider::{
12 self,
13 anthropic::AnthropicSettings,
14 cloud::{self, ZedDotDevSettings},
15 copilot_chat::CopilotChatSettings,
16 google::GoogleSettings,
17 ollama::OllamaSettings,
18 open_ai::OpenAiSettings,
19 },
20 LanguageModelCacheConfiguration,
21};
22
23/// Initializes the language model settings.
24pub fn init(fs: Arc<dyn Fs>, cx: &mut AppContext) {
25 AllLanguageModelSettings::register(cx);
26
27 if AllLanguageModelSettings::get_global(cx)
28 .openai
29 .needs_setting_migration
30 {
31 update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
32 if let Some(settings) = setting.openai.clone() {
33 let (newest_version, _) = settings.upgrade();
34 setting.openai = Some(OpenAiSettingsContent::Versioned(
35 VersionedOpenAiSettingsContent::V1(newest_version),
36 ));
37 }
38 });
39 }
40
41 if AllLanguageModelSettings::get_global(cx)
42 .anthropic
43 .needs_setting_migration
44 {
45 update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
46 if let Some(settings) = setting.anthropic.clone() {
47 let (newest_version, _) = settings.upgrade();
48 setting.anthropic = Some(AnthropicSettingsContent::Versioned(
49 VersionedAnthropicSettingsContent::V1(newest_version),
50 ));
51 }
52 });
53 }
54}
55
/// Resolved, in-memory settings for every supported language model provider.
/// Produced by merging the settings file layers in
/// `<AllLanguageModelSettings as settings::Settings>::load`.
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
}
65
/// On-disk (settings file) representation of the per-provider settings.
/// Every provider section is optional; absent sections fall back to defaults.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    // The settings key is "zed.dev", which is not a valid Rust identifier.
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
}
76
/// Anthropic settings as they may appear in a settings file: either the
/// legacy (unversioned) shape or the current versioned shape. `untagged`
/// makes serde try each variant in order when deserializing.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum AnthropicSettingsContent {
    Legacy(LegacyAnthropicSettingsContent),
    Versioned(VersionedAnthropicSettingsContent),
}
83
84impl AnthropicSettingsContent {
85 pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
86 match self {
87 AnthropicSettingsContent::Legacy(content) => (
88 AnthropicSettingsContentV1 {
89 api_url: content.api_url,
90 low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
91 available_models: content.available_models.map(|models| {
92 models
93 .into_iter()
94 .filter_map(|model| match model {
95 anthropic::Model::Custom {
96 name,
97 display_name,
98 max_tokens,
99 tool_override,
100 cache_configuration,
101 max_output_tokens,
102 default_temperature,
103 } => Some(provider::anthropic::AvailableModel {
104 name,
105 display_name,
106 max_tokens,
107 tool_override,
108 cache_configuration: cache_configuration.as_ref().map(
109 |config| LanguageModelCacheConfiguration {
110 max_cache_anchors: config.max_cache_anchors,
111 should_speculate: config.should_speculate,
112 min_total_token: config.min_total_token,
113 },
114 ),
115 max_output_tokens,
116 default_temperature,
117 }),
118 _ => None,
119 })
120 .collect()
121 }),
122 },
123 true,
124 ),
125 AnthropicSettingsContent::Versioned(content) => match content {
126 VersionedAnthropicSettingsContent::V1(content) => (content, false),
127 },
128 }
129 }
130}
131
/// Pre-versioning shape of the Anthropic settings; kept so old settings
/// files still deserialize and can be migrated.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    // Legacy files store `anthropic::Model` values rather than the
    // provider-level `AvailableModel` used by the versioned schema.
    pub available_models: Option<Vec<anthropic::Model>>,
}
138
/// Versioned Anthropic settings; the `"version"` field in the settings file
/// selects the variant (currently only `"1"`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}
145
/// Version 1 of the Anthropic settings file schema.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}
152
/// Settings-file representation of the Ollama provider configuration.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::ollama::AvailableModel>>,
}
159
/// OpenAI settings as they may appear in a settings file: either the legacy
/// (unversioned) shape or the current versioned shape. `untagged` makes
/// serde try each variant in order when deserializing.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {
    Legacy(LegacyOpenAiSettingsContent),
    Versioned(VersionedOpenAiSettingsContent),
}
166
167impl OpenAiSettingsContent {
168 pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
169 match self {
170 OpenAiSettingsContent::Legacy(content) => (
171 OpenAiSettingsContentV1 {
172 api_url: content.api_url,
173 low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
174 available_models: content.available_models.map(|models| {
175 models
176 .into_iter()
177 .filter_map(|model| match model {
178 open_ai::Model::Custom {
179 name,
180 display_name,
181 max_tokens,
182 max_output_tokens,
183 max_completion_tokens,
184 } => Some(provider::open_ai::AvailableModel {
185 name,
186 max_tokens,
187 max_output_tokens,
188 display_name,
189 max_completion_tokens,
190 }),
191 _ => None,
192 })
193 .collect()
194 }),
195 },
196 true,
197 ),
198 OpenAiSettingsContent::Versioned(content) => match content {
199 VersionedOpenAiSettingsContent::V1(content) => (content, false),
200 },
201 }
202 }
203}
204
/// Pre-versioning shape of the OpenAI settings; kept so old settings files
/// still deserialize and can be migrated.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    // Legacy files store `open_ai::Model` values rather than the
    // provider-level `AvailableModel` used by the versioned schema.
    pub available_models: Option<Vec<open_ai::Model>>,
}
211
/// Versioned OpenAI settings; the `"version"` field in the settings file
/// selects the variant (currently only `"1"`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}
218
/// Version 1 of the OpenAI settings file schema.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}
225
/// Settings-file representation of the Google provider configuration.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}
232
/// Settings-file representation of the zed.dev (Zed cloud) provider
/// configuration.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    // Private: read only within this module when merging settings layers.
    available_models: Option<Vec<cloud::AvailableModel>>,
    pub low_speed_timeout_in_seconds: Option<u64>,
}
238
/// Settings-file representation of the GitHub Copilot Chat provider
/// configuration.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {
    // Private: read only within this module when merging settings layers.
    low_speed_timeout_in_seconds: Option<u64>,
}
243
244impl settings::Settings for AllLanguageModelSettings {
245 const KEY: Option<&'static str> = Some("language_models");
246
247 const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);
248
249 type FileContent = AllLanguageModelSettingsContent;
250
251 fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
252 fn merge<T>(target: &mut T, value: Option<T>) {
253 if let Some(value) = value {
254 *target = value;
255 }
256 }
257
258 let mut settings = AllLanguageModelSettings::default();
259
260 for value in sources.defaults_and_customizations() {
261 // Anthropic
262 let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
263 Some((content, upgraded)) => (Some(content), upgraded),
264 None => (None, false),
265 };
266
267 if upgraded {
268 settings.anthropic.needs_setting_migration = true;
269 }
270
271 merge(
272 &mut settings.anthropic.api_url,
273 anthropic.as_ref().and_then(|s| s.api_url.clone()),
274 );
275 if let Some(low_speed_timeout_in_seconds) = anthropic
276 .as_ref()
277 .and_then(|s| s.low_speed_timeout_in_seconds)
278 {
279 settings.anthropic.low_speed_timeout =
280 Some(Duration::from_secs(low_speed_timeout_in_seconds));
281 }
282 merge(
283 &mut settings.anthropic.available_models,
284 anthropic.as_ref().and_then(|s| s.available_models.clone()),
285 );
286
287 // Ollama
288 let ollama = value.ollama.clone();
289
290 merge(
291 &mut settings.ollama.api_url,
292 value.ollama.as_ref().and_then(|s| s.api_url.clone()),
293 );
294 if let Some(low_speed_timeout_in_seconds) = value
295 .ollama
296 .as_ref()
297 .and_then(|s| s.low_speed_timeout_in_seconds)
298 {
299 settings.ollama.low_speed_timeout =
300 Some(Duration::from_secs(low_speed_timeout_in_seconds));
301 }
302 merge(
303 &mut settings.ollama.available_models,
304 ollama.as_ref().and_then(|s| s.available_models.clone()),
305 );
306
307 // OpenAI
308 let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
309 Some((content, upgraded)) => (Some(content), upgraded),
310 None => (None, false),
311 };
312
313 if upgraded {
314 settings.openai.needs_setting_migration = true;
315 }
316
317 merge(
318 &mut settings.openai.api_url,
319 openai.as_ref().and_then(|s| s.api_url.clone()),
320 );
321 if let Some(low_speed_timeout_in_seconds) =
322 openai.as_ref().and_then(|s| s.low_speed_timeout_in_seconds)
323 {
324 settings.openai.low_speed_timeout =
325 Some(Duration::from_secs(low_speed_timeout_in_seconds));
326 }
327 merge(
328 &mut settings.openai.available_models,
329 openai.as_ref().and_then(|s| s.available_models.clone()),
330 );
331
332 merge(
333 &mut settings.zed_dot_dev.available_models,
334 value
335 .zed_dot_dev
336 .as_ref()
337 .and_then(|s| s.available_models.clone()),
338 );
339 if let Some(low_speed_timeout_in_seconds) = value
340 .zed_dot_dev
341 .as_ref()
342 .and_then(|s| s.low_speed_timeout_in_seconds)
343 {
344 settings.zed_dot_dev.low_speed_timeout =
345 Some(Duration::from_secs(low_speed_timeout_in_seconds));
346 }
347
348 merge(
349 &mut settings.google.api_url,
350 value.google.as_ref().and_then(|s| s.api_url.clone()),
351 );
352 if let Some(low_speed_timeout_in_seconds) = value
353 .google
354 .as_ref()
355 .and_then(|s| s.low_speed_timeout_in_seconds)
356 {
357 settings.google.low_speed_timeout =
358 Some(Duration::from_secs(low_speed_timeout_in_seconds));
359 }
360 merge(
361 &mut settings.google.available_models,
362 value
363 .google
364 .as_ref()
365 .and_then(|s| s.available_models.clone()),
366 );
367
368 if let Some(low_speed_timeout) = value
369 .copilot_chat
370 .as_ref()
371 .and_then(|s| s.low_speed_timeout_in_seconds)
372 {
373 settings.copilot_chat.low_speed_timeout =
374 Some(Duration::from_secs(low_speed_timeout));
375 }
376 }
377
378 Ok(settings)
379 }
380}