use std::{sync::Arc, time::Duration};

use anyhow::Result;
use gpui::AppContext;
use project::Fs;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsSources};

use crate::{
    provider::{
        self,
        anthropic::AnthropicSettings,
        cloud::{self, ZedDotDevSettings},
        copilot_chat::CopilotChatSettings,
        google::GoogleSettings,
        ollama::OllamaSettings,
        open_ai::OpenAiSettings,
    },
    LanguageModelCacheConfiguration,
};

/// Initializes the language model settings.
pub fn init(fs: Arc<dyn Fs>, cx: &mut AppContext) {
    AllLanguageModelSettings::register(cx);

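    // If a provider's settings are still stored in the legacy (unversioned)
    // shape, rewrite the settings file so it uses the latest versioned shape.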
    if AllLanguageModelSettings::get_global(cx)
        .openai
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
            if let Some(settings) = setting.openai.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.openai = Some(OpenAiSettingsContent::Versioned(
                    VersionedOpenAiSettingsContent::V1(newest_version),
                ));
            }
        });
    }

    if AllLanguageModelSettings::get_global(cx)
        .anthropic
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
            if let Some(settings) = setting.anthropic.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.anthropic = Some(AnthropicSettingsContent::Versioned(
                    VersionedAnthropicSettingsContent::V1(newest_version),
                ));
            }
        });
    }
}

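/// Resolved settings for every built-in language model provider.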
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
}

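/// The `language_models` section of the settings file, as written on disk.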
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
}

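/// Anthropic settings as they appear in the settings file. The enum is
/// untagged so that both the legacy shape and the versioned shape deserialize.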
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum AnthropicSettingsContent {
    Legacy(LegacyAnthropicSettingsContent),
    Versioned(VersionedAnthropicSettingsContent),
}

impl AnthropicSettingsContent {
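    /// Converts the settings to the latest versioned shape. Returns `true`
    /// when the input was the legacy shape and therefore needs migration.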
    pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
        match self {
            AnthropicSettingsContent::Legacy(content) => (
                AnthropicSettingsContentV1 {
                    api_url: content.api_url,
                    low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                anthropic::Model::Custom {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration,
                                    max_output_tokens,
                                } => Some(provider::anthropic::AvailableModel {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration: cache_configuration.as_ref().map(
                                        |config| LanguageModelCacheConfiguration {
                                            max_cache_anchors: config.max_cache_anchors,
                                            should_speculate: config.should_speculate,
                                            min_total_token: config.min_total_token,
                                        },
                                    ),
                                    max_output_tokens,
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            AnthropicSettingsContent::Versioned(content) => match content {
                VersionedAnthropicSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

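/// The unversioned Anthropic settings shape, kept for backwards compatibility.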
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<anthropic::Model>>,
}

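/// Versioned Anthropic settings, tagged by a `"version"` field in the
/// settings file (currently only `"1"`).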
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}

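/// Ollama settings as they appear in the settings file.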
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::ollama::AvailableModel>>,
}

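/// OpenAI settings as they appear in the settings file, mirroring the
/// legacy/versioned split used for Anthropic above.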
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {
    Legacy(LegacyOpenAiSettingsContent),
    Versioned(VersionedOpenAiSettingsContent),
}

impl OpenAiSettingsContent {
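    /// Like [`AnthropicSettingsContent::upgrade`]: converts to the latest
    /// versioned shape and reports whether the input was the legacy shape.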
    pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
        match self {
            OpenAiSettingsContent::Legacy(content) => (
                OpenAiSettingsContentV1 {
                    api_url: content.api_url,
                    low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                open_ai::Model::Custom {
                                    name,
                                    display_name,
                                    max_tokens,
                                    max_output_tokens,
                                    max_completion_tokens,
                                } => Some(provider::open_ai::AvailableModel {
                                    name,
                                    max_tokens,
                                    max_output_tokens,
                                    display_name,
                                    max_completion_tokens,
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            OpenAiSettingsContent::Versioned(content) => match content {
                VersionedOpenAiSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<open_ai::Model>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}

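/// Zed-hosted (`zed.dev`) model settings as they appear in the settings file.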
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    available_models: Option<Vec<cloud::AvailableModel>>,
    pub low_speed_timeout_in_seconds: Option<u64>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {
    low_speed_timeout_in_seconds: Option<u64>,
}

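// An illustrative sketch of how a `language_models` block in the user's
// settings file maps onto `AllLanguageModelSettingsContent` (field names and
// the `"version": "1"` tag come from the types above; the URLs and values are
// examples, not defaults asserted here):
//
//   "language_models": {
//     "anthropic": {
//       "version": "1",
//       "api_url": "https://api.anthropic.com",
//       "low_speed_timeout_in_seconds": 60
//     },
//     "ollama": {
//       "api_url": "http://localhost:11434"
//     }
//   }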
impl settings::Settings for AllLanguageModelSettings {
    const KEY: Option<&'static str> = Some("language_models");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AllLanguageModelSettingsContent;

    fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
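        // Overwrites `target` only when the settings source provides a value,
        // so later (higher-precedence) sources win without erasing defaults.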
        fn merge<T>(target: &mut T, value: Option<T>) {
            if let Some(value) = value {
                *target = value;
            }
        }

        let mut settings = AllLanguageModelSettings::default();

        for value in sources.defaults_and_customizations() {
            // Anthropic
            let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.anthropic.needs_setting_migration = true;
            }

            merge(
                &mut settings.anthropic.api_url,
                anthropic.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = anthropic
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.anthropic.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.anthropic.available_models,
                anthropic.as_ref().and_then(|s| s.available_models.clone()),
            );

            // Ollama
            let ollama = value.ollama.clone();

            merge(
                &mut settings.ollama.api_url,
                value.ollama.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = value
                .ollama
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.ollama.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.ollama.available_models,
                ollama.as_ref().and_then(|s| s.available_models.clone()),
            );

            // OpenAI
            let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.openai.needs_setting_migration = true;
            }

            merge(
                &mut settings.openai.api_url,
                openai.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) =
                openai.as_ref().and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.openai.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.openai.available_models,
                openai.as_ref().and_then(|s| s.available_models.clone()),
            );

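            // Zed.dev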
            merge(
                &mut settings.zed_dot_dev.available_models,
                value
                    .zed_dot_dev
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = value
                .zed_dot_dev
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.zed_dot_dev.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }

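            // Google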
            merge(
                &mut settings.google.api_url,
                value.google.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = value
                .google
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.google.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.google.available_models,
                value
                    .google
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

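            // Copilot Chat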
            if let Some(low_speed_timeout) = value
                .copilot_chat
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.copilot_chat.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout));
            }
        }

        Ok(settings)
    }
}