use std::{sync::Arc, time::Duration};

use anyhow::Result;
use gpui::AppContext;
use project::Fs;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsSources};

use crate::{
    provider::{
        self,
        anthropic::AnthropicSettings,
        cloud::{self, ZedDotDevSettings},
        copilot_chat::CopilotChatSettings,
        google::GoogleSettings,
        ollama::OllamaSettings,
        open_ai::OpenAiSettings,
    },
    LanguageModelCacheConfiguration,
};

/// Initializes the language model settings: registers them with the global
/// settings store and migrates any legacy provider settings to their latest
/// versioned format.
pub fn init(fs: Arc<dyn Fs>, cx: &mut AppContext) {
    AllLanguageModelSettings::register(cx);

    if AllLanguageModelSettings::get_global(cx)
        .openai
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
            if let Some(settings) = setting.openai.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.openai = Some(OpenAiSettingsContent::Versioned(
                    VersionedOpenAiSettingsContent::V1(newest_version),
                ));
            }
        });
    }

    if AllLanguageModelSettings::get_global(cx)
        .anthropic
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
            if let Some(settings) = setting.anthropic.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.anthropic = Some(AnthropicSettingsContent::Versioned(
                    VersionedAnthropicSettingsContent::V1(newest_version),
                ));
            }
        });
    }
}

/// Resolved settings for all supported language model providers.
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
}

/// The `language_models` section of the settings file, with one optional
/// entry per provider.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
}
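
// An illustrative sketch (not defaults) of the JSON that `AllLanguageModelSettingsContent`
// above deserializes from, under the `language_models` key declared in
// `AllLanguageModelSettings::KEY` below. Every field is optional and the values
// shown here are examples only:
//
// {
//     "language_models": {
//         "anthropic": { "version": "1", "api_url": "https://api.anthropic.com" },
//         "openai": { "version": "1", "low_speed_timeout_in_seconds": 600 },
//         "ollama": { "api_url": "http://localhost:11434" },
//         "zed.dev": { "available_models": [] },
//         "google": {},
//         "copilot_chat": { "low_speed_timeout_in_seconds": 600 }
//     }
// }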

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum AnthropicSettingsContent {
    Legacy(LegacyAnthropicSettingsContent),
    Versioned(VersionedAnthropicSettingsContent),
}

impl AnthropicSettingsContent {
    /// Upgrades legacy settings content to the latest versioned form,
    /// returning the upgraded content and whether a migration took place.
    pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
        match self {
            AnthropicSettingsContent::Legacy(content) => (
                AnthropicSettingsContentV1 {
                    api_url: content.api_url,
                    low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                anthropic::Model::Custom {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration,
                                    max_output_tokens,
                                } => Some(provider::anthropic::AvailableModel {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration: cache_configuration.as_ref().map(
                                        |config| LanguageModelCacheConfiguration {
                                            max_cache_anchors: config.max_cache_anchors,
                                            should_speculate: config.should_speculate,
                                            min_total_token: config.min_total_token,
                                        },
                                    ),
                                    max_output_tokens,
                                }),
                                // Only custom models carry over; built-in
                                // model variants are dropped.
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            AnthropicSettingsContent::Versioned(content) => match content {
                VersionedAnthropicSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<anthropic::Model>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}
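
// Illustrative only: with `#[serde(tag = "version")]` and the `"1"` rename
// above, a migrated Anthropic entry is written back to the settings file as
// `{ "version": "1", "api_url": ..., "available_models": [...] }`, whereas the
// legacy shape carries no `"version"` key and is converted by `upgrade` above.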

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::ollama::AvailableModel>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {
    Legacy(LegacyOpenAiSettingsContent),
    Versioned(VersionedOpenAiSettingsContent),
}

impl OpenAiSettingsContent {
    /// Upgrades legacy OpenAI settings content to the latest versioned form.
    pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
        match self {
            OpenAiSettingsContent::Legacy(content) => (
                OpenAiSettingsContentV1 {
                    api_url: content.api_url,
                    low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                open_ai::Model::Custom {
                                    name,
                                    display_name,
                                    max_tokens,
                                    max_output_tokens,
                                    max_completion_tokens,
                                } => Some(provider::open_ai::AvailableModel {
                                    name,
                                    max_tokens,
                                    max_output_tokens,
                                    display_name,
                                    max_completion_tokens,
                                }),
                                // Only custom models carry over; built-in
                                // model variants are dropped.
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            OpenAiSettingsContent::Versioned(content) => match content {
                VersionedOpenAiSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<open_ai::Model>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    available_models: Option<Vec<cloud::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {
    low_speed_timeout_in_seconds: Option<u64>,
}

impl settings::Settings for AllLanguageModelSettings {
    const KEY: Option<&'static str> = Some("language_models");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AllLanguageModelSettingsContent;

    fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
        fn merge<T>(target: &mut T, value: Option<T>) {
            if let Some(value) = value {
                *target = value;
            }
        }

        let mut settings = AllLanguageModelSettings::default();

        // Defaults are visited first, then customizations, so later sources
        // override earlier ones.
        for value in sources.defaults_and_customizations() {
            // Anthropic
            let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.anthropic.needs_setting_migration = true;
            }

            merge(
                &mut settings.anthropic.api_url,
                anthropic.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = anthropic
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.anthropic.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.anthropic.available_models,
                anthropic.as_ref().and_then(|s| s.available_models.clone()),
            );

            // Ollama
            let ollama = value.ollama.clone();

            merge(
                &mut settings.ollama.api_url,
                ollama.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = ollama
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.ollama.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.ollama.available_models,
                ollama.as_ref().and_then(|s| s.available_models.clone()),
            );

            // OpenAI
            let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.openai.needs_setting_migration = true;
            }

            merge(
                &mut settings.openai.api_url,
                openai.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) =
                openai.as_ref().and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.openai.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.openai.available_models,
                openai.as_ref().and_then(|s| s.available_models.clone()),
            );

            // Zed.dev
            merge(
                &mut settings.zed_dot_dev.available_models,
                value
                    .zed_dot_dev
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

            // Google
            merge(
                &mut settings.google.api_url,
                value.google.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = value
                .google
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.google.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.google.available_models,
                value
                    .google
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

            // Copilot Chat
            if let Some(low_speed_timeout_in_seconds) = value
                .copilot_chat
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.copilot_chat.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
        }

        Ok(settings)
    }
}
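
// A minimal illustrative test sketch, not part of the original file: it checks
// that a legacy Anthropic settings blob reports that it was upgraded, while an
// already versioned one does not. The URL below is an example value only.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn legacy_anthropic_settings_are_upgraded_to_v1() {
        let legacy = AnthropicSettingsContent::Legacy(LegacyAnthropicSettingsContent {
            api_url: Some("https://api.anthropic.com".into()),
            low_speed_timeout_in_seconds: Some(600),
            available_models: None,
        });
        let (v1, upgraded) = legacy.upgrade();
        assert!(upgraded);
        assert_eq!(v1.api_url.as_deref(), Some("https://api.anthropic.com"));

        // Re-wrapping the upgraded content as versioned is a no-op on the next pass.
        let versioned =
            AnthropicSettingsContent::Versioned(VersionedAnthropicSettingsContent::V1(v1));
        let (_, upgraded) = versioned.upgrade();
        assert!(!upgraded);
    }
}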