use std::{sync::Arc, time::Duration};

use anyhow::Result;
use gpui::AppContext;
use project::Fs;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsSources};

use crate::{
    provider::{
        self,
        anthropic::AnthropicSettings,
        cloud::{self, ZedDotDevSettings},
        copilot_chat::CopilotChatSettings,
        google::GoogleSettings,
        ollama::OllamaSettings,
        open_ai::OpenAiSettings,
    },
    LanguageModelCacheConfiguration,
};

/// Initializes the language model settings.
pub fn init(fs: Arc<dyn Fs>, cx: &mut AppContext) {
    AllLanguageModelSettings::register(cx);

    if AllLanguageModelSettings::get_global(cx)
        .openai
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
            if let Some(settings) = setting.openai.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.openai = Some(OpenAiSettingsContent::Versioned(
                    VersionedOpenAiSettingsContent::V1(newest_version),
                ));
            }
        });
    }

    if AllLanguageModelSettings::get_global(cx)
        .anthropic
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
            if let Some(settings) = setting.anthropic.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.anthropic = Some(AnthropicSettingsContent::Versioned(
                    VersionedAnthropicSettingsContent::V1(newest_version),
                ));
            }
        });
    }
}

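/// Resolved settings for every supported language model provider.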
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
}

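/// The `language_models` section of the settings file, as deserialized from disk.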
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
}

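/// Anthropic settings as written in the settings file, in either the legacy
/// (unversioned) format or a versioned format.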
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum AnthropicSettingsContent {
    Legacy(LegacyAnthropicSettingsContent),
    Versioned(VersionedAnthropicSettingsContent),
}

impl AnthropicSettingsContent {
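    /// Converts the settings to the newest versioned format, returning the
    /// upgraded content and whether a migration from the legacy format occurred.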
    pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
        match self {
            AnthropicSettingsContent::Legacy(content) => (
                AnthropicSettingsContentV1 {
                    api_url: content.api_url,
                    low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                anthropic::Model::Custom {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration,
                                    max_output_tokens,
                                } => Some(provider::anthropic::AvailableModel {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration: cache_configuration.as_ref().map(
                                        |config| LanguageModelCacheConfiguration {
                                            max_cache_anchors: config.max_cache_anchors,
                                            should_speculate: config.should_speculate,
                                            min_total_token: config.min_total_token,
                                        },
                                    ),
                                    max_output_tokens,
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            AnthropicSettingsContent::Versioned(content) => match content {
                VersionedAnthropicSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

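/// The legacy (unversioned) Anthropic settings format, kept for
/// backwards compatibility with older settings files.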
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<anthropic::Model>>,
}

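/// Anthropic settings tagged with an explicit `version` field.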
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}

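/// Version 1 of the Anthropic settings format.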
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}

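/// Ollama settings as written in the settings file.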
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::ollama::AvailableModel>>,
}

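/// OpenAI settings as written in the settings file, in either the legacy
/// (unversioned) format or a versioned format.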
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {
    Legacy(LegacyOpenAiSettingsContent),
    Versioned(VersionedOpenAiSettingsContent),
}

impl OpenAiSettingsContent {
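    /// Converts the settings to the newest versioned format, returning the
    /// upgraded content and whether a migration from the legacy format occurred.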
    pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
        match self {
            OpenAiSettingsContent::Legacy(content) => (
                OpenAiSettingsContentV1 {
                    api_url: content.api_url,
                    low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                open_ai::Model::Custom {
                                    name,
                                    max_tokens,
                                    max_output_tokens,
                                } => Some(provider::open_ai::AvailableModel {
                                    name,
                                    max_tokens,
                                    max_output_tokens,
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            OpenAiSettingsContent::Versioned(content) => match content {
                VersionedOpenAiSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

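/// The legacy (unversioned) OpenAI settings format, kept for
/// backwards compatibility with older settings files.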
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<open_ai::Model>>,
}

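/// OpenAI settings tagged with an explicit `version` field.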
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}

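/// Version 1 of the OpenAI settings format.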
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}

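/// Google provider settings as written in the settings file.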
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}

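/// zed.dev provider settings as written in the settings file.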
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    available_models: Option<Vec<cloud::AvailableModel>>,
}

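/// Copilot Chat provider settings as written in the settings file.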
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {
    low_speed_timeout_in_seconds: Option<u64>,
}

impl settings::Settings for AllLanguageModelSettings {
    const KEY: Option<&'static str> = Some("language_models");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AllLanguageModelSettingsContent;

    fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
        fn merge<T>(target: &mut T, value: Option<T>) {
            if let Some(value) = value {
                *target = value;
            }
        }

        let mut settings = AllLanguageModelSettings::default();

        for value in sources.defaults_and_customizations() {
            // Anthropic
            let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.anthropic.needs_setting_migration = true;
            }

            merge(
                &mut settings.anthropic.api_url,
                anthropic.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = anthropic
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.anthropic.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.anthropic.available_models,
                anthropic.as_ref().and_then(|s| s.available_models.clone()),
            );

            // Ollama
            let ollama = value.ollama.clone();

            merge(
                &mut settings.ollama.api_url,
                value.ollama.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = value
                .ollama
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.ollama.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.ollama.available_models,
                ollama.as_ref().and_then(|s| s.available_models.clone()),
            );

            // OpenAI
            let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.openai.needs_setting_migration = true;
            }

            merge(
                &mut settings.openai.api_url,
                openai.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) =
                openai.as_ref().and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.openai.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.openai.available_models,
                openai.as_ref().and_then(|s| s.available_models.clone()),
            );

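            // zed.dev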
            merge(
                &mut settings.zed_dot_dev.available_models,
                value
                    .zed_dot_dev
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

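            // Google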
            merge(
                &mut settings.google.api_url,
                value.google.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = value
                .google
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.google.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.google.available_models,
                value
                    .google
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

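            // Copilot Chat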
            if let Some(low_speed_timeout) = value
                .copilot_chat
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.copilot_chat.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout));
            }
        }

        Ok(settings)
    }
}