1use std::{sync::Arc, time::Duration};
2
3use anyhow::Result;
4use gpui::AppContext;
5use project::Fs;
6use schemars::JsonSchema;
7use serde::{Deserialize, Serialize};
8use settings::{update_settings_file, Settings, SettingsSources};
9
10use crate::provider::{
11 self,
12 anthropic::AnthropicSettings,
13 cloud::{self, ZedDotDevSettings},
14 copilot_chat::CopilotChatSettings,
15 google::GoogleSettings,
16 ollama::OllamaSettings,
17 open_ai::OpenAiSettings,
18};
19
20/// Initializes the language model settings.
21pub fn init(fs: Arc<dyn Fs>, cx: &mut AppContext) {
22 AllLanguageModelSettings::register(cx);
23
24 if AllLanguageModelSettings::get_global(cx)
25 .openai
26 .needs_setting_migration
27 {
28 update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
29 if let Some(settings) = setting.openai.clone() {
30 let (newest_version, _) = settings.upgrade();
31 setting.openai = Some(OpenAiSettingsContent::Versioned(
32 VersionedOpenAiSettingsContent::V1(newest_version),
33 ));
34 }
35 });
36 }
37
38 if AllLanguageModelSettings::get_global(cx)
39 .anthropic
40 .needs_setting_migration
41 {
42 update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
43 if let Some(settings) = setting.anthropic.clone() {
44 let (newest_version, _) = settings.upgrade();
45 setting.anthropic = Some(AnthropicSettingsContent::Versioned(
46 VersionedAnthropicSettingsContent::V1(newest_version),
47 ));
48 }
49 });
50 }
51}
52
/// Fully-resolved settings for every supported language model provider.
///
/// Built by the `settings::Settings::load` impl below from the merged
/// contents of all settings sources; the on-disk shape is
/// [`AllLanguageModelSettingsContent`].
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
}
62
/// On-disk (settings file) representation of all language model provider
/// settings; every provider section is optional.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    // Serialized under the "zed.dev" key, which is not a valid Rust identifier.
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
}
73
74#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
75#[serde(untagged)]
76pub enum AnthropicSettingsContent {
77 Legacy(LegacyAnthropicSettingsContent),
78 Versioned(VersionedAnthropicSettingsContent),
79}
80
81impl AnthropicSettingsContent {
82 pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
83 match self {
84 AnthropicSettingsContent::Legacy(content) => (
85 AnthropicSettingsContentV1 {
86 api_url: content.api_url,
87 low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
88 available_models: content.available_models.map(|models| {
89 models
90 .into_iter()
91 .filter_map(|model| match model {
92 anthropic::Model::Custom {
93 name,
94 max_tokens,
95 tool_override,
96 } => Some(provider::anthropic::AvailableModel {
97 name,
98 max_tokens,
99 tool_override,
100 }),
101 _ => None,
102 })
103 .collect()
104 }),
105 },
106 true,
107 ),
108 AnthropicSettingsContent::Versioned(content) => match content {
109 VersionedAnthropicSettingsContent::V1(content) => (content, false),
110 },
111 }
112 }
113}
114
/// Legacy (unversioned) Anthropic settings as written by older releases;
/// upgraded to [`AnthropicSettingsContentV1`] by
/// [`AnthropicSettingsContent::upgrade`].
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    // Override for the Anthropic API base URL.
    pub api_url: Option<String>,
    // Request low-speed timeout, in whole seconds.
    pub low_speed_timeout_in_seconds: Option<u64>,
    // Models expressed with the `anthropic` crate's own model type.
    pub available_models: Option<Vec<anthropic::Model>>,
}
121
/// Versioned Anthropic settings; the `"version"` field in the settings file
/// selects the variant (currently only `"1"`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}
128
/// Version 1 of the Anthropic settings file format.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    // Override for the Anthropic API base URL.
    pub api_url: Option<String>,
    // Request low-speed timeout, in whole seconds.
    pub low_speed_timeout_in_seconds: Option<u64>,
    // Models described with this crate's provider-local model type.
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}
135
/// On-disk Ollama settings (unversioned).
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    // Override for the Ollama API base URL.
    pub api_url: Option<String>,
    // Request low-speed timeout, in whole seconds.
    pub low_speed_timeout_in_seconds: Option<u64>,
}
141
142#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
143#[serde(untagged)]
144pub enum OpenAiSettingsContent {
145 Legacy(LegacyOpenAiSettingsContent),
146 Versioned(VersionedOpenAiSettingsContent),
147}
148
149impl OpenAiSettingsContent {
150 pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
151 match self {
152 OpenAiSettingsContent::Legacy(content) => (
153 OpenAiSettingsContentV1 {
154 api_url: content.api_url,
155 low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
156 available_models: content.available_models.map(|models| {
157 models
158 .into_iter()
159 .filter_map(|model| match model {
160 open_ai::Model::Custom { name, max_tokens } => {
161 Some(provider::open_ai::AvailableModel { name, max_tokens })
162 }
163 _ => None,
164 })
165 .collect()
166 }),
167 },
168 true,
169 ),
170 OpenAiSettingsContent::Versioned(content) => match content {
171 VersionedOpenAiSettingsContent::V1(content) => (content, false),
172 },
173 }
174 }
175}
176
/// Legacy (unversioned) OpenAI settings as written by older releases;
/// upgraded to [`OpenAiSettingsContentV1`] by
/// [`OpenAiSettingsContent::upgrade`].
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    // Override for the OpenAI API base URL.
    pub api_url: Option<String>,
    // Request low-speed timeout, in whole seconds.
    pub low_speed_timeout_in_seconds: Option<u64>,
    // Models expressed with the `open_ai` crate's own model type.
    pub available_models: Option<Vec<open_ai::Model>>,
}
183
/// Versioned OpenAI settings; the `"version"` field in the settings file
/// selects the variant (currently only `"1"`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}
190
/// Version 1 of the OpenAI settings file format.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    // Override for the OpenAI API base URL.
    pub api_url: Option<String>,
    // Request low-speed timeout, in whole seconds.
    pub low_speed_timeout_in_seconds: Option<u64>,
    // Models described with this crate's provider-local model type.
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}
197
/// On-disk Google AI settings (unversioned).
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    // Override for the Google API base URL.
    pub api_url: Option<String>,
    // Request low-speed timeout, in whole seconds.
    pub low_speed_timeout_in_seconds: Option<u64>,
    // Models described with this crate's provider-local model type.
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}
204
/// On-disk settings for the `"zed.dev"` (Zed-hosted) provider.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    // Models described with the cloud provider's model type; module-private,
    // read only by the `Settings::load` impl in this file.
    available_models: Option<Vec<cloud::AvailableModel>>,
}
209
/// On-disk GitHub Copilot Chat settings.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {
    // Request low-speed timeout, in whole seconds; module-private, read only
    // by the `Settings::load` impl in this file.
    low_speed_timeout_in_seconds: Option<u64>,
}
214
impl settings::Settings for AllLanguageModelSettings {
    // All provider settings live under the "language_models" key.
    const KEY: Option<&'static str> = Some("language_models");

    // Keep "version" keys when rewriting the file so versioned provider
    // settings stay explicitly tagged on disk.
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AllLanguageModelSettingsContent;

    /// Merges defaults and user customizations into resolved settings.
    ///
    /// Sources are visited in order, so later (user) values override earlier
    /// (default) values field by field; fields absent from a source leave the
    /// previously merged value untouched.
    fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
        // Overwrite `target` only when this source actually provided a value.
        fn merge<T>(target: &mut T, value: Option<T>) {
            if let Some(value) = value {
                *target = value;
            }
        }

        let mut settings = AllLanguageModelSettings::default();

        for value in sources.defaults_and_customizations() {
            // Anthropic
            // Upgrade legacy-format settings to the newest versioned shape;
            // `upgraded` is true when the source was in the legacy format.
            let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                // Flag the migration so `init` rewrites the settings file.
                settings.anthropic.needs_setting_migration = true;
            }

            merge(
                &mut settings.anthropic.api_url,
                anthropic.as_ref().and_then(|s| s.api_url.clone()),
            );
            // Timeouts are stored in the file as whole seconds and resolved
            // here into a `Duration`.
            if let Some(low_speed_timeout_in_seconds) = anthropic
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.anthropic.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.anthropic.available_models,
                anthropic.as_ref().and_then(|s| s.available_models.clone()),
            );

            // Ollama
            merge(
                &mut settings.ollama.api_url,
                value.ollama.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = value
                .ollama
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.ollama.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }

            // OpenAI
            // Same legacy-upgrade handling as the Anthropic section above.
            let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.openai.needs_setting_migration = true;
            }

            merge(
                &mut settings.openai.api_url,
                openai.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) =
                openai.as_ref().and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.openai.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.openai.available_models,
                openai.as_ref().and_then(|s| s.available_models.clone()),
            );

            // zed.dev
            merge(
                &mut settings.zed_dot_dev.available_models,
                value
                    .zed_dot_dev
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

            // Google
            merge(
                &mut settings.google.api_url,
                value.google.as_ref().and_then(|s| s.api_url.clone()),
            );
            if let Some(low_speed_timeout_in_seconds) = value
                .google
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.google.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout_in_seconds));
            }
            merge(
                &mut settings.google.available_models,
                value
                    .google
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

            // Copilot Chat
            if let Some(low_speed_timeout) = value
                .copilot_chat
                .as_ref()
                .and_then(|s| s.low_speed_timeout_in_seconds)
            {
                settings.copilot_chat.low_speed_timeout =
                    Some(Duration::from_secs(low_speed_timeout));
            }
        }

        Ok(settings)
    }
}