1use std::{sync::Arc, time::Duration};
2
3use anyhow::Result;
4use gpui::AppContext;
5use project::Fs;
6use schemars::JsonSchema;
7use serde::{Deserialize, Serialize};
8use settings::{update_settings_file, Settings, SettingsSources};
9
10use crate::provider::{
11 self,
12 anthropic::AnthropicSettings,
13 cloud::{self, ZedDotDevSettings},
14 copilot_chat::CopilotChatSettings,
15 google::GoogleSettings,
16 ollama::OllamaSettings,
17 open_ai::OpenAiSettings,
18};
19
20/// Initializes the language model settings.
21pub fn init(fs: Arc<dyn Fs>, cx: &mut AppContext) {
22 AllLanguageModelSettings::register(cx);
23
24 if AllLanguageModelSettings::get_global(cx)
25 .openai
26 .needs_setting_migration
27 {
28 update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
29 if let Some(settings) = setting.openai.clone() {
30 let (newest_version, _) = settings.upgrade();
31 setting.openai = Some(OpenAiSettingsContent::Versioned(
32 VersionedOpenAiSettingsContent::V1(newest_version),
33 ));
34 }
35 });
36 }
37
38 if AllLanguageModelSettings::get_global(cx)
39 .anthropic
40 .needs_setting_migration
41 {
42 update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
43 if let Some(settings) = setting.anthropic.clone() {
44 let (newest_version, _) = settings.upgrade();
45 setting.anthropic = Some(AnthropicSettingsContent::Versioned(
46 VersionedAnthropicSettingsContent::V1(newest_version),
47 ));
48 }
49 });
50 }
51}
52
/// Fully-resolved, in-memory settings for every supported language model
/// provider — one field per provider.
///
/// Built by the [`settings::Settings::load`] implementation below from the
/// merged settings sources; the serialized counterpart is
/// [`AllLanguageModelSettingsContent`].
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
}
62
/// Serialized (settings-file) form of [`AllLanguageModelSettings`].
///
/// Every field is optional so a user may configure any subset of providers.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    // Stored under the `"zed.dev"` key in the settings file.
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
}
73
74#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
75#[serde(untagged)]
76pub enum AnthropicSettingsContent {
77 Legacy(LegacyAnthropicSettingsContent),
78 Versioned(VersionedAnthropicSettingsContent),
79}
80
81impl AnthropicSettingsContent {
82 pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
83 match self {
84 AnthropicSettingsContent::Legacy(content) => (
85 AnthropicSettingsContentV1 {
86 api_url: content.api_url,
87 low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
88 available_models: content.available_models.map(|models| {
89 models
90 .into_iter()
91 .filter_map(|model| match model {
92 anthropic::Model::Custom { name, max_tokens } => {
93 Some(provider::anthropic::AvailableModel { name, max_tokens })
94 }
95 _ => None,
96 })
97 .collect()
98 }),
99 },
100 true,
101 ),
102 AnthropicSettingsContent::Versioned(content) => match content {
103 VersionedAnthropicSettingsContent::V1(content) => (content, false),
104 },
105 }
106 }
107}
108
/// Anthropic settings as written before the settings became versioned.
///
/// `available_models` holds `anthropic::Model` values directly; `upgrade`
/// converts the `Custom` entries into
/// [`provider::anthropic::AvailableModel`] values.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<anthropic::Model>>,
}
115
/// Versioned Anthropic settings, discriminated by a `"version"` tag in the
/// settings file (currently only `"1"`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}
122
/// Version 1 of the Anthropic settings content.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    // Custom API endpoint URL, when overridden by the user.
    pub api_url: Option<String>,
    // Low-speed request timeout; converted to a `Duration` in `load`.
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}
129
/// Serialized Ollama settings (unversioned).
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    // Low-speed request timeout; converted to a `Duration` in `load`.
    pub low_speed_timeout_in_seconds: Option<u64>,
}
135
136#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
137#[serde(untagged)]
138pub enum OpenAiSettingsContent {
139 Legacy(LegacyOpenAiSettingsContent),
140 Versioned(VersionedOpenAiSettingsContent),
141}
142
143impl OpenAiSettingsContent {
144 pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
145 match self {
146 OpenAiSettingsContent::Legacy(content) => (
147 OpenAiSettingsContentV1 {
148 api_url: content.api_url,
149 low_speed_timeout_in_seconds: content.low_speed_timeout_in_seconds,
150 available_models: content.available_models.map(|models| {
151 models
152 .into_iter()
153 .filter_map(|model| match model {
154 open_ai::Model::Custom { name, max_tokens } => {
155 Some(provider::open_ai::AvailableModel { name, max_tokens })
156 }
157 _ => None,
158 })
159 .collect()
160 }),
161 },
162 true,
163 ),
164 OpenAiSettingsContent::Versioned(content) => match content {
165 VersionedOpenAiSettingsContent::V1(content) => (content, false),
166 },
167 }
168 }
169}
170
/// OpenAI settings as written before the settings became versioned.
///
/// `available_models` holds `open_ai::Model` values directly; `upgrade`
/// converts the `Custom` entries into
/// [`provider::open_ai::AvailableModel`] values.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<open_ai::Model>>,
}
177
/// Versioned OpenAI settings, discriminated by a `"version"` tag in the
/// settings file (currently only `"1"`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}
184
/// Version 1 of the OpenAI settings content.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    // Custom API endpoint URL, when overridden by the user.
    pub api_url: Option<String>,
    // Low-speed request timeout; converted to a `Duration` in `load`.
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}
191
/// Serialized Google AI settings (unversioned).
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    // Low-speed request timeout; converted to a `Duration` in `load`.
    pub low_speed_timeout_in_seconds: Option<u64>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}
198
/// Serialized zed.dev provider settings (stored under the `"zed.dev"` key).
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    available_models: Option<Vec<cloud::AvailableModel>>,
}
203
/// Serialized Copilot Chat settings (unversioned).
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {
    // Low-speed request timeout; converted to a `Duration` in `load`.
    low_speed_timeout_in_seconds: Option<u64>,
}
208
209impl settings::Settings for AllLanguageModelSettings {
210 const KEY: Option<&'static str> = Some("language_models");
211
212 const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);
213
214 type FileContent = AllLanguageModelSettingsContent;
215
216 fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
217 fn merge<T>(target: &mut T, value: Option<T>) {
218 if let Some(value) = value {
219 *target = value;
220 }
221 }
222
223 let mut settings = AllLanguageModelSettings::default();
224
225 for value in sources.defaults_and_customizations() {
226 // Anthropic
227 let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
228 Some((content, upgraded)) => (Some(content), upgraded),
229 None => (None, false),
230 };
231
232 if upgraded {
233 settings.anthropic.needs_setting_migration = true;
234 }
235
236 merge(
237 &mut settings.anthropic.api_url,
238 anthropic.as_ref().and_then(|s| s.api_url.clone()),
239 );
240 if let Some(low_speed_timeout_in_seconds) = anthropic
241 .as_ref()
242 .and_then(|s| s.low_speed_timeout_in_seconds)
243 {
244 settings.anthropic.low_speed_timeout =
245 Some(Duration::from_secs(low_speed_timeout_in_seconds));
246 }
247 merge(
248 &mut settings.anthropic.available_models,
249 anthropic.as_ref().and_then(|s| s.available_models.clone()),
250 );
251
252 merge(
253 &mut settings.ollama.api_url,
254 value.ollama.as_ref().and_then(|s| s.api_url.clone()),
255 );
256 if let Some(low_speed_timeout_in_seconds) = value
257 .ollama
258 .as_ref()
259 .and_then(|s| s.low_speed_timeout_in_seconds)
260 {
261 settings.ollama.low_speed_timeout =
262 Some(Duration::from_secs(low_speed_timeout_in_seconds));
263 }
264
265 // OpenAI
266 let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
267 Some((content, upgraded)) => (Some(content), upgraded),
268 None => (None, false),
269 };
270
271 if upgraded {
272 settings.openai.needs_setting_migration = true;
273 }
274
275 merge(
276 &mut settings.openai.api_url,
277 openai.as_ref().and_then(|s| s.api_url.clone()),
278 );
279 if let Some(low_speed_timeout_in_seconds) =
280 openai.as_ref().and_then(|s| s.low_speed_timeout_in_seconds)
281 {
282 settings.openai.low_speed_timeout =
283 Some(Duration::from_secs(low_speed_timeout_in_seconds));
284 }
285 merge(
286 &mut settings.openai.available_models,
287 openai.as_ref().and_then(|s| s.available_models.clone()),
288 );
289
290 merge(
291 &mut settings.zed_dot_dev.available_models,
292 value
293 .zed_dot_dev
294 .as_ref()
295 .and_then(|s| s.available_models.clone()),
296 );
297
298 merge(
299 &mut settings.google.api_url,
300 value.google.as_ref().and_then(|s| s.api_url.clone()),
301 );
302 if let Some(low_speed_timeout_in_seconds) = value
303 .google
304 .as_ref()
305 .and_then(|s| s.low_speed_timeout_in_seconds)
306 {
307 settings.google.low_speed_timeout =
308 Some(Duration::from_secs(low_speed_timeout_in_seconds));
309 }
310 merge(
311 &mut settings.google.available_models,
312 value
313 .google
314 .as_ref()
315 .and_then(|s| s.available_models.clone()),
316 );
317
318 if let Some(low_speed_timeout) = value
319 .copilot_chat
320 .as_ref()
321 .and_then(|s| s.low_speed_timeout_in_seconds)
322 {
323 settings.copilot_chat.low_speed_timeout =
324 Some(Duration::from_secs(low_speed_timeout));
325 }
326 }
327
328 Ok(settings)
329 }
330}