1use std::sync::Arc;
2
3use anyhow::Result;
4use gpui::AppContext;
5use language_model::LanguageModelCacheConfiguration;
6use project::Fs;
7use schemars::JsonSchema;
8use serde::{Deserialize, Serialize};
9use settings::{update_settings_file, Settings, SettingsSources};
10
11use crate::provider::{
12 self,
13 anthropic::AnthropicSettings,
14 cloud::{self, ZedDotDevSettings},
15 copilot_chat::CopilotChatSettings,
16 google::GoogleSettings,
17 lmstudio::LmStudioSettings,
18 ollama::OllamaSettings,
19 open_ai::OpenAiSettings,
20};
21
22/// Initializes the language model settings.
23pub fn init(fs: Arc<dyn Fs>, cx: &mut AppContext) {
24 AllLanguageModelSettings::register(cx);
25
26 if AllLanguageModelSettings::get_global(cx)
27 .openai
28 .needs_setting_migration
29 {
30 update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
31 if let Some(settings) = setting.openai.clone() {
32 let (newest_version, _) = settings.upgrade();
33 setting.openai = Some(OpenAiSettingsContent::Versioned(
34 VersionedOpenAiSettingsContent::V1(newest_version),
35 ));
36 }
37 });
38 }
39
40 if AllLanguageModelSettings::get_global(cx)
41 .anthropic
42 .needs_setting_migration
43 {
44 update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
45 if let Some(settings) = setting.anthropic.clone() {
46 let (newest_version, _) = settings.upgrade();
47 setting.anthropic = Some(AnthropicSettingsContent::Versioned(
48 VersionedAnthropicSettingsContent::V1(newest_version),
49 ));
50 }
51 });
52 }
53}
54
/// Fully-resolved settings for every supported language model provider.
///
/// Built by the `settings::Settings::load` implementation below from the
/// on-disk representation, [`AllLanguageModelSettingsContent`].
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
    pub lmstudio: LmStudioSettings,
}
65
/// On-disk (settings-file) representation of [`AllLanguageModelSettings`].
///
/// Every field is optional so users only need to spell out the providers
/// they want to customize.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub lmstudio: Option<LmStudioSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    // Stored under the "zed.dev" key, which is not a valid Rust identifier.
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
}
77
/// Anthropic settings as they may appear in the settings file: either the
/// legacy (unversioned) shape or a versioned one. With `untagged`, serde
/// tries each variant in order until one deserializes successfully.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum AnthropicSettingsContent {
    Legacy(LegacyAnthropicSettingsContent),
    Versioned(VersionedAnthropicSettingsContent),
}
84
85impl AnthropicSettingsContent {
86 pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
87 match self {
88 AnthropicSettingsContent::Legacy(content) => (
89 AnthropicSettingsContentV1 {
90 api_url: content.api_url,
91 available_models: content.available_models.map(|models| {
92 models
93 .into_iter()
94 .filter_map(|model| match model {
95 anthropic::Model::Custom {
96 name,
97 display_name,
98 max_tokens,
99 tool_override,
100 cache_configuration,
101 max_output_tokens,
102 default_temperature,
103 extra_beta_headers,
104 } => Some(provider::anthropic::AvailableModel {
105 name,
106 display_name,
107 max_tokens,
108 tool_override,
109 cache_configuration: cache_configuration.as_ref().map(
110 |config| LanguageModelCacheConfiguration {
111 max_cache_anchors: config.max_cache_anchors,
112 should_speculate: config.should_speculate,
113 min_total_token: config.min_total_token,
114 },
115 ),
116 max_output_tokens,
117 default_temperature,
118 extra_beta_headers,
119 }),
120 _ => None,
121 })
122 .collect()
123 }),
124 },
125 true,
126 ),
127 AnthropicSettingsContent::Versioned(content) => match content {
128 VersionedAnthropicSettingsContent::V1(content) => (content, false),
129 },
130 }
131 }
132}
133
/// The original, unversioned Anthropic settings shape. Models are the
/// `anthropic` crate's own `Model` type rather than this crate's
/// `provider::anthropic::AvailableModel`.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<anthropic::Model>>,
}
139
/// Versioned Anthropic settings, discriminated by a `"version"` field in the
/// settings file (e.g. `"version": "1"`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}
146
/// Version 1 of the Anthropic settings schema.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}
152
/// Settings-file representation of the Ollama provider configuration.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::ollama::AvailableModel>>,
}
158
/// Settings-file representation of the LM Studio provider configuration.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LmStudioSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::lmstudio::AvailableModel>>,
}
164
/// OpenAI settings as they may appear in the settings file: either the
/// legacy (unversioned) shape or a versioned one. With `untagged`, serde
/// tries each variant in order until one deserializes successfully.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {
    Legacy(LegacyOpenAiSettingsContent),
    Versioned(VersionedOpenAiSettingsContent),
}
171
172impl OpenAiSettingsContent {
173 pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
174 match self {
175 OpenAiSettingsContent::Legacy(content) => (
176 OpenAiSettingsContentV1 {
177 api_url: content.api_url,
178 available_models: content.available_models.map(|models| {
179 models
180 .into_iter()
181 .filter_map(|model| match model {
182 open_ai::Model::Custom {
183 name,
184 display_name,
185 max_tokens,
186 max_output_tokens,
187 max_completion_tokens,
188 } => Some(provider::open_ai::AvailableModel {
189 name,
190 max_tokens,
191 max_output_tokens,
192 display_name,
193 max_completion_tokens,
194 }),
195 _ => None,
196 })
197 .collect()
198 }),
199 },
200 true,
201 ),
202 OpenAiSettingsContent::Versioned(content) => match content {
203 VersionedOpenAiSettingsContent::V1(content) => (content, false),
204 },
205 }
206 }
207}
208
/// The original, unversioned OpenAI settings shape. Models are the
/// `open_ai` crate's own `Model` type rather than this crate's
/// `provider::open_ai::AvailableModel`.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<open_ai::Model>>,
}
214
/// Versioned OpenAI settings, discriminated by a `"version"` field in the
/// settings file (e.g. `"version": "1"`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}
221
/// Version 1 of the OpenAI settings schema.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}
227
/// Settings-file representation of the Google provider configuration.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}
233
/// Settings-file representation of the zed.dev (cloud) provider configuration.
/// The field is private; it is only read by the `load` implementation below.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    available_models: Option<Vec<cloud::AvailableModel>>,
}
238
/// Settings-file representation for Copilot Chat. Currently a placeholder
/// with no configurable fields.
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {}
241
242impl settings::Settings for AllLanguageModelSettings {
243 const KEY: Option<&'static str> = Some("language_models");
244
245 const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);
246
247 type FileContent = AllLanguageModelSettingsContent;
248
249 fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
250 fn merge<T>(target: &mut T, value: Option<T>) {
251 if let Some(value) = value {
252 *target = value;
253 }
254 }
255
256 let mut settings = AllLanguageModelSettings::default();
257
258 for value in sources.defaults_and_customizations() {
259 // Anthropic
260 let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
261 Some((content, upgraded)) => (Some(content), upgraded),
262 None => (None, false),
263 };
264
265 if upgraded {
266 settings.anthropic.needs_setting_migration = true;
267 }
268
269 merge(
270 &mut settings.anthropic.api_url,
271 anthropic.as_ref().and_then(|s| s.api_url.clone()),
272 );
273 merge(
274 &mut settings.anthropic.available_models,
275 anthropic.as_ref().and_then(|s| s.available_models.clone()),
276 );
277
278 // Ollama
279 let ollama = value.ollama.clone();
280
281 merge(
282 &mut settings.ollama.api_url,
283 value.ollama.as_ref().and_then(|s| s.api_url.clone()),
284 );
285 merge(
286 &mut settings.ollama.available_models,
287 ollama.as_ref().and_then(|s| s.available_models.clone()),
288 );
289
290 // LM Studio
291 let lmstudio = value.lmstudio.clone();
292
293 merge(
294 &mut settings.lmstudio.api_url,
295 value.lmstudio.as_ref().and_then(|s| s.api_url.clone()),
296 );
297 merge(
298 &mut settings.lmstudio.available_models,
299 lmstudio.as_ref().and_then(|s| s.available_models.clone()),
300 );
301
302 // OpenAI
303 let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
304 Some((content, upgraded)) => (Some(content), upgraded),
305 None => (None, false),
306 };
307
308 if upgraded {
309 settings.openai.needs_setting_migration = true;
310 }
311
312 merge(
313 &mut settings.openai.api_url,
314 openai.as_ref().and_then(|s| s.api_url.clone()),
315 );
316 merge(
317 &mut settings.openai.available_models,
318 openai.as_ref().and_then(|s| s.available_models.clone()),
319 );
320 merge(
321 &mut settings.zed_dot_dev.available_models,
322 value
323 .zed_dot_dev
324 .as_ref()
325 .and_then(|s| s.available_models.clone()),
326 );
327 merge(
328 &mut settings.google.api_url,
329 value.google.as_ref().and_then(|s| s.api_url.clone()),
330 );
331 merge(
332 &mut settings.google.available_models,
333 value
334 .google
335 .as_ref()
336 .and_then(|s| s.available_models.clone()),
337 );
338 }
339
340 Ok(settings)
341 }
342}