use std::sync::Arc;

use anyhow::Result;
use gpui::AppContext;
use language_model::LanguageModelCacheConfiguration;
use project::Fs;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsSources};

use crate::provider::{
    self,
    anthropic::AnthropicSettings,
    cloud::{self, ZedDotDevSettings},
    copilot_chat::CopilotChatSettings,
    google::GoogleSettings,
    ollama::OllamaSettings,
    open_ai::OpenAiSettings,
};

/// Initializes the language model settings.
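/// If any provider settings are still in a legacy format, they are migrated in the
/// settings file to the latest versioned format.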
pub fn init(fs: Arc<dyn Fs>, cx: &mut AppContext) {
    AllLanguageModelSettings::register(cx);

    if AllLanguageModelSettings::get_global(cx)
        .openai
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
            if let Some(settings) = setting.openai.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.openai = Some(OpenAiSettingsContent::Versioned(
                    VersionedOpenAiSettingsContent::V1(newest_version),
                ));
            }
        });
    }

    if AllLanguageModelSettings::get_global(cx)
        .anthropic
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
            if let Some(settings) = setting.anthropic.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.anthropic = Some(AnthropicSettingsContent::Versioned(
                    VersionedAnthropicSettingsContent::V1(newest_version),
                ));
            }
        });
    }
}

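/// The resolved settings for every supported language model provider.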
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
}

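/// The settings-file representation of [`AllLanguageModelSettings`], read from the
/// `language_models` key of the settings file.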
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
}

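/// Anthropic settings as written in the settings file. The untagged representation
/// accepts either the legacy flat form or the explicitly versioned form.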
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum AnthropicSettingsContent {
    Legacy(LegacyAnthropicSettingsContent),
    Versioned(VersionedAnthropicSettingsContent),
}

impl AnthropicSettingsContent {
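    /// Upgrades the content to the latest versioned form, returning `true` if a
    /// legacy value was migrated.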
    pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
        match self {
            AnthropicSettingsContent::Legacy(content) => (
                AnthropicSettingsContentV1 {
                    api_url: content.api_url,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                anthropic::Model::Custom {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration,
                                    max_output_tokens,
                                    default_temperature,
                                    extra_beta_headers,
                                } => Some(provider::anthropic::AvailableModel {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration: cache_configuration.as_ref().map(
                                        |config| LanguageModelCacheConfiguration {
                                            max_cache_anchors: config.max_cache_anchors,
                                            should_speculate: config.should_speculate,
                                            min_total_token: config.min_total_token,
                                        },
                                    ),
                                    max_output_tokens,
                                    default_temperature,
                                    extra_beta_headers,
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            AnthropicSettingsContent::Versioned(content) => match content {
                VersionedAnthropicSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<anthropic::Model>>,
}

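/// Anthropic settings carrying an explicit `"version"` field in the settings file;
/// currently only version `"1"` exists.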
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::ollama::AvailableModel>>,
}

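/// OpenAI settings as written in the settings file. Like the Anthropic settings, the
/// untagged representation accepts either the legacy flat form or the versioned form.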
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {
    Legacy(LegacyOpenAiSettingsContent),
    Versioned(VersionedOpenAiSettingsContent),
}

impl OpenAiSettingsContent {
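    /// Upgrades the content to the latest versioned form, returning `true` if a
    /// legacy value was migrated.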
    pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
        match self {
            OpenAiSettingsContent::Legacy(content) => (
                OpenAiSettingsContentV1 {
                    api_url: content.api_url,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                open_ai::Model::Custom {
                                    name,
                                    display_name,
                                    max_tokens,
                                    max_output_tokens,
                                    max_completion_tokens,
                                } => Some(provider::open_ai::AvailableModel {
                                    name,
                                    max_tokens,
                                    max_output_tokens,
                                    display_name,
                                    max_completion_tokens,
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            OpenAiSettingsContent::Versioned(content) => match content {
                VersionedOpenAiSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<open_ai::Model>>,
}

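/// OpenAI settings carrying an explicit `"version"` field in the settings file;
/// currently only version `"1"` exists.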
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    available_models: Option<Vec<cloud::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {}

impl settings::Settings for AllLanguageModelSettings {
    const KEY: Option<&'static str> = Some("language_models");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AllLanguageModelSettingsContent;

    fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
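        // Overwrite `target` only when a source provides a value, so later sources
        // (user customizations) override earlier ones (defaults) field by field.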
        fn merge<T>(target: &mut T, value: Option<T>) {
            if let Some(value) = value {
                *target = value;
            }
        }

        let mut settings = AllLanguageModelSettings::default();

        for value in sources.defaults_and_customizations() {
            // Anthropic
            let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.anthropic.needs_setting_migration = true;
            }

            merge(
                &mut settings.anthropic.api_url,
                anthropic.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.anthropic.available_models,
                anthropic.as_ref().and_then(|s| s.available_models.clone()),
            );

            // Ollama
            let ollama = value.ollama.clone();

            merge(
                &mut settings.ollama.api_url,
                ollama.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.ollama.available_models,
                ollama.as_ref().and_then(|s| s.available_models.clone()),
            );

            // OpenAI
            let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.openai.needs_setting_migration = true;
            }

            merge(
                &mut settings.openai.api_url,
                openai.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.openai.available_models,
                openai.as_ref().and_then(|s| s.available_models.clone()),
            );
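
            // Zed.dev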
            merge(
                &mut settings.zed_dot_dev.available_models,
                value
                    .zed_dot_dev
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );
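
            // Google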
            merge(
                &mut settings.google.api_url,
                value.google.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.google.available_models,
                value
                    .google
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );
        }

        Ok(settings)
    }
}