use std::sync::Arc;

use anyhow::Result;
use gpui::AppContext;
use language_model::LanguageModelCacheConfiguration;
use project::Fs;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsSources};

use crate::provider::{
    self,
    anthropic::AnthropicSettings,
    cloud::{self, ZedDotDevSettings},
    copilot_chat::CopilotChatSettings,
    google::GoogleSettings,
    ollama::OllamaSettings,
    open_ai::OpenAiSettings,
};

/// Initializes the language model settings.
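///
/// If legacy (unversioned) Anthropic or OpenAI settings are detected, they are
/// rewritten in the settings file to the current versioned format.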
pub fn init(fs: Arc<dyn Fs>, cx: &mut AppContext) {
    AllLanguageModelSettings::register(cx);

    if AllLanguageModelSettings::get_global(cx)
        .openai
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
            if let Some(settings) = setting.openai.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.openai = Some(OpenAiSettingsContent::Versioned(
                    VersionedOpenAiSettingsContent::V1(newest_version),
                ));
            }
        });
    }

    if AllLanguageModelSettings::get_global(cx)
        .anthropic
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
            if let Some(settings) = setting.anthropic.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.anthropic = Some(AnthropicSettingsContent::Versioned(
                    VersionedAnthropicSettingsContent::V1(newest_version),
                ));
            }
        });
    }
}

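/// Resolved settings for all built-in language model providers.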
#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
}

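/// The `language_models` section of the settings file, as written on disk.
///
/// An illustrative example of the corresponding JSON (values are placeholders,
/// not defaults):
///
/// ```json
/// {
///   "language_models": {
///     "openai": {
///       "version": "1",
///       "api_url": "https://example.com/v1"
///     },
///     "zed.dev": {
///       "available_models": []
///     }
///   }
/// }
/// ```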
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
}

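/// Anthropic settings as they appear in the settings file. Deserialized
/// untagged, so both the legacy and the versioned formats are accepted.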
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum AnthropicSettingsContent {
    Legacy(LegacyAnthropicSettingsContent),
    Versioned(VersionedAnthropicSettingsContent),
}

impl AnthropicSettingsContent {
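    /// Converts these settings to the latest versioned form, returning `true`
    /// when a legacy format was upgraded and the settings file needs migration.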
    pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
        match self {
            AnthropicSettingsContent::Legacy(content) => (
                AnthropicSettingsContentV1 {
                    api_url: content.api_url,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                anthropic::Model::Custom {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration,
                                    max_output_tokens,
                                    default_temperature,
                                } => Some(provider::anthropic::AvailableModel {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration: cache_configuration.as_ref().map(
                                        |config| LanguageModelCacheConfiguration {
                                            max_cache_anchors: config.max_cache_anchors,
                                            should_speculate: config.should_speculate,
                                            min_total_token: config.min_total_token,
                                        },
                                    ),
                                    max_output_tokens,
                                    default_temperature,
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            AnthropicSettingsContent::Versioned(content) => match content {
                VersionedAnthropicSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

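/// The original, unversioned Anthropic settings format, kept so existing
/// settings files continue to deserialize.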
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<anthropic::Model>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::ollama::AvailableModel>>,
}

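/// OpenAI settings as they appear in the settings file. Deserialized untagged,
/// so both the legacy and the versioned formats are accepted.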
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {
    Legacy(LegacyOpenAiSettingsContent),
    Versioned(VersionedOpenAiSettingsContent),
}

impl OpenAiSettingsContent {
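    /// Converts these settings to the latest versioned form, returning `true`
    /// when a legacy format was upgraded and the settings file needs migration.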
    pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
        match self {
            OpenAiSettingsContent::Legacy(content) => (
                OpenAiSettingsContentV1 {
                    api_url: content.api_url,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                open_ai::Model::Custom {
                                    name,
                                    display_name,
                                    max_tokens,
                                    max_output_tokens,
                                    max_completion_tokens,
                                } => Some(provider::open_ai::AvailableModel {
                                    name,
                                    max_tokens,
                                    max_output_tokens,
                                    display_name,
                                    max_completion_tokens,
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            OpenAiSettingsContent::Versioned(content) => match content {
                VersionedOpenAiSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<open_ai::Model>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    available_models: Option<Vec<cloud::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {}

impl settings::Settings for AllLanguageModelSettings {
    const KEY: Option<&'static str> = Some("language_models");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AllLanguageModelSettingsContent;

    fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
        fn merge<T>(target: &mut T, value: Option<T>) {
            if let Some(value) = value {
                *target = value;
            }
        }

        let mut settings = AllLanguageModelSettings::default();

        for value in sources.defaults_and_customizations() {
            // Anthropic
            let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.anthropic.needs_setting_migration = true;
            }

            merge(
                &mut settings.anthropic.api_url,
                anthropic.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.anthropic.available_models,
                anthropic.as_ref().and_then(|s| s.available_models.clone()),
            );

            // Ollama
            let ollama = value.ollama.clone();

            merge(
                &mut settings.ollama.api_url,
                ollama.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.ollama.available_models,
                ollama.as_ref().and_then(|s| s.available_models.clone()),
            );

            // OpenAI
            let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.openai.needs_setting_migration = true;
            }

            merge(
                &mut settings.openai.api_url,
                openai.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.openai.available_models,
                openai.as_ref().and_then(|s| s.available_models.clone()),
            );
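
            // Zed.dev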
            merge(
                &mut settings.zed_dot_dev.available_models,
                value
                    .zed_dot_dev
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );
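
            // Google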
            merge(
                &mut settings.google.api_url,
                value.google.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.google.available_models,
                value
                    .google
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );
        }

        Ok(settings)
    }
}