1mod model;
2mod rate_limiter;
3mod registry;
4mod request;
5mod role;
6mod telemetry;
7
8#[cfg(any(test, feature = "test-support"))]
9pub mod fake_provider;
10
11use anyhow::Result;
12use client::Client;
13use futures::FutureExt;
14use futures::{future::BoxFuture, stream::BoxStream, StreamExt, TryStreamExt as _};
15use gpui::{AnyElement, AnyView, App, AsyncApp, SharedString, Task, Window};
16use proto::Plan;
17use schemars::JsonSchema;
18use serde::{de::DeserializeOwned, Deserialize, Serialize};
19use std::fmt;
20use std::ops::{Add, Sub};
21use std::{future::Future, sync::Arc};
22use thiserror::Error;
23use ui::IconName;
24use util::serde::is_default;
25
26pub use crate::model::*;
27pub use crate::rate_limiter::*;
28pub use crate::registry::*;
29pub use crate::request::*;
30pub use crate::role::*;
31pub use crate::telemetry::*;
32
/// Provider ID of the Zed-hosted ("zed.dev") language model provider.
pub const ZED_CLOUD_PROVIDER_ID: &str = "zed.dev";
34
/// Initializes this crate's global state: sets up the language model
/// registry and registers the listener that refreshes LLM tokens over the
/// given client connection.
pub fn init(client: Arc<Client>, cx: &mut App) {
    registry::init(cx);
    RefreshLlmTokenListener::register(client.clone(), cx);
}
39
/// The availability of a [`LanguageModel`].
///
/// Returned by [`LanguageModel::availability`].
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum LanguageModelAvailability {
    /// The language model is available to the general public.
    Public,
    /// The language model is available to users on the indicated plan.
    RequiresPlan(Plan),
}
48
/// Configuration for caching language model messages.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct LanguageModelCacheConfiguration {
    // Maximum number of cache anchors allowed per request — presumably a
    // provider-imposed limit; confirm against the consuming provider code.
    pub max_cache_anchors: usize,
    // Whether the cache should be populated speculatively.
    pub should_speculate: bool,
    // Minimum total token count below which caching is not applied.
    pub min_total_token: usize,
}
56
/// A completion event from a language model.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub enum LanguageModelCompletionEvent {
    /// The model stopped generating, with the reason why.
    Stop(StopReason),
    /// A fragment of generated text.
    Text(String),
    /// The model requested that a tool be invoked.
    ToolUse(LanguageModelToolUse),
    /// A new message began; carries the provider-assigned message ID.
    StartMessage { message_id: String },
    /// Updated token-usage counters for the in-flight request.
    UsageUpdate(TokenUsage),
}
66
/// Why a language model stopped producing output.
#[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum StopReason {
    /// The model finished its turn normally.
    EndTurn,
    /// Generation was cut off by the output token limit.
    MaxTokens,
    /// The model stopped in order to invoke a tool.
    ToolUse,
}
74
/// Token counts reported for a language model request.
///
/// All counters default to zero and are omitted from serialization when zero.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Default)]
pub struct TokenUsage {
    /// Tokens consumed by the input/prompt.
    #[serde(default, skip_serializing_if = "is_default")]
    pub input_tokens: u32,
    /// Tokens produced in the completion.
    #[serde(default, skip_serializing_if = "is_default")]
    pub output_tokens: u32,
    /// Input tokens written into the provider's prompt cache.
    #[serde(default, skip_serializing_if = "is_default")]
    pub cache_creation_input_tokens: u32,
    /// Input tokens served from the provider's prompt cache.
    #[serde(default, skip_serializing_if = "is_default")]
    pub cache_read_input_tokens: u32,
}
86
87impl Add<TokenUsage> for TokenUsage {
88 type Output = Self;
89
90 fn add(self, other: Self) -> Self {
91 Self {
92 input_tokens: self.input_tokens + other.input_tokens,
93 output_tokens: self.output_tokens + other.output_tokens,
94 cache_creation_input_tokens: self.cache_creation_input_tokens
95 + other.cache_creation_input_tokens,
96 cache_read_input_tokens: self.cache_read_input_tokens + other.cache_read_input_tokens,
97 }
98 }
99}
100
101impl Sub<TokenUsage> for TokenUsage {
102 type Output = Self;
103
104 fn sub(self, other: Self) -> Self {
105 Self {
106 input_tokens: self.input_tokens - other.input_tokens,
107 output_tokens: self.output_tokens - other.output_tokens,
108 cache_creation_input_tokens: self.cache_creation_input_tokens
109 - other.cache_creation_input_tokens,
110 cache_read_input_tokens: self.cache_read_input_tokens - other.cache_read_input_tokens,
111 }
112 }
113}
114
/// Unique identifier for a single tool-use request from a model.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)]
pub struct LanguageModelToolUseId(Arc<str>);
117
118impl fmt::Display for LanguageModelToolUseId {
119 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
120 write!(f, "{}", self.0)
121 }
122}
123
impl<T> From<T> for LanguageModelToolUseId
where
    T: Into<Arc<str>>,
{
    /// Builds an ID from anything convertible to `Arc<str>` (e.g. `&str`, `String`).
    fn from(value: T) -> Self {
        Self(value.into())
    }
}
132
/// A tool invocation requested by a language model.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)]
pub struct LanguageModelToolUse {
    /// Provider-assigned identifier for this invocation.
    pub id: LanguageModelToolUseId,
    /// Name of the tool to invoke.
    pub name: Arc<str>,
    /// JSON arguments to pass to the tool.
    pub input: serde_json::Value,
}
139
/// A text-only view of a completion stream.
pub struct LanguageModelTextStream {
    /// Message ID from a leading `StartMessage` event, when the provider sent one.
    pub message_id: Option<String>,
    /// Stream of text fragments; errors are surfaced in-stream.
    pub stream: BoxStream<'static, Result<String>>,
}
144
145impl Default for LanguageModelTextStream {
146 fn default() -> Self {
147 Self {
148 message_id: None,
149 stream: Box::pin(futures::stream::empty()),
150 }
151 }
152}
153
154pub trait LanguageModel: Send + Sync {
155 fn id(&self) -> LanguageModelId;
156 fn name(&self) -> LanguageModelName;
157 /// If None, falls back to [LanguageModelProvider::icon]
158 fn icon(&self) -> Option<IconName> {
159 None
160 }
161 fn provider_id(&self) -> LanguageModelProviderId;
162 fn provider_name(&self) -> LanguageModelProviderName;
163 fn telemetry_id(&self) -> String;
164
165 fn api_key(&self, _cx: &App) -> Option<String> {
166 None
167 }
168
169 /// Returns the availability of this language model.
170 fn availability(&self) -> LanguageModelAvailability {
171 LanguageModelAvailability::Public
172 }
173
174 fn max_token_count(&self) -> usize;
175 fn max_output_tokens(&self) -> Option<u32> {
176 None
177 }
178
179 fn count_tokens(
180 &self,
181 request: LanguageModelRequest,
182 cx: &App,
183 ) -> BoxFuture<'static, Result<usize>>;
184
185 fn stream_completion(
186 &self,
187 request: LanguageModelRequest,
188 cx: &AsyncApp,
189 ) -> BoxFuture<'static, Result<BoxStream<'static, Result<LanguageModelCompletionEvent>>>>;
190
191 fn stream_completion_text(
192 &self,
193 request: LanguageModelRequest,
194 cx: &AsyncApp,
195 ) -> BoxFuture<'static, Result<LanguageModelTextStream>> {
196 let events = self.stream_completion(request, cx);
197
198 async move {
199 let mut events = events.await?.fuse();
200 let mut message_id = None;
201 let mut first_item_text = None;
202
203 if let Some(first_event) = events.next().await {
204 match first_event {
205 Ok(LanguageModelCompletionEvent::StartMessage { message_id: id }) => {
206 message_id = Some(id.clone());
207 }
208 Ok(LanguageModelCompletionEvent::Text(text)) => {
209 first_item_text = Some(text);
210 }
211 _ => (),
212 }
213 }
214
215 let stream = futures::stream::iter(first_item_text.map(Ok))
216 .chain(events.filter_map(|result| async move {
217 match result {
218 Ok(LanguageModelCompletionEvent::StartMessage { .. }) => None,
219 Ok(LanguageModelCompletionEvent::Text(text)) => Some(Ok(text)),
220 Ok(LanguageModelCompletionEvent::Stop(_)) => None,
221 Ok(LanguageModelCompletionEvent::ToolUse(_)) => None,
222 Ok(LanguageModelCompletionEvent::UsageUpdate(_)) => None,
223 Err(err) => Some(Err(err)),
224 }
225 }))
226 .boxed();
227
228 Ok(LanguageModelTextStream { message_id, stream })
229 }
230 .boxed()
231 }
232
233 fn use_any_tool(
234 &self,
235 request: LanguageModelRequest,
236 name: String,
237 description: String,
238 schema: serde_json::Value,
239 cx: &AsyncApp,
240 ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>>;
241
242 fn cache_configuration(&self) -> Option<LanguageModelCacheConfiguration> {
243 None
244 }
245
246 #[cfg(any(test, feature = "test-support"))]
247 fn as_fake(&self) -> &fake_provider::FakeLanguageModel {
248 unimplemented!()
249 }
250}
251
252impl dyn LanguageModel {
253 pub fn use_tool<T: LanguageModelTool>(
254 &self,
255 request: LanguageModelRequest,
256 cx: &AsyncApp,
257 ) -> impl 'static + Future<Output = Result<T>> {
258 let schema = schemars::schema_for!(T);
259 let schema_json = serde_json::to_value(&schema).unwrap();
260 let stream = self.use_any_tool(request, T::name(), T::description(), schema_json, cx);
261 async move {
262 let stream = stream.await?;
263 let response = stream.try_collect::<String>().await?;
264 Ok(serde_json::from_str(&response)?)
265 }
266 }
267
268 pub fn use_tool_stream<T: LanguageModelTool>(
269 &self,
270 request: LanguageModelRequest,
271 cx: &AsyncApp,
272 ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
273 let schema = schemars::schema_for!(T);
274 let schema_json = serde_json::to_value(&schema).unwrap();
275 self.use_any_tool(request, T::name(), T::description(), schema_json, cx)
276 }
277}
278
/// A tool a language model can be asked to invoke; its JSON schema is
/// derived from the implementing type via `JsonSchema`.
pub trait LanguageModelTool: 'static + DeserializeOwned + JsonSchema {
    /// The tool's name as presented to the model.
    fn name() -> String;
    /// Natural-language description of what the tool does.
    fn description() -> String;
}
283
/// An error that occurred when trying to authenticate the language model provider.
#[derive(Debug, Error)]
pub enum AuthenticateError {
    /// No stored credentials were found for the provider.
    #[error("credentials not found")]
    CredentialsNotFound,
    /// Any other authentication failure.
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}
292
/// A provider exposing one or more language models.
pub trait LanguageModelProvider: 'static {
    /// Stable identifier for this provider.
    fn id(&self) -> LanguageModelProviderId;
    /// Human-readable provider name.
    fn name(&self) -> LanguageModelProviderName;
    /// Icon shown for this provider; defaults to the Zed assistant icon.
    fn icon(&self) -> IconName {
        IconName::ZedAssistant
    }
    /// The model used when none is explicitly selected, if any.
    fn default_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>>;
    /// All models this provider currently offers.
    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>>;
    /// Eagerly loads a model; the default implementation is a no-op.
    fn load_model(&self, _model: Arc<dyn LanguageModel>, _cx: &App) {}
    /// Whether the provider currently has valid credentials.
    fn is_authenticated(&self, cx: &App) -> bool;
    /// Attempts to authenticate the provider.
    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>>;
    /// Builds the provider's configuration UI.
    fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView;
    /// Whether the user must accept terms of service before use.
    fn must_accept_terms(&self, _cx: &App) -> bool {
        false
    }
    /// Renders the terms-of-service acceptance UI, if one is needed.
    fn render_accept_terms(
        &self,
        _view: LanguageModelProviderTosView,
        _cx: &mut App,
    ) -> Option<AnyElement> {
        None
    }
    /// Clears any stored credentials for this provider.
    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>>;
}
317
/// Which surface is rendering a provider's terms-of-service acceptance UI.
#[derive(PartialEq, Eq)]
pub enum LanguageModelProviderTosView {
    /// The empty state of a thread.
    ThreadEmptyState,
    /// The prompt editor popup.
    PromptEditorPopup,
    /// The provider configuration view.
    Configuration,
}
324
/// Provider state that can be observed for changes via a GPUI entity.
pub trait LanguageModelProviderState: 'static {
    /// The entity type whose changes observers are notified about.
    type ObservableEntity;

    /// Returns the observable entity, if this provider exposes one.
    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>>;

    /// Subscribes `callback` to changes of the observable entity.
    /// Returns `None` when the provider has no observable entity.
    fn subscribe<T: 'static>(
        &self,
        cx: &mut gpui::Context<T>,
        callback: impl Fn(&mut T, &mut gpui::Context<T>) + 'static,
    ) -> Option<gpui::Subscription> {
        let entity = self.observable_entity()?;
        Some(cx.observe(&entity, move |this, _, cx| {
            callback(this, cx);
        }))
    }
}
341
/// Identifier for a language model.
#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)]
pub struct LanguageModelId(pub SharedString);
344
/// Human-readable name of a language model.
#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)]
pub struct LanguageModelName(pub SharedString);
347
/// Identifier for a language model provider.
#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)]
pub struct LanguageModelProviderId(pub SharedString);
350
/// Human-readable name of a language model provider.
#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)]
pub struct LanguageModelProviderName(pub SharedString);
353
354impl fmt::Display for LanguageModelProviderId {
355 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
356 write!(f, "{}", self.0)
357 }
358}
359
360impl From<String> for LanguageModelId {
361 fn from(value: String) -> Self {
362 Self(SharedString::from(value))
363 }
364}
365
366impl From<String> for LanguageModelName {
367 fn from(value: String) -> Self {
368 Self(SharedString::from(value))
369 }
370}
371
372impl From<String> for LanguageModelProviderId {
373 fn from(value: String) -> Self {
374 Self(SharedString::from(value))
375 }
376}
377
378impl From<String> for LanguageModelProviderName {
379 fn from(value: String) -> Self {
380 Self(SharedString::from(value))
381 }
382}