1mod model;
2mod rate_limiter;
3mod registry;
4mod request;
5mod role;
6mod telemetry;
7
8#[cfg(any(test, feature = "test-support"))]
9pub mod fake_provider;
10
11use anyhow::Result;
12use client::Client;
13use futures::FutureExt;
14use futures::{future::BoxFuture, stream::BoxStream, StreamExt, TryStreamExt as _};
15use gpui::{AnyElement, AnyView, App, AsyncApp, SharedString, Task, Window};
16use proto::Plan;
17use schemars::JsonSchema;
18use serde::{de::DeserializeOwned, Deserialize, Serialize};
19use std::fmt;
20use std::{future::Future, sync::Arc};
21use thiserror::Error;
22use ui::IconName;
23
24pub use crate::model::*;
25pub use crate::rate_limiter::*;
26pub use crate::registry::*;
27pub use crate::request::*;
28pub use crate::role::*;
29pub use crate::telemetry::*;
30
/// Provider ID for models hosted via zed.dev.
pub const ZED_CLOUD_PROVIDER_ID: &str = "zed.dev";
32
33pub fn init(client: Arc<Client>, cx: &mut App) {
34 registry::init(cx);
35 RefreshLlmTokenListener::register(client.clone(), cx);
36}
37
/// The availability of a [`LanguageModel`].
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum LanguageModelAvailability {
    /// The language model is available to the general public.
    Public,
    /// The language model is available only to users on the indicated
    /// [`Plan`].
    RequiresPlan(Plan),
}
46
/// Configuration for caching language model messages.
///
/// Serialized with `serde`/`JsonSchema`; field names are part of the wire
/// format, so do not rename them casually (note `min_total_token` is
/// singular in the serialized form).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct LanguageModelCacheConfiguration {
    // Upper bound on cache anchors per request — exact semantics are
    // provider-defined; not visible from this file.
    pub max_cache_anchors: usize,
    // Whether the provider should speculatively populate the cache —
    // NOTE(review): inferred from the name, confirm against providers.
    pub should_speculate: bool,
    // Minimum total token count before caching applies — NOTE(review):
    // inferred from the name, confirm against providers.
    pub min_total_token: usize,
}
54
/// A completion event from a language model.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub enum LanguageModelCompletionEvent {
    /// The model stopped generating, with the reason it stopped.
    Stop(StopReason),
    /// A chunk of generated text.
    Text(String),
    /// The model requested the use of a tool.
    ToolUse(LanguageModelToolUse),
    /// Marks the start of a new message, carrying its message ID.
    StartMessage { message_id: String },
}
63
/// The reason a language model stopped generating output.
#[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum StopReason {
    /// The model finished its turn normally.
    EndTurn,
    /// Generation was cut off by the output token limit.
    MaxTokens,
    /// The model stopped in order to use a tool.
    ToolUse,
}
71
/// An identifier for a tool-use request issued by a language model.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)]
pub struct LanguageModelToolUseId(Arc<str>);
74
75impl fmt::Display for LanguageModelToolUseId {
76 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
77 write!(f, "{}", self.0)
78 }
79}
80
81impl<T> From<T> for LanguageModelToolUseId
82where
83 T: Into<Arc<str>>,
84{
85 fn from(value: T) -> Self {
86 Self(value.into())
87 }
88}
89
/// A tool-use request emitted by a language model.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)]
pub struct LanguageModelToolUse {
    /// Identifier for this tool-use request.
    pub id: LanguageModelToolUseId,
    /// The name of the tool the model wants to invoke.
    pub name: Arc<str>,
    /// The tool input, as arbitrary JSON produced by the model.
    pub input: serde_json::Value,
}
96
/// A completion reduced to a stream of plain text chunks.
pub struct LanguageModelTextStream {
    /// Message ID reported by the stream's `StartMessage` event, if any.
    pub message_id: Option<String>,
    /// The text chunks; each item is a chunk of text or an error.
    pub stream: BoxStream<'static, Result<String>>,
}
101
102impl Default for LanguageModelTextStream {
103 fn default() -> Self {
104 Self {
105 message_id: None,
106 stream: Box::pin(futures::stream::empty()),
107 }
108 }
109}
110
111pub trait LanguageModel: Send + Sync {
112 fn id(&self) -> LanguageModelId;
113 fn name(&self) -> LanguageModelName;
114 /// If None, falls back to [LanguageModelProvider::icon]
115 fn icon(&self) -> Option<IconName> {
116 None
117 }
118 fn provider_id(&self) -> LanguageModelProviderId;
119 fn provider_name(&self) -> LanguageModelProviderName;
120 fn telemetry_id(&self) -> String;
121
122 fn api_key(&self, _cx: &App) -> Option<String> {
123 None
124 }
125
126 /// Returns the availability of this language model.
127 fn availability(&self) -> LanguageModelAvailability {
128 LanguageModelAvailability::Public
129 }
130
131 fn max_token_count(&self) -> usize;
132 fn max_output_tokens(&self) -> Option<u32> {
133 None
134 }
135
136 fn count_tokens(
137 &self,
138 request: LanguageModelRequest,
139 cx: &App,
140 ) -> BoxFuture<'static, Result<usize>>;
141
142 fn stream_completion(
143 &self,
144 request: LanguageModelRequest,
145 cx: &AsyncApp,
146 ) -> BoxFuture<'static, Result<BoxStream<'static, Result<LanguageModelCompletionEvent>>>>;
147
148 fn stream_completion_text(
149 &self,
150 request: LanguageModelRequest,
151 cx: &AsyncApp,
152 ) -> BoxFuture<'static, Result<LanguageModelTextStream>> {
153 let events = self.stream_completion(request, cx);
154
155 async move {
156 let mut events = events.await?.fuse();
157 let mut message_id = None;
158 let mut first_item_text = None;
159
160 if let Some(first_event) = events.next().await {
161 match first_event {
162 Ok(LanguageModelCompletionEvent::StartMessage { message_id: id }) => {
163 message_id = Some(id.clone());
164 }
165 Ok(LanguageModelCompletionEvent::Text(text)) => {
166 first_item_text = Some(text);
167 }
168 _ => (),
169 }
170 }
171
172 let stream = futures::stream::iter(first_item_text.map(Ok))
173 .chain(events.filter_map(|result| async move {
174 match result {
175 Ok(LanguageModelCompletionEvent::StartMessage { .. }) => None,
176 Ok(LanguageModelCompletionEvent::Text(text)) => Some(Ok(text)),
177 Ok(LanguageModelCompletionEvent::Stop(_)) => None,
178 Ok(LanguageModelCompletionEvent::ToolUse(_)) => None,
179 Err(err) => Some(Err(err)),
180 }
181 }))
182 .boxed();
183
184 Ok(LanguageModelTextStream { message_id, stream })
185 }
186 .boxed()
187 }
188
189 fn use_any_tool(
190 &self,
191 request: LanguageModelRequest,
192 name: String,
193 description: String,
194 schema: serde_json::Value,
195 cx: &AsyncApp,
196 ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>>;
197
198 fn cache_configuration(&self) -> Option<LanguageModelCacheConfiguration> {
199 None
200 }
201
202 #[cfg(any(test, feature = "test-support"))]
203 fn as_fake(&self) -> &fake_provider::FakeLanguageModel {
204 unimplemented!()
205 }
206}
207
208impl dyn LanguageModel {
209 pub fn use_tool<T: LanguageModelTool>(
210 &self,
211 request: LanguageModelRequest,
212 cx: &AsyncApp,
213 ) -> impl 'static + Future<Output = Result<T>> {
214 let schema = schemars::schema_for!(T);
215 let schema_json = serde_json::to_value(&schema).unwrap();
216 let stream = self.use_any_tool(request, T::name(), T::description(), schema_json, cx);
217 async move {
218 let stream = stream.await?;
219 let response = stream.try_collect::<String>().await?;
220 Ok(serde_json::from_str(&response)?)
221 }
222 }
223
224 pub fn use_tool_stream<T: LanguageModelTool>(
225 &self,
226 request: LanguageModelRequest,
227 cx: &AsyncApp,
228 ) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
229 let schema = schemars::schema_for!(T);
230 let schema_json = serde_json::to_value(&schema).unwrap();
231 self.use_any_tool(request, T::name(), T::description(), schema_json, cx)
232 }
233}
234
/// A tool that a language model can be asked to use, described to the model
/// via its derived JSON schema and deserialized from the model's response.
pub trait LanguageModelTool: 'static + DeserializeOwned + JsonSchema {
    /// The tool's name, as presented to the model.
    fn name() -> String;
    /// A description of the tool, as presented to the model.
    fn description() -> String;
}
239
/// An error that occurred when trying to authenticate the language model provider.
#[derive(Debug, Error)]
pub enum AuthenticateError {
    /// No stored credentials were found for the provider.
    #[error("credentials not found")]
    CredentialsNotFound,
    /// Any other authentication failure.
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}
248
/// A source of one or more language models.
pub trait LanguageModelProvider: 'static {
    /// A stable identifier for this provider.
    fn id(&self) -> LanguageModelProviderId;
    /// The human-readable provider name.
    fn name(&self) -> LanguageModelProviderName;
    /// The icon shown for this provider; defaults to the Zed assistant icon.
    fn icon(&self) -> IconName {
        IconName::ZedAssistant
    }
    /// The model to use when none is explicitly selected, if any.
    fn default_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>>;
    /// All models this provider currently offers.
    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>>;
    /// Eagerly loads a model. No-op by default; providers that need to
    /// prepare a model (e.g. download it) can override this.
    fn load_model(&self, _model: Arc<dyn LanguageModel>, _cx: &App) {}
    /// Whether the provider is currently authenticated.
    fn is_authenticated(&self, cx: &App) -> bool;
    /// Starts authentication; the task resolves when it completes or fails.
    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>>;
    /// Builds the view used to configure this provider.
    fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView;
    /// Whether the user must accept terms of service before using the
    /// provider; defaults to `false`.
    fn must_accept_terms(&self, _cx: &App) -> bool {
        false
    }
    /// Renders a terms-of-service acceptance element for the given view
    /// context, or `None` if no acceptance is required (the default).
    fn render_accept_terms(
        &self,
        _view: LanguageModelProviderTosView,
        _cx: &mut App,
    ) -> Option<AnyElement> {
        None
    }
    /// Clears any stored credentials for this provider.
    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>>;
}
273
/// Where a terms-of-service prompt is being rendered.
///
/// `Debug`, `Clone`, and `Copy` are derived in addition to the equality
/// traits: the enum is fieldless, so these are free, and public types
/// should be debuggable.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LanguageModelProviderTosView {
    /// The empty state of a thread.
    ThreadEmptyState,
    /// The prompt editor popup.
    PromptEditorPopup,
    /// The provider configuration view.
    Configuration,
}
280
281pub trait LanguageModelProviderState: 'static {
282 type ObservableEntity;
283
284 fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>>;
285
286 fn subscribe<T: 'static>(
287 &self,
288 cx: &mut gpui::Context<T>,
289 callback: impl Fn(&mut T, &mut gpui::Context<T>) + 'static,
290 ) -> Option<gpui::Subscription> {
291 let entity = self.observable_entity()?;
292 Some(cx.observe(&entity, move |this, _, cx| {
293 callback(this, cx);
294 }))
295 }
296}
297
/// An identifier for a [`LanguageModel`].
#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)]
pub struct LanguageModelId(pub SharedString);
300
/// The human-readable name of a [`LanguageModel`].
#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)]
pub struct LanguageModelName(pub SharedString);
303
/// An identifier for a [`LanguageModelProvider`].
#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)]
pub struct LanguageModelProviderId(pub SharedString);
306
/// The human-readable name of a [`LanguageModelProvider`].
#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)]
pub struct LanguageModelProviderName(pub SharedString);
309
310impl fmt::Display for LanguageModelProviderId {
311 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
312 write!(f, "{}", self.0)
313 }
314}
315
316impl From<String> for LanguageModelId {
317 fn from(value: String) -> Self {
318 Self(SharedString::from(value))
319 }
320}
321
322impl From<String> for LanguageModelName {
323 fn from(value: String) -> Self {
324 Self(SharedString::from(value))
325 }
326}
327
328impl From<String> for LanguageModelProviderId {
329 fn from(value: String) -> Self {
330 Self(SharedString::from(value))
331 }
332}
333
334impl From<String> for LanguageModelProviderName {
335 fn from(value: String) -> Self {
336 Self(SharedString::from(value))
337 }
338}