Remove legacy billing code (#46927)

Created by Marshall Bowers

This PR removes the code for the legacy billing plans.

No users will remain on these plans as of January 17th, so it's safe to
land these changes now (they won't be released until the 21st).

Closes CLO-76.
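
Most of the diff follows one pattern: match arms for `Plan::V1(...)` are deleted and only the `Plan::V2(...)` variants remain (see the `agent_configuration.rs` and `ai_onboarding.rs` hunks below). A minimal sketch of that shape, using simplified stand-ins rather than the real `cloud_llm_client` definitions:

```rust
// Simplified stand-ins for cloud_llm_client::{Plan, PlanV2}; the real types
// live in crates/cloud_llm_client and carry more data.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum PlanV2 {
    ZedFree,
    ZedProTrial,
    ZedPro,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Plan {
    // Plan::V1(PlanV1) used to be a variant here; with it gone, callers no
    // longer need `Plan::V1(..) | Plan::V2(..)` match arms.
    V2(PlanV2),
}

fn plan_label(plan: Plan) -> &'static str {
    match plan {
        Plan::V2(PlanV2::ZedFree) => "Free",
        Plan::V2(PlanV2::ZedProTrial) => "Pro Trial",
        Plan::V2(PlanV2::ZedPro) => "Pro",
    }
}

fn main() {
    assert_eq!(plan_label(Plan::V2(PlanV2::ZedPro)), "Pro");
    println!("{}", plan_label(Plan::V2(PlanV2::ZedFree)));
}
```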

Release Notes:

- N/A

Change summary

crates/agent/src/tests/mod.rs                                   | 192 --
crates/agent/src/thread.rs                                      |  55 
crates/agent_ui/src/acp/thread_view.rs                          | 157 --
crates/agent_ui/src/agent_configuration.rs                      |  14 
crates/agent_ui/src/agent_panel.rs                              |  79 
crates/agent_ui/src/text_thread_editor.rs                       |   1 
crates/agent_ui/src/ui.rs                                       |   2 
crates/agent_ui/src/ui/end_trial_upsell.rs                      |  14 
crates/agent_ui/src/ui/usage_callout.rs                         | 200 ---
crates/ai_onboarding/src/agent_panel_onboarding_content.rs      |  21 
crates/ai_onboarding/src/ai_onboarding.rs                       |  34 
crates/ai_onboarding/src/ai_upsell_card.rs                      |  35 
crates/ai_onboarding/src/edit_prediction_onboarding_content.rs  |  10 
crates/ai_onboarding/src/plan_definitions.rs                    |  74 
crates/assistant_text_thread/src/assistant_text_thread_tests.rs |   9 
crates/assistant_text_thread/src/text_thread.rs                 |  39 
crates/assistant_text_thread/src/text_thread_store.rs           |   7 
crates/client/src/test.rs                                       |  10 
crates/client/src/user.rs                                       |  51 
crates/cloud_api_types/src/cloud_api_types.rs                   |   7 
crates/cloud_llm_client/src/cloud_llm_client.rs                 |  73 -
crates/eval/src/instance.rs                                     |   6 
crates/git_ui/src/git_panel.rs                                  |  25 
crates/language_model/src/language_model.rs                     |  19 
crates/language_model/src/model/cloud_model.rs                  |  37 
crates/language_models/src/provider/cloud.rs                    | 135 -
crates/title_bar/src/title_bar.rs                               |  10 
27 files changed, 152 insertions(+), 1,164 deletions(-)

Detailed changes

crates/agent/src/tests/mod.rs 🔗

@@ -905,198 +905,6 @@ async fn test_tool_hallucination(cx: &mut TestAppContext) {
     assert_eq!(update.fields.status, Some(acp::ToolCallStatus::Failed));
 }
 
-#[gpui::test]
-async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) {
-    let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
-    let fake_model = model.as_fake();
-
-    let events = thread
-        .update(cx, |thread, cx| {
-            thread.add_tool(EchoTool);
-            thread.send(UserMessageId::new(), ["abc"], cx)
-        })
-        .unwrap();
-    cx.run_until_parked();
-    let tool_use = LanguageModelToolUse {
-        id: "tool_id_1".into(),
-        name: EchoTool::name().into(),
-        raw_input: "{}".into(),
-        input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(),
-        is_input_complete: true,
-        thought_signature: None,
-    };
-    fake_model
-        .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone()));
-    fake_model.end_last_completion_stream();
-
-    cx.run_until_parked();
-    let completion = fake_model.pending_completions().pop().unwrap();
-    let tool_result = LanguageModelToolResult {
-        tool_use_id: "tool_id_1".into(),
-        tool_name: EchoTool::name().into(),
-        is_error: false,
-        content: "def".into(),
-        output: Some("def".into()),
-    };
-    assert_eq!(
-        completion.messages[1..],
-        vec![
-            LanguageModelRequestMessage {
-                role: Role::User,
-                content: vec!["abc".into()],
-                cache: false,
-                reasoning_details: None,
-            },
-            LanguageModelRequestMessage {
-                role: Role::Assistant,
-                content: vec![MessageContent::ToolUse(tool_use.clone())],
-                cache: false,
-                reasoning_details: None,
-            },
-            LanguageModelRequestMessage {
-                role: Role::User,
-                content: vec![MessageContent::ToolResult(tool_result.clone())],
-                cache: true,
-                reasoning_details: None,
-            },
-        ]
-    );
-
-    // Simulate reaching tool use limit.
-    fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUseLimitReached);
-    fake_model.end_last_completion_stream();
-    let last_event = events.collect::<Vec<_>>().await.pop().unwrap();
-    assert!(
-        last_event
-            .unwrap_err()
-            .is::<language_model::ToolUseLimitReachedError>()
-    );
-
-    let events = thread.update(cx, |thread, cx| thread.resume(cx)).unwrap();
-    cx.run_until_parked();
-    let completion = fake_model.pending_completions().pop().unwrap();
-    assert_eq!(
-        completion.messages[1..],
-        vec![
-            LanguageModelRequestMessage {
-                role: Role::User,
-                content: vec!["abc".into()],
-                cache: false,
-                reasoning_details: None,
-            },
-            LanguageModelRequestMessage {
-                role: Role::Assistant,
-                content: vec![MessageContent::ToolUse(tool_use)],
-                cache: false,
-                reasoning_details: None,
-            },
-            LanguageModelRequestMessage {
-                role: Role::User,
-                content: vec![MessageContent::ToolResult(tool_result)],
-                cache: false,
-                reasoning_details: None,
-            },
-            LanguageModelRequestMessage {
-                role: Role::User,
-                content: vec!["Continue where you left off".into()],
-                cache: true,
-                reasoning_details: None,
-            }
-        ]
-    );
-
-    fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::Text("Done".into()));
-    fake_model.end_last_completion_stream();
-    events.collect::<Vec<_>>().await;
-    thread.read_with(cx, |thread, _cx| {
-        assert_eq!(
-            thread.last_message().unwrap().to_markdown(),
-            indoc! {"
-                ## Assistant
-
-                Done
-            "}
-        )
-    });
-}
-
-#[gpui::test]
-async fn test_send_after_tool_use_limit(cx: &mut TestAppContext) {
-    let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
-    let fake_model = model.as_fake();
-
-    let events = thread
-        .update(cx, |thread, cx| {
-            thread.add_tool(EchoTool);
-            thread.send(UserMessageId::new(), ["abc"], cx)
-        })
-        .unwrap();
-    cx.run_until_parked();
-
-    let tool_use = LanguageModelToolUse {
-        id: "tool_id_1".into(),
-        name: EchoTool::name().into(),
-        raw_input: "{}".into(),
-        input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(),
-        is_input_complete: true,
-        thought_signature: None,
-    };
-    let tool_result = LanguageModelToolResult {
-        tool_use_id: "tool_id_1".into(),
-        tool_name: EchoTool::name().into(),
-        is_error: false,
-        content: "def".into(),
-        output: Some("def".into()),
-    };
-    fake_model
-        .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone()));
-    fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUseLimitReached);
-    fake_model.end_last_completion_stream();
-    let last_event = events.collect::<Vec<_>>().await.pop().unwrap();
-    assert!(
-        last_event
-            .unwrap_err()
-            .is::<language_model::ToolUseLimitReachedError>()
-    );
-
-    thread
-        .update(cx, |thread, cx| {
-            thread.send(UserMessageId::new(), vec!["ghi"], cx)
-        })
-        .unwrap();
-    cx.run_until_parked();
-    let completion = fake_model.pending_completions().pop().unwrap();
-    assert_eq!(
-        completion.messages[1..],
-        vec![
-            LanguageModelRequestMessage {
-                role: Role::User,
-                content: vec!["abc".into()],
-                cache: false,
-                reasoning_details: None,
-            },
-            LanguageModelRequestMessage {
-                role: Role::Assistant,
-                content: vec![MessageContent::ToolUse(tool_use)],
-                cache: false,
-                reasoning_details: None,
-            },
-            LanguageModelRequestMessage {
-                role: Role::User,
-                content: vec![MessageContent::ToolResult(tool_result)],
-                cache: false,
-                reasoning_details: None,
-            },
-            LanguageModelRequestMessage {
-                role: Role::User,
-                content: vec!["ghi".into()],
-                cache: true,
-                reasoning_details: None,
-            }
-        ]
-    );
-}
-
 async fn expect_tool_call(events: &mut UnboundedReceiver<Result<ThreadEvent>>) -> acp::ToolCall {
     let event = events
         .next()

crates/agent/src/thread.rs 🔗

@@ -17,8 +17,8 @@ use agent_settings::{
 };
 use anyhow::{Context as _, Result, anyhow};
 use chrono::{DateTime, Utc};
-use client::{ModelRequestUsage, RequestUsage, UserStore};
-use cloud_llm_client::{CompletionIntent, Plan, UsageLimit};
+use client::UserStore;
+use cloud_llm_client::{CompletionIntent, Plan};
 use collections::{HashMap, HashSet, IndexMap};
 use fs::Fs;
 use futures::stream;
@@ -698,7 +698,6 @@ pub struct Thread {
     running_turn: Option<RunningTurn>,
     pending_message: Option<AgentMessage>,
     tools: BTreeMap<SharedString, Arc<dyn AnyAgentTool>>,
-    tool_use_limit_reached: bool,
     request_token_usage: HashMap<UserMessageId, language_model::TokenUsage>,
     #[allow(unused)]
     cumulative_token_usage: TokenUsage,
@@ -758,7 +757,6 @@ impl Thread {
             running_turn: None,
             pending_message: None,
             tools: BTreeMap::default(),
-            tool_use_limit_reached: false,
             request_token_usage: HashMap::default(),
             cumulative_token_usage: TokenUsage::default(),
             initial_project_snapshot: {
@@ -812,7 +810,6 @@ impl Thread {
             running_turn: None,
             pending_message: None,
             tools: parent_tools,
-            tool_use_limit_reached: false,
             request_token_usage: HashMap::default(),
             cumulative_token_usage: TokenUsage::default(),
             initial_project_snapshot: Task::ready(None).shared(),
@@ -1010,7 +1007,6 @@ impl Thread {
             running_turn: None,
             pending_message: None,
             tools: BTreeMap::default(),
-            tool_use_limit_reached: false,
             request_token_usage: db_thread.request_token_usage.clone(),
             cumulative_token_usage: db_thread.cumulative_token_usage,
             initial_project_snapshot: Task::ready(db_thread.initial_project_snapshot).shared(),
@@ -1456,7 +1452,6 @@ impl Thread {
         let (events_tx, events_rx) = mpsc::unbounded::<Result<ThreadEvent>>();
         let event_stream = ThreadEventStream(events_tx);
         let message_ix = self.messages.len().saturating_sub(1);
-        self.tool_use_limit_reached = false;
         self.clear_summary();
         let (cancellation_tx, mut cancellation_rx) = watch::channel(false);
         self.running_turn = Some(RunningTurn {
@@ -1656,8 +1651,6 @@ impl Thread {
                         }
                     }
                 })?;
-            } else if this.read_with(cx, |this, _| this.tool_use_limit_reached)? {
-                return Err(language_model::ToolUseLimitReachedError.into());
             } else if end_turn {
                 return Ok(());
             } else {
@@ -1680,7 +1673,6 @@ impl Thread {
         let auto_retry = if model.provider_id() == ZED_CLOUD_PROVIDER_ID {
             match plan {
                 Some(Plan::V2(_)) => true,
-                Some(Plan::V1(_)) => self.completion_mode == CompletionMode::Burn,
                 None => false,
             }
         } else {
@@ -1789,12 +1781,6 @@ impl Thread {
                 );
                 self.update_token_usage(usage, cx);
             }
-            UsageUpdated { amount, limit } => {
-                self.update_model_request_usage(amount, limit, cx);
-            }
-            ToolUseLimitReached => {
-                self.tool_use_limit_reached = true;
-            }
             Stop(StopReason::Refusal) => return Err(CompletionError::Refusal.into()),
             Stop(StopReason::MaxTokens) => return Err(CompletionError::MaxTokens.into()),
             Stop(StopReason::ToolUse | StopReason::EndTurn) => {}
@@ -1975,21 +1961,6 @@ impl Thread {
         }
     }
 
-    fn update_model_request_usage(&self, amount: usize, limit: UsageLimit, cx: &mut Context<Self>) {
-        self.project
-            .read(cx)
-            .user_store()
-            .update(cx, |user_store, cx| {
-                user_store.update_model_request_usage(
-                    ModelRequestUsage(RequestUsage {
-                        amount: amount as i32,
-                        limit,
-                    }),
-                    cx,
-                )
-            });
-    }
-
     pub fn title(&self) -> SharedString {
         self.title.clone().unwrap_or("New Thread".into())
     }
@@ -2038,13 +2009,6 @@ impl Thread {
                     let event = event.log_err()?;
                     let text = match event {
                         LanguageModelCompletionEvent::Text(text) => text,
-                        LanguageModelCompletionEvent::UsageUpdated { amount, limit } => {
-                            this.update(cx, |thread, cx| {
-                                thread.update_model_request_usage(amount, limit, cx);
-                            })
-                            .ok()?;
-                            continue;
-                        }
                         _ => continue,
                     };
 
@@ -2103,12 +2067,6 @@ impl Thread {
                     let event = event?;
                     let text = match event {
                         LanguageModelCompletionEvent::Text(text) => text,
-                        LanguageModelCompletionEvent::UsageUpdated { amount, limit } => {
-                            this.update(cx, |thread, cx| {
-                                thread.update_model_request_usage(amount, limit, cx);
-                            })?;
-                            continue;
-                        }
                         _ => continue,
                     };
 
@@ -2550,13 +2508,8 @@ impl Thread {
                     max_attempts: 3,
                 })
             }
-            Other(err)
-                if err.is::<language_model::PaymentRequiredError>()
-                    || err.is::<language_model::ModelRequestLimitReachedError>() =>
-            {
-                // Retrying won't help for Payment Required or Model Request Limit errors (where
-                // the user must upgrade to usage-based billing to get more requests, or else wait
-                // for a significant amount of time for the request limit to reset).
+            Other(err) if err.is::<language_model::PaymentRequiredError>() => {
+                // Retrying won't help for Payment Required errors.
                 None
             }
             // Conservatively assume that any other errors are non-retryable

crates/agent_ui/src/acp/thread_view.rs 🔗

@@ -14,7 +14,6 @@ use arrayvec::ArrayVec;
 use audio::{Audio, Sound};
 use buffer_diff::BufferDiff;
 use client::zed_urls;
-use cloud_llm_client::PlanV1;
 use collections::{HashMap, HashSet};
 use editor::scroll::Autoscroll;
 use editor::{
@@ -33,7 +32,6 @@ use gpui::{
     pulsating_between,
 };
 use language::Buffer;
-
 use language_model::LanguageModelRegistry;
 use markdown::{HeadingLevelStyles, Markdown, MarkdownElement, MarkdownStyle};
 use project::{AgentServerStore, ExternalAgentServerName, Project, ProjectEntryId};
@@ -50,9 +48,9 @@ use text::{Anchor, ToPoint as _};
 use theme::{AgentFontSize, ThemeSettings};
 use ui::{
     Callout, CommonAnimationExt, ContextMenu, ContextMenuEntry, CopyButton, DecoratedIcon,
-    DiffStat, Disclosure, Divider, DividerColor, ElevationIndex, IconDecoration,
-    IconDecorationKind, KeyBinding, PopoverMenu, PopoverMenuHandle, SpinnerLabel, TintColor,
-    Tooltip, WithScrollbar, prelude::*, right_click_menu,
+    DiffStat, Disclosure, Divider, DividerColor, IconDecoration, IconDecorationKind, KeyBinding,
+    PopoverMenu, PopoverMenuHandle, SpinnerLabel, TintColor, Tooltip, WithScrollbar, prelude::*,
+    right_click_menu,
 };
 use util::defer;
 use util::{ResultExt, size::format_file_size, time::duration_alt_display};
@@ -69,14 +67,12 @@ use crate::acp::entry_view_state::{EntryViewEvent, ViewEvent};
 use crate::acp::message_editor::{MessageEditor, MessageEditorEvent};
 use crate::agent_diff::AgentDiff;
 use crate::profile_selector::{ProfileProvider, ProfileSelector};
-
-use crate::ui::{AgentNotification, AgentNotificationEvent, BurnModeTooltip, UsageCallout};
+use crate::ui::{AgentNotification, AgentNotificationEvent, BurnModeTooltip};
 use crate::{
     AgentDiffPane, AgentPanel, AllowAlways, AllowOnce, AuthorizeToolCall, ClearMessageQueue,
-    ContinueThread, ContinueWithBurnMode, CycleFavoriteModels, CycleModeSelector,
-    ExpandMessageEditor, Follow, KeepAll, NewThread, OpenAgentDiff, OpenHistory, RejectAll,
-    RejectOnce, SelectPermissionGranularity, SendImmediately, SendNextQueuedMessage,
-    ToggleBurnMode, ToggleProfileSelector,
+    CycleFavoriteModels, CycleModeSelector, ExpandMessageEditor, Follow, KeepAll, NewThread,
+    OpenAgentDiff, OpenHistory, RejectAll, RejectOnce, SelectPermissionGranularity,
+    SendImmediately, SendNextQueuedMessage, ToggleBurnMode, ToggleProfileSelector,
 };
 
 const MAX_COLLAPSED_LINES: usize = 3;
@@ -92,8 +88,6 @@ enum ThreadFeedback {
 #[derive(Debug)]
 enum ThreadError {
     PaymentRequired,
-    ModelRequestLimitReached(cloud_llm_client::Plan),
-    ToolUseLimitReached,
     Refusal,
     AuthenticationRequired(SharedString),
     Other(SharedString),
@@ -103,12 +97,6 @@ impl ThreadError {
     fn from_err(error: anyhow::Error, agent: &Rc<dyn AgentServer>) -> Self {
         if error.is::<language_model::PaymentRequiredError>() {
             Self::PaymentRequired
-        } else if error.is::<language_model::ToolUseLimitReachedError>() {
-            Self::ToolUseLimitReached
-        } else if let Some(error) =
-            error.downcast_ref::<language_model::ModelRequestLimitReachedError>()
-        {
-            Self::ModelRequestLimitReached(error.plan)
         } else if let Some(acp_error) = error.downcast_ref::<acp::Error>()
             && acp_error.code == acp::ErrorCode::AuthRequired
         {
@@ -6007,12 +5995,6 @@ impl AcpThreadView {
         thread.read(cx).is_imported()
     }
 
-    fn is_using_zed_ai_models(&self, cx: &App) -> bool {
-        self.as_native_thread(cx)
-            .and_then(|thread| thread.read(cx).model())
-            .is_some_and(|model| model.provider_id() == language_model::ZED_CLOUD_PROVIDER_ID)
-    }
-
     fn supports_split_token_display(&self, cx: &App) -> bool {
         self.as_native_thread(cx)
             .and_then(|thread| thread.read(cx).model())
@@ -7385,29 +7367,6 @@ impl AcpThreadView {
         )
     }
 
-    fn render_usage_callout(&self, line_height: Pixels, cx: &mut Context<Self>) -> Option<Div> {
-        if !self.is_using_zed_ai_models(cx) {
-            return None;
-        }
-
-        let user_store = self.project.read(cx).user_store().read(cx);
-        if user_store.is_usage_based_billing_enabled() {
-            return None;
-        }
-
-        let plan = user_store
-            .plan()
-            .unwrap_or(cloud_llm_client::Plan::V1(PlanV1::ZedFree));
-
-        let usage = user_store.model_request_usage()?;
-
-        Some(
-            div()
-                .child(UsageCallout::new(plan, usage))
-                .line_height(line_height),
-        )
-    }
-
     fn agent_ui_font_size_changed(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
         self.entry_view_state.update(cx, |entry_view_state, cx| {
             entry_view_state.agent_ui_font_size_changed(cx);
@@ -7531,10 +7490,6 @@ impl AcpThreadView {
                 self.render_authentication_required_error(error.clone(), cx)
             }
             ThreadError::PaymentRequired => self.render_payment_required_error(cx),
-            ThreadError::ModelRequestLimitReached(plan) => {
-                self.render_model_request_limit_reached_error(*plan, cx)
-            }
-            ThreadError::ToolUseLimitReached => self.render_tool_use_limit_reached_error(cx)?,
         };
 
         Some(div().child(content))
@@ -7724,95 +7679,6 @@ impl AcpThreadView {
             .dismiss_action(self.dismiss_error_button(cx))
     }
 
-    fn render_model_request_limit_reached_error(
-        &self,
-        plan: cloud_llm_client::Plan,
-        cx: &mut Context<Self>,
-    ) -> Callout {
-        let error_message = match plan {
-            cloud_llm_client::Plan::V1(PlanV1::ZedPro) => {
-                "Upgrade to usage-based billing for more prompts."
-            }
-            cloud_llm_client::Plan::V1(PlanV1::ZedProTrial)
-            | cloud_llm_client::Plan::V1(PlanV1::ZedFree) => "Upgrade to Zed Pro for more prompts.",
-            cloud_llm_client::Plan::V2(_) => "",
-        };
-
-        Callout::new()
-            .severity(Severity::Error)
-            .title("Model Prompt Limit Reached")
-            .icon(IconName::XCircle)
-            .description(error_message)
-            .actions_slot(
-                h_flex()
-                    .gap_0p5()
-                    .child(self.upgrade_button(cx))
-                    .child(self.create_copy_button(error_message)),
-            )
-            .dismiss_action(self.dismiss_error_button(cx))
-    }
-
-    fn render_tool_use_limit_reached_error(&self, cx: &mut Context<Self>) -> Option<Callout> {
-        let thread = self.as_native_thread(cx)?;
-        let supports_burn_mode = thread
-            .read(cx)
-            .model()
-            .is_some_and(|model| model.supports_burn_mode());
-
-        let focus_handle = self.focus_handle(cx);
-
-        Some(
-            Callout::new()
-                .icon(IconName::Info)
-                .title("Consecutive tool use limit reached.")
-                .actions_slot(
-                    h_flex()
-                        .gap_0p5()
-                        .when(supports_burn_mode, |this| {
-                            this.child(
-                                Button::new("continue-burn-mode", "Continue with Burn Mode")
-                                    .style(ButtonStyle::Filled)
-                                    .style(ButtonStyle::Tinted(ui::TintColor::Accent))
-                                    .layer(ElevationIndex::ModalSurface)
-                                    .label_size(LabelSize::Small)
-                                    .key_binding(
-                                        KeyBinding::for_action_in(
-                                            &ContinueWithBurnMode,
-                                            &focus_handle,
-                                            cx,
-                                        )
-                                        .map(|kb| kb.size(rems_from_px(10.))),
-                                    )
-                                    .tooltip(Tooltip::text(
-                                        "Enable Burn Mode for unlimited tool use.",
-                                    ))
-                                    .on_click({
-                                        cx.listener(move |this, _, _window, cx| {
-                                            thread.update(cx, |thread, cx| {
-                                                thread
-                                                    .set_completion_mode(CompletionMode::Burn, cx);
-                                            });
-                                            this.resume_chat(cx);
-                                        })
-                                    }),
-                            )
-                        })
-                        .child(
-                            Button::new("continue-conversation", "Continue")
-                                .layer(ElevationIndex::ModalSurface)
-                                .label_size(LabelSize::Small)
-                                .key_binding(
-                                    KeyBinding::for_action_in(&ContinueThread, &focus_handle, cx)
-                                        .map(|kb| kb.size(rems_from_px(10.))),
-                                )
-                                .on_click(cx.listener(|this, _, _window, cx| {
-                                    this.resume_chat(cx);
-                                })),
-                        ),
-                ),
-        )
-    }
-
     fn create_copy_button(&self, message: impl Into<String>) -> impl IntoElement {
         let message = message.into();
 
@@ -8020,7 +7886,6 @@ impl AcpThreadView {
 impl Render for AcpThreadView {
     fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         let has_messages = self.list_state.item_count() > 0;
-        let line_height = TextSize::Small.rems(cx).to_pixels(window.rem_size()) * 1.5;
 
         v_flex()
             .size_full()
@@ -8207,12 +8072,8 @@ impl Render for AcpThreadView {
                 |this, version| this.child(self.render_new_version_callout(&version, cx)),
             )
             .children(
-                if let Some(usage_callout) = self.render_usage_callout(line_height, cx) {
-                    Some(usage_callout.into_any_element())
-                } else {
-                    self.render_token_limit_callout(cx)
-                        .map(|token_limit_callout| token_limit_callout.into_any_element())
-                },
+                self.render_token_limit_callout(cx)
+                    .map(|token_limit_callout| token_limit_callout.into_any_element()),
             )
             .child(self.render_message_editor(window, cx))
     }

crates/agent_ui/src/agent_configuration.rs 🔗

@@ -9,7 +9,7 @@ use std::{ops::Range, sync::Arc};
 use agent::ContextServerRegistry;
 use anyhow::Result;
 use client::zed_urls;
-use cloud_llm_client::{Plan, PlanV1, PlanV2};
+use cloud_llm_client::{Plan, PlanV2};
 use collections::HashMap;
 use context_server::ContextServerId;
 use editor::{Editor, MultiBufferOffset, SelectionEffects, scroll::Autoscroll};
@@ -497,15 +497,9 @@ impl AgentConfiguration {
                 .blend(cx.theme().colors().text_accent.opacity(0.2));
 
             let (plan_name, label_color, bg_color) = match plan {
-                Plan::V1(PlanV1::ZedFree) | Plan::V2(PlanV2::ZedFree) => {
-                    ("Free", Color::Default, free_chip_bg)
-                }
-                Plan::V1(PlanV1::ZedProTrial) | Plan::V2(PlanV2::ZedProTrial) => {
-                    ("Pro Trial", Color::Accent, pro_chip_bg)
-                }
-                Plan::V1(PlanV1::ZedPro) | Plan::V2(PlanV2::ZedPro) => {
-                    ("Pro", Color::Accent, pro_chip_bg)
-                }
+                Plan::V2(PlanV2::ZedFree) => ("Free", Color::Default, free_chip_bg),
+                Plan::V2(PlanV2::ZedProTrial) => ("Pro Trial", Color::Accent, pro_chip_bg),
+                Plan::V2(PlanV2::ZedPro) => ("Pro", Color::Accent, pro_chip_bg),
             };
 
             Chip::new(plan_name.to_string())

crates/agent_ui/src/agent_panel.rs 🔗

@@ -38,8 +38,8 @@ use ai_onboarding::AgentPanelOnboarding;
 use anyhow::{Result, anyhow};
 use assistant_slash_command::SlashCommandWorkingSet;
 use assistant_text_thread::{TextThread, TextThreadEvent, TextThreadSummary};
-use client::{UserStore, zed_urls};
-use cloud_llm_client::{Plan, PlanV1, PlanV2, UsageLimit};
+use client::UserStore;
+use cloud_llm_client::{Plan, PlanV2};
 use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
 use extension::ExtensionEvents;
 use extension_host::ExtensionStore;
@@ -58,8 +58,8 @@ use search::{BufferSearchBar, buffer_search};
 use settings::{Settings, update_settings_file};
 use theme::ThemeSettings;
 use ui::{
-    Callout, ContextMenu, ContextMenuEntry, KeyBinding, PopoverMenu, PopoverMenuHandle,
-    ProgressBar, Tab, Tooltip, prelude::*, utils::WithRemSize,
+    Callout, ContextMenu, ContextMenuEntry, KeyBinding, PopoverMenu, PopoverMenuHandle, Tab,
+    Tooltip, prelude::*, utils::WithRemSize,
 };
 use util::ResultExt as _;
 use workspace::{
@@ -1848,10 +1848,6 @@ impl AgentPanel {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> impl IntoElement {
-        let user_store = self.user_store.read(cx);
-        let usage = user_store.model_request_usage();
-        let account_url = zed_urls::account_url(cx);
-
         let focus_handle = self.focus_handle(cx);
 
         let full_screen_label = if self.is_zoomed(window, cx) {
@@ -1914,43 +1910,6 @@ impl AgentPanel {
                     Some(ContextMenu::build(window, cx, |mut menu, _window, _| {
                         menu = menu.context(focus_handle.clone());
 
-                        if let Some(usage) = usage {
-                            menu = menu
-                                .header_with_link("Prompt Usage", "Manage", account_url.clone())
-                                .custom_entry(
-                                    move |_window, cx| {
-                                        let used_percentage = match usage.limit {
-                                            UsageLimit::Limited(limit) => {
-                                                Some((usage.amount as f32 / limit as f32) * 100.)
-                                            }
-                                            UsageLimit::Unlimited => None,
-                                        };
-
-                                        h_flex()
-                                            .flex_1()
-                                            .gap_1p5()
-                                            .children(used_percentage.map(|percent| {
-                                                ProgressBar::new("usage", percent, 100., cx)
-                                            }))
-                                            .child(
-                                                Label::new(match usage.limit {
-                                                    UsageLimit::Limited(limit) => {
-                                                        format!("{} / {limit}", usage.amount)
-                                                    }
-                                                    UsageLimit::Unlimited => {
-                                                        format!("{} / ∞", usage.amount)
-                                                    }
-                                                })
-                                                .size(LabelSize::Small)
-                                                .color(Color::Muted),
-                                            )
-                                            .into_any_element()
-                                    },
-                                    move |_, cx| cx.open_url(&zed_urls::account_url(cx)),
-                                )
-                                .separator()
-                        }
-
                         if thread_with_messages | text_thread_with_messages {
                             menu = menu.header("Current Thread");
 
@@ -2484,10 +2443,7 @@ impl AgentPanel {
         let plan = self.user_store.read(cx).plan();
         let has_previous_trial = self.user_store.read(cx).trial_started_at().is_some();
 
-        matches!(
-            plan,
-            Some(Plan::V1(PlanV1::ZedFree) | Plan::V2(PlanV2::ZedFree))
-        ) && has_previous_trial
+        plan.is_some_and(|plan| plan == Plan::V2(PlanV2::ZedFree)) && has_previous_trial
     }
 
     fn should_render_onboarding(&self, cx: &mut Context<Self>) -> bool {
@@ -2499,7 +2455,7 @@ impl AgentPanel {
 
         if user_store
             .plan()
-            .is_some_and(|plan| matches!(plan, Plan::V1(PlanV1::ZedPro) | Plan::V2(PlanV2::ZedPro)))
+            .is_some_and(|plan| plan == Plan::V2(PlanV2::ZedPro))
             && user_store
                 .subscription_period()
                 .and_then(|period| period.0.checked_add_days(chrono::Days::new(1)))
@@ -2561,8 +2517,6 @@ impl AgentPanel {
             return None;
         }
 
-        let plan = self.user_store.read(cx).plan()?;
-
         Some(
             v_flex()
                 .absolute()
@@ -2571,18 +2525,15 @@ impl AgentPanel {
                 .bg(cx.theme().colors().panel_background)
                 .opacity(0.85)
                 .block_mouse_except_scroll()
-                .child(EndTrialUpsell::new(
-                    plan,
-                    Arc::new({
-                        let this = cx.entity();
-                        move |_, cx| {
-                            this.update(cx, |_this, cx| {
-                                TrialEndUpsell::set_dismissed(true, cx);
-                                cx.notify();
-                            });
-                        }
-                    }),
-                )),
+                .child(EndTrialUpsell::new(Arc::new({
+                    let this = cx.entity();
+                    move |_, cx| {
+                        this.update(cx, |_this, cx| {
+                            TrialEndUpsell::set_dismissed(true, cx);
+                            cx.notify();
+                        });
+                    }
+                }))),
         )
     }
 

crates/agent_ui/src/text_thread_editor.rs 🔗

@@ -3548,7 +3548,6 @@ mod tests {
         cx.new(|cx| {
             let mut text_thread = TextThread::local(
                 registry,
-                None,
                 prompt_builder.clone(),
                 Arc::new(SlashCommandWorkingSet::default()),
                 cx,

crates/agent_ui/src/ui.rs 🔗

@@ -7,7 +7,6 @@ mod hold_for_default;
 mod mention_crease;
 mod model_selector_components;
 mod onboarding_modal;
-mod usage_callout;
 
 pub use acp_onboarding_modal::*;
 pub use agent_notification::*;
@@ -18,4 +17,3 @@ pub use hold_for_default::*;
 pub use mention_crease::*;
 pub use model_selector_components::*;
 pub use onboarding_modal::*;
-pub use usage_callout::*;

crates/agent_ui/src/ui/end_trial_upsell.rs 🔗

@@ -2,22 +2,17 @@ use std::sync::Arc;
 
 use ai_onboarding::{AgentPanelOnboardingCard, PlanDefinitions};
 use client::zed_urls;
-use cloud_llm_client::{Plan, PlanV2};
 use gpui::{AnyElement, App, IntoElement, RenderOnce, Window};
 use ui::{Divider, Tooltip, prelude::*};
 
 #[derive(IntoElement, RegisterComponent)]
 pub struct EndTrialUpsell {
-    plan: Plan,
     dismiss_upsell: Arc<dyn Fn(&mut Window, &mut App)>,
 }
 
 impl EndTrialUpsell {
-    pub fn new(plan: Plan, dismiss_upsell: Arc<dyn Fn(&mut Window, &mut App)>) -> Self {
-        Self {
-            plan,
-            dismiss_upsell,
-        }
+    pub fn new(dismiss_upsell: Arc<dyn Fn(&mut Window, &mut App)>) -> Self {
+        Self { dismiss_upsell }
     }
 }
 
@@ -36,7 +31,7 @@ impl RenderOnce for EndTrialUpsell {
                     )
                     .child(Divider::horizontal()),
             )
-            .child(PlanDefinitions.pro_plan(self.plan.is_v2(), false))
+            .child(PlanDefinitions.pro_plan())
             .child(
                 Button::new("cta-button", "Upgrade to Zed Pro")
                     .full_width()
@@ -67,7 +62,7 @@ impl RenderOnce for EndTrialUpsell {
                     )
                     .child(Divider::horizontal()),
             )
-            .child(PlanDefinitions.free_plan(self.plan.is_v2()));
+            .child(PlanDefinitions.free_plan());
 
         AgentPanelOnboardingCard::new()
             .child(Headline::new("Your Zed Pro Trial has expired"))
@@ -112,7 +107,6 @@ impl Component for EndTrialUpsell {
         Some(
             v_flex()
                 .child(EndTrialUpsell {
-                    plan: Plan::V2(PlanV2::ZedFree),
                     dismiss_upsell: Arc::new(|_, _| {}),
                 })
                 .into_any_element(),

crates/agent_ui/src/ui/usage_callout.rs 🔗

@@ -1,200 +0,0 @@
-use client::{ModelRequestUsage, RequestUsage, zed_urls};
-use cloud_llm_client::{Plan, PlanV1, PlanV2, UsageLimit};
-use component::{empty_example, example_group_with_title, single_example};
-use gpui::{AnyElement, App, IntoElement, RenderOnce, Window};
-use ui::{Callout, prelude::*};
-
-#[derive(IntoElement, RegisterComponent)]
-pub struct UsageCallout {
-    plan: Plan,
-    usage: ModelRequestUsage,
-}
-
-impl UsageCallout {
-    pub fn new(plan: Plan, usage: ModelRequestUsage) -> Self {
-        Self { plan, usage }
-    }
-}
-
-impl RenderOnce for UsageCallout {
-    fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
-        let (is_limit_reached, is_approaching_limit, remaining) = match self.usage.limit {
-            UsageLimit::Limited(limit) => {
-                let percentage = self.usage.amount as f32 / limit as f32;
-                let is_limit_reached = percentage >= 1.0;
-                let is_near_limit = percentage >= 0.9 && percentage < 1.0;
-                (
-                    is_limit_reached,
-                    is_near_limit,
-                    limit.saturating_sub(self.usage.amount),
-                )
-            }
-            UsageLimit::Unlimited => (false, false, 0),
-        };
-
-        if !is_limit_reached && !is_approaching_limit {
-            return div().into_any_element();
-        }
-
-        let (title, message, button_text, url) = if is_limit_reached {
-            match self.plan {
-                Plan::V1(PlanV1::ZedFree) | Plan::V2(PlanV2::ZedFree) => (
-                    "Out of free prompts",
-                    "Upgrade to continue, wait for the next reset, or switch to API key."
-                        .to_string(),
-                    "Upgrade",
-                    zed_urls::account_url(cx),
-                ),
-                Plan::V1(PlanV1::ZedProTrial) | Plan::V2(PlanV2::ZedProTrial) => (
-                    "Out of trial prompts",
-                    "Upgrade to Zed Pro to continue, or switch to API key.".to_string(),
-                    "Upgrade",
-                    zed_urls::account_url(cx),
-                ),
-                Plan::V1(PlanV1::ZedPro) | Plan::V2(PlanV2::ZedPro) => (
-                    "Out of included prompts",
-                    "Enable usage-based billing to continue.".to_string(),
-                    "Manage",
-                    zed_urls::account_url(cx),
-                ),
-            }
-        } else {
-            match self.plan {
-                Plan::V1(PlanV1::ZedFree) => (
-                    "Reaching free plan limit soon",
-                    format!(
-                        "{remaining} remaining - Upgrade to increase limit, or switch providers",
-                    ),
-                    "Upgrade",
-                    zed_urls::account_url(cx),
-                ),
-                Plan::V1(PlanV1::ZedProTrial) => (
-                    "Reaching trial limit soon",
-                    format!(
-                        "{remaining} remaining - Upgrade to increase limit, or switch providers",
-                    ),
-                    "Upgrade",
-                    zed_urls::account_url(cx),
-                ),
-                Plan::V1(PlanV1::ZedPro) | Plan::V2(_) => return div().into_any_element(),
-            }
-        };
-
-        let (icon, severity) = if is_limit_reached {
-            (IconName::Close, Severity::Error)
-        } else {
-            (IconName::Warning, Severity::Warning)
-        };
-
-        Callout::new()
-            .icon(icon)
-            .severity(severity)
-            .icon(icon)
-            .title(title)
-            .description(message)
-            .actions_slot(
-                Button::new("upgrade", button_text)
-                    .label_size(LabelSize::Small)
-                    .on_click(move |_, _, cx| {
-                        cx.open_url(&url);
-                    }),
-            )
-            .into_any_element()
-    }
-}
-
-impl Component for UsageCallout {
-    fn scope() -> ComponentScope {
-        ComponentScope::Agent
-    }
-
-    fn sort_name() -> &'static str {
-        "AgentUsageCallout"
-    }
-
-    fn preview(_window: &mut Window, _cx: &mut App) -> Option<AnyElement> {
-        let free_examples = example_group_with_title(
-            "Free Plan",
-            vec![
-                single_example(
-                    "Approaching limit (90%)",
-                    UsageCallout::new(
-                        Plan::V1(PlanV1::ZedFree),
-                        ModelRequestUsage(RequestUsage {
-                            limit: UsageLimit::Limited(50),
-                            amount: 45, // 90% of limit
-                        }),
-                    )
-                    .into_any_element(),
-                ),
-                single_example(
-                    "Limit reached (100%)",
-                    UsageCallout::new(
-                        Plan::V1(PlanV1::ZedFree),
-                        ModelRequestUsage(RequestUsage {
-                            limit: UsageLimit::Limited(50),
-                            amount: 50, // 100% of limit
-                        }),
-                    )
-                    .into_any_element(),
-                ),
-            ],
-        );
-
-        let trial_examples = example_group_with_title(
-            "Zed Pro Trial",
-            vec![
-                single_example(
-                    "Approaching limit (90%)",
-                    UsageCallout::new(
-                        Plan::V1(PlanV1::ZedProTrial),
-                        ModelRequestUsage(RequestUsage {
-                            limit: UsageLimit::Limited(150),
-                            amount: 135, // 90% of limit
-                        }),
-                    )
-                    .into_any_element(),
-                ),
-                single_example(
-                    "Limit reached (100%)",
-                    UsageCallout::new(
-                        Plan::V1(PlanV1::ZedProTrial),
-                        ModelRequestUsage(RequestUsage {
-                            limit: UsageLimit::Limited(150),
-                            amount: 150, // 100% of limit
-                        }),
-                    )
-                    .into_any_element(),
-                ),
-            ],
-        );
-
-        let pro_examples = example_group_with_title(
-            "Zed Pro",
-            vec![
-                single_example(
-                    "Limit reached (100%)",
-                    UsageCallout::new(
-                        Plan::V1(PlanV1::ZedPro),
-                        ModelRequestUsage(RequestUsage {
-                            limit: UsageLimit::Limited(500),
-                            amount: 500, // 100% of limit
-                        }),
-                    )
-                    .into_any_element(),
-                ),
-                empty_example("Unlimited plan (no callout shown)"),
-            ],
-        );
-
-        Some(
-            v_flex()
-                .p_4()
-                .gap_4()
-                .child(free_examples)
-                .child(trial_examples)
-                .child(pro_examples)
-                .into_any_element(),
-        )
-    }
-}

crates/ai_onboarding/src/agent_panel_onboarding_content.rs 🔗

@@ -1,7 +1,7 @@
 use std::sync::Arc;
 
 use client::{Client, UserStore};
-use cloud_llm_client::{Plan, PlanV1, PlanV2};
+use cloud_llm_client::{Plan, PlanV2};
 use gpui::{Entity, IntoElement, ParentElement};
 use language_model::{LanguageModelRegistry, ZED_CLOUD_PROVIDER_ID};
 use ui::prelude::*;
@@ -54,15 +54,16 @@ impl AgentPanelOnboarding {
 
 impl Render for AgentPanelOnboarding {
     fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        let enrolled_in_trial = self.user_store.read(cx).plan().is_some_and(|plan| {
-            matches!(
-                plan,
-                Plan::V1(PlanV1::ZedProTrial) | Plan::V2(PlanV2::ZedProTrial)
-            )
-        });
-        let is_pro_user = self.user_store.read(cx).plan().is_some_and(|plan| {
-            matches!(plan, Plan::V1(PlanV1::ZedPro) | Plan::V2(PlanV2::ZedPro))
-        });
+        let enrolled_in_trial = self
+            .user_store
+            .read(cx)
+            .plan()
+            .is_some_and(|plan| plan == Plan::V2(PlanV2::ZedProTrial));
+        let is_pro_user = self
+            .user_store
+            .read(cx)
+            .plan()
+            .is_some_and(|plan| plan == Plan::V2(PlanV2::ZedPro));
 
         AgentPanelOnboardingCard::new()
             .child(

crates/ai_onboarding/src/ai_onboarding.rs 🔗

@@ -10,7 +10,7 @@ pub use agent_api_keys_onboarding::{ApiKeysWithProviders, ApiKeysWithoutProvider
 pub use agent_panel_onboarding_card::AgentPanelOnboardingCard;
 pub use agent_panel_onboarding_content::AgentPanelOnboarding;
 pub use ai_upsell_card::AiUpsellCard;
-use cloud_llm_client::{Plan, PlanV1, PlanV2};
+use cloud_llm_client::{Plan, PlanV2};
 pub use edit_prediction_onboarding_content::EditPredictionOnboarding;
 pub use plan_definitions::PlanDefinitions;
 pub use young_account_banner::YoungAccountBanner;
@@ -117,7 +117,7 @@ impl ZedAiOnboarding {
                     .color(Color::Muted)
                     .mb_2(),
             )
-            .child(PlanDefinitions.pro_plan(true, false))
+            .child(PlanDefinitions.pro_plan())
             .child(
                 Button::new("sign_in", "Try Zed Pro for Free")
                     .disabled(signing_in)
@@ -135,7 +135,7 @@ impl ZedAiOnboarding {
             .into_any_element()
     }
 
-    fn render_free_plan_state(&self, is_v2: bool, cx: &mut App) -> AnyElement {
+    fn render_free_plan_state(&self, cx: &mut App) -> AnyElement {
         if self.account_too_young {
             v_flex()
                 .relative()
@@ -158,7 +158,7 @@ impl ZedAiOnboarding {
                                 )
                                 .child(Divider::horizontal()),
                         )
-                        .child(PlanDefinitions.pro_plan(is_v2, true))
+                        .child(PlanDefinitions.pro_plan())
                         .child(
                             Button::new("pro", "Get Started")
                                 .full_width()
@@ -201,7 +201,7 @@ impl ZedAiOnboarding {
                                 )
                                 .child(Divider::horizontal()),
                         )
-                        .child(PlanDefinitions.free_plan(is_v2)),
+                        .child(PlanDefinitions.free_plan()),
                 )
                 .children(self.render_dismiss_button())
                 .child(
@@ -219,7 +219,7 @@ impl ZedAiOnboarding {
                                 )
                                 .child(Divider::horizontal()),
                         )
-                        .child(PlanDefinitions.pro_trial(is_v2, true))
+                        .child(PlanDefinitions.pro_trial(true))
                         .child(
                             Button::new("pro", "Start Free Trial")
                                 .full_width()
@@ -237,7 +237,7 @@ impl ZedAiOnboarding {
         }
     }
 
-    fn render_trial_state(&self, is_v2: bool, _cx: &mut App) -> AnyElement {
+    fn render_trial_state(&self, _cx: &mut App) -> AnyElement {
         v_flex()
             .relative()
             .gap_1()
@@ -247,12 +247,12 @@ impl ZedAiOnboarding {
                     .color(Color::Muted)
                     .mb_2(),
             )
-            .child(PlanDefinitions.pro_trial(is_v2, false))
+            .child(PlanDefinitions.pro_trial(false))
             .children(self.render_dismiss_button())
             .into_any_element()
     }
 
-    fn render_pro_plan_state(&self, is_v2: bool, _cx: &mut App) -> AnyElement {
+    fn render_pro_plan_state(&self, _cx: &mut App) -> AnyElement {
         v_flex()
             .gap_1()
             .child(Headline::new("Welcome to Zed Pro"))
@@ -261,7 +261,7 @@ impl ZedAiOnboarding {
                     .color(Color::Muted)
                     .mb_2(),
             )
-            .child(PlanDefinitions.pro_plan(is_v2, false))
+            .child(PlanDefinitions.pro_plan())
             .children(self.render_dismiss_button())
             .into_any_element()
     }
@@ -271,16 +271,10 @@ impl RenderOnce for ZedAiOnboarding {
     fn render(self, _window: &mut ui::Window, cx: &mut App) -> impl IntoElement {
         if matches!(self.sign_in_status, SignInStatus::SignedIn) {
             match self.plan {
-                None => self.render_free_plan_state(true, cx),
-                Some(plan @ (Plan::V1(PlanV1::ZedFree) | Plan::V2(PlanV2::ZedFree))) => {
-                    self.render_free_plan_state(plan.is_v2(), cx)
-                }
-                Some(plan @ (Plan::V1(PlanV1::ZedProTrial) | Plan::V2(PlanV2::ZedProTrial))) => {
-                    self.render_trial_state(plan.is_v2(), cx)
-                }
-                Some(plan @ (Plan::V1(PlanV1::ZedPro) | Plan::V2(PlanV2::ZedPro))) => {
-                    self.render_pro_plan_state(plan.is_v2(), cx)
-                }
+                None => self.render_free_plan_state(cx),
+                Some(Plan::V2(PlanV2::ZedFree)) => self.render_free_plan_state(cx),
+                Some(Plan::V2(PlanV2::ZedProTrial)) => self.render_trial_state(cx),
+                Some(Plan::V2(PlanV2::ZedPro)) => self.render_pro_plan_state(cx),
             }
         } else {
             self.render_sign_in_disclaimer(cx)

crates/ai_onboarding/src/ai_upsell_card.rs 🔗

@@ -1,7 +1,7 @@
 use std::sync::Arc;
 
 use client::{Client, UserStore, zed_urls};
-use cloud_llm_client::{Plan, PlanV1, PlanV2};
+use cloud_llm_client::{Plan, PlanV2};
 use gpui::{AnyElement, App, Entity, IntoElement, RenderOnce, Window};
 use ui::{CommonAnimationExt, Divider, Vector, VectorName, prelude::*};
 
@@ -49,8 +49,6 @@ impl AiUpsellCard {
 
 impl RenderOnce for AiUpsellCard {
     fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
-        let is_v2_plan = self.user_plan.map_or(true, |plan| plan.is_v2());
-
         let pro_section = v_flex()
             .flex_grow()
             .w_full()
@@ -66,7 +64,7 @@ impl RenderOnce for AiUpsellCard {
                     )
                     .child(Divider::horizontal()),
             )
-            .child(PlanDefinitions.pro_plan(is_v2_plan, false));
+            .child(PlanDefinitions.pro_plan());
 
         let free_section = v_flex()
             .flex_grow()
@@ -83,7 +81,7 @@ impl RenderOnce for AiUpsellCard {
                     )
                     .child(Divider::horizontal()),
             )
-            .child(PlanDefinitions.free_plan(is_v2_plan));
+            .child(PlanDefinitions.free_plan());
 
         let grid_bg = h_flex()
             .absolute()
@@ -168,7 +166,7 @@ impl RenderOnce for AiUpsellCard {
 
         match self.sign_in_status {
             SignInStatus::SignedIn => match self.user_plan {
-                None | Some(Plan::V1(PlanV1::ZedFree) | Plan::V2(PlanV2::ZedFree)) => card
+                None | Some(Plan::V2(PlanV2::ZedFree)) => card
                     .child(Label::new("Try Zed AI").size(LabelSize::Large))
                     .map(|this| {
                         if self.account_too_young {
@@ -187,7 +185,7 @@ impl RenderOnce for AiUpsellCard {
                                             )
                                             .child(Divider::horizontal()),
                                     )
-                                    .child(PlanDefinitions.pro_plan(is_v2_plan, true))
+                                    .child(PlanDefinitions.pro_plan())
                                     .child(
                                         Button::new("pro", "Get Started")
                                             .full_width()
@@ -234,17 +232,16 @@ impl RenderOnce for AiUpsellCard {
                             )
                         }
                     }),
-                Some(plan @ (Plan::V1(PlanV1::ZedProTrial) | Plan::V2(PlanV2::ZedProTrial))) => {
-                    card.child(pro_trial_stamp)
-                        .child(Label::new("You're in the Zed Pro Trial").size(LabelSize::Large))
-                        .child(
-                            Label::new("Here's what you get for the next 14 days:")
-                                .color(Color::Muted)
-                                .mb_2(),
-                        )
-                        .child(PlanDefinitions.pro_trial(plan.is_v2(), false))
-                }
-                Some(plan @ (Plan::V1(PlanV1::ZedPro) | Plan::V2(PlanV2::ZedPro))) => card
+                Some(Plan::V2(PlanV2::ZedProTrial)) => card
+                    .child(pro_trial_stamp)
+                    .child(Label::new("You're in the Zed Pro Trial").size(LabelSize::Large))
+                    .child(
+                        Label::new("Here's what you get for the next 14 days:")
+                            .color(Color::Muted)
+                            .mb_2(),
+                    )
+                    .child(PlanDefinitions.pro_trial(false)),
+                Some(Plan::V2(PlanV2::ZedPro)) => card
                     .child(certified_user_stamp)
                     .child(Label::new("You're in the Zed Pro plan").size(LabelSize::Large))
                     .child(
@@ -252,7 +249,7 @@ impl RenderOnce for AiUpsellCard {
                             .color(Color::Muted)
                             .mb_2(),
                     )
-                    .child(PlanDefinitions.pro_plan(plan.is_v2(), false)),
+                    .child(PlanDefinitions.pro_plan()),
             },
             // Signed Out State
             _ => card

crates/ai_onboarding/src/edit_prediction_onboarding_content.rs 🔗

@@ -1,7 +1,7 @@
 use std::sync::Arc;
 
 use client::{Client, UserStore};
-use cloud_llm_client::{Plan, PlanV1, PlanV2};
+use cloud_llm_client::{Plan, PlanV2};
 use gpui::{Entity, IntoElement, ParentElement};
 use ui::prelude::*;
 
@@ -36,9 +36,11 @@ impl EditPredictionOnboarding {
 
 impl Render for EditPredictionOnboarding {
     fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        let is_free_plan = self.user_store.read(cx).plan().is_some_and(|plan| {
-            matches!(plan, Plan::V1(PlanV1::ZedFree) | Plan::V2(PlanV2::ZedFree))
-        });
+        let is_free_plan = self
+            .user_store
+            .read(cx)
+            .plan()
+            .is_some_and(|plan| plan == Plan::V2(PlanV2::ZedFree));
 
         let github_copilot = v_flex()
             .gap_1()

crates/ai_onboarding/src/plan_definitions.rs 🔗

@@ -7,62 +7,32 @@ pub struct PlanDefinitions;
 impl PlanDefinitions {
     pub const AI_DESCRIPTION: &'static str = "Zed offers a complete agentic experience, with robust editing and reviewing features to collaborate with AI.";
 
-    pub fn free_plan(&self, is_v2: bool) -> impl IntoElement {
-        if is_v2 {
-            List::new()
-                .child(ListBulletItem::new("2,000 accepted edit predictions"))
-                .child(ListBulletItem::new(
-                    "Unlimited prompts with your AI API keys",
-                ))
-                .child(ListBulletItem::new(
-                    "Unlimited use of external agents like Claude Code",
-                ))
-        } else {
-            List::new()
-                .child(ListBulletItem::new("50 prompts with Claude models"))
-                .child(ListBulletItem::new("2,000 accepted edit predictions"))
-        }
+    pub fn free_plan(&self) -> impl IntoElement {
+        List::new()
+            .child(ListBulletItem::new("2,000 accepted edit predictions"))
+            .child(ListBulletItem::new(
+                "Unlimited prompts with your AI API keys",
+            ))
+            .child(ListBulletItem::new(
+                "Unlimited use of external agents like Claude Code",
+            ))
     }
 
-    pub fn pro_trial(&self, is_v2: bool, period: bool) -> impl IntoElement {
-        if is_v2 {
-            List::new()
-                .child(ListBulletItem::new("Unlimited edit predictions"))
-                .child(ListBulletItem::new("$20 of tokens"))
-                .when(period, |this| {
-                    this.child(ListBulletItem::new(
-                        "Try it out for 14 days, no credit card required",
-                    ))
-                })
-        } else {
-            List::new()
-                .child(ListBulletItem::new("150 prompts with Claude models"))
-                .child(ListBulletItem::new(
-                    "Unlimited edit predictions with Zeta, our open-source model",
+    pub fn pro_trial(&self, period: bool) -> impl IntoElement {
+        List::new()
+            .child(ListBulletItem::new("Unlimited edit predictions"))
+            .child(ListBulletItem::new("$20 of tokens"))
+            .when(period, |this| {
+                this.child(ListBulletItem::new(
+                    "Try it out for 14 days, no credit card required",
                 ))
-                .when(period, |this| {
-                    this.child(ListBulletItem::new(
-                        "Try it out for 14 days, no credit card required",
-                    ))
-                })
-        }
+            })
     }
 
-    pub fn pro_plan(&self, is_v2: bool, price: bool) -> impl IntoElement {
-        if is_v2 {
-            List::new()
-                .child(ListBulletItem::new("Unlimited edit predictions"))
-                .child(ListBulletItem::new("$5 of tokens"))
-                .child(ListBulletItem::new("Usage-based billing beyond $5"))
-        } else {
-            List::new()
-                .child(ListBulletItem::new("500 prompts with Claude models"))
-                .child(ListBulletItem::new(
-                    "Unlimited edit predictions with Zeta, our open-source model",
-                ))
-                .when(price, |this| {
-                    this.child(ListBulletItem::new("$20 USD per month"))
-                })
-        }
+    pub fn pro_plan(&self) -> impl IntoElement {
+        List::new()
+            .child(ListBulletItem::new("Unlimited edit predictions"))
+            .child(ListBulletItem::new("$5 of tokens"))
+            .child(ListBulletItem::new("Usage-based billing beyond $5"))
     }
 }

crates/assistant_text_thread/src/assistant_text_thread_tests.rs 🔗

@@ -49,7 +49,6 @@ fn test_inserting_and_removing_messages(cx: &mut App) {
     let text_thread = cx.new(|cx| {
         TextThread::local(
             registry,
-            None,
             prompt_builder.clone(),
             Arc::new(SlashCommandWorkingSet::default()),
             cx,
@@ -187,7 +186,6 @@ fn test_message_splitting(cx: &mut App) {
     let text_thread = cx.new(|cx| {
         TextThread::local(
             registry.clone(),
-            None,
             prompt_builder.clone(),
             Arc::new(SlashCommandWorkingSet::default()),
             cx,
@@ -291,7 +289,6 @@ fn test_messages_for_offsets(cx: &mut App) {
     let text_thread = cx.new(|cx| {
         TextThread::local(
             registry,
-            None,
             prompt_builder.clone(),
             Arc::new(SlashCommandWorkingSet::default()),
             cx,
@@ -401,7 +398,6 @@ async fn test_slash_commands(cx: &mut TestAppContext) {
     let text_thread = cx.new(|cx| {
         TextThread::local(
             registry.clone(),
-            None,
             prompt_builder.clone(),
             Arc::new(SlashCommandWorkingSet::default()),
             cx,
@@ -672,7 +668,6 @@ async fn test_serialization(cx: &mut TestAppContext) {
     let text_thread = cx.new(|cx| {
         TextThread::local(
             registry.clone(),
-            None,
             prompt_builder.clone(),
             Arc::new(SlashCommandWorkingSet::default()),
             cx,
@@ -718,7 +713,6 @@ async fn test_serialization(cx: &mut TestAppContext) {
             registry.clone(),
             prompt_builder.clone(),
             Arc::new(SlashCommandWorkingSet::default()),
-            None,
             cx,
         )
     });
@@ -773,7 +767,6 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
                 registry.clone(),
                 prompt_builder.clone(),
                 Arc::new(SlashCommandWorkingSet::default()),
-                None,
                 cx,
             )
         });
@@ -1033,7 +1026,6 @@ fn test_mark_cache_anchors(cx: &mut App) {
     let text_thread = cx.new(|cx| {
         TextThread::local(
             registry,
-            None,
             prompt_builder.clone(),
             Arc::new(SlashCommandWorkingSet::default()),
             cx,
@@ -1359,7 +1351,6 @@ fn setup_context_editor_with_fake_model(
     let context = cx.new(|cx| {
         TextThread::local(
             registry,
-            None,
             prompt_builder.clone(),
             Arc::new(SlashCommandWorkingSet::default()),
             cx,

crates/assistant_text_thread/src/text_thread.rs 🔗

@@ -5,16 +5,16 @@ use assistant_slash_command::{
     SlashCommandResult, SlashCommandWorkingSet,
 };
 use assistant_slash_commands::FileCommandMetadata;
-use client::{self, ModelRequestUsage, RequestUsage, proto};
+use client::{self, proto};
 use clock::ReplicaId;
-use cloud_llm_client::{CompletionIntent, UsageLimit};
+use cloud_llm_client::CompletionIntent;
 use collections::{HashMap, HashSet};
 use fs::{Fs, RenameOptions};
 
 use futures::{FutureExt, StreamExt, future::Shared};
 use gpui::{
     App, AppContext as _, Context, Entity, EventEmitter, RenderImage, SharedString, Subscription,
-    Task, WeakEntity,
+    Task,
 };
 use itertools::Itertools as _;
 use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
@@ -27,7 +27,6 @@ use language_model::{
 };
 use open_ai::Model as OpenAiModel;
 use paths::text_threads_dir;
-use project::Project;
 use prompt_store::PromptBuilder;
 use serde::{Deserialize, Serialize};
 use settings::Settings;
@@ -688,7 +687,6 @@ pub struct TextThread {
     path: Option<Arc<Path>>,
     _subscriptions: Vec<Subscription>,
     language_registry: Arc<LanguageRegistry>,
-    project: Option<WeakEntity<Project>>,
     prompt_builder: Arc<PromptBuilder>,
     completion_mode: agent_settings::CompletionMode,
 }
@@ -708,7 +706,6 @@ impl EventEmitter<TextThreadEvent> for TextThread {}
 impl TextThread {
     pub fn local(
         language_registry: Arc<LanguageRegistry>,
-        project: Option<WeakEntity<Project>>,
         prompt_builder: Arc<PromptBuilder>,
         slash_commands: Arc<SlashCommandWorkingSet>,
         cx: &mut Context<Self>,
@@ -720,7 +717,6 @@ impl TextThread {
             language_registry,
             prompt_builder,
             slash_commands,
-            project,
             cx,
         )
     }
@@ -740,7 +736,6 @@ impl TextThread {
         language_registry: Arc<LanguageRegistry>,
         prompt_builder: Arc<PromptBuilder>,
         slash_commands: Arc<SlashCommandWorkingSet>,
-        project: Option<WeakEntity<Project>>,
         cx: &mut Context<Self>,
     ) -> Self {
         let buffer = cx.new(|_cx| {
@@ -781,7 +776,6 @@ impl TextThread {
             completion_mode: AgentSettings::get_global(cx).preferred_completion_mode,
             path: None,
             buffer,
-            project,
             language_registry,
             slash_commands,
             prompt_builder,
@@ -869,7 +863,6 @@ impl TextThread {
         language_registry: Arc<LanguageRegistry>,
         prompt_builder: Arc<PromptBuilder>,
         slash_commands: Arc<SlashCommandWorkingSet>,
-        project: Option<WeakEntity<Project>>,
         cx: &mut Context<Self>,
     ) -> Self {
         let id = saved_context.id.clone().unwrap_or_else(TextThreadId::new);
@@ -880,7 +873,6 @@ impl TextThread {
             language_registry,
             prompt_builder,
             slash_commands,
-            project,
             cx,
         );
         this.path = Some(path);
@@ -2068,15 +2060,7 @@ impl TextThread {
 
                                 match event {
                                     LanguageModelCompletionEvent::Started |
-                                    LanguageModelCompletionEvent::Queued {..} |
-                                    LanguageModelCompletionEvent::ToolUseLimitReached { .. } => {}
-                                    LanguageModelCompletionEvent::UsageUpdated { amount, limit } => {
-                                        this.update_model_request_usage(
-                                            amount as u32,
-                                            limit,
-                                            cx,
-                                        );
-                                    }
+                                    LanguageModelCompletionEvent::Queued {..} => {}
                                     LanguageModelCompletionEvent::StartMessage { .. } => {}
                                     LanguageModelCompletionEvent::ReasoningDetails(_) => {
                                         // ReasoningDetails are metadata (signatures, encrypted data, format info)
@@ -2957,21 +2941,6 @@ impl TextThread {
         summary.text = custom_summary;
         cx.emit(TextThreadEvent::SummaryChanged);
     }
-
-    fn update_model_request_usage(&self, amount: u32, limit: UsageLimit, cx: &mut App) {
-        let Some(project) = self.project.as_ref().and_then(|project| project.upgrade()) else {
-            return;
-        };
-        project.read(cx).user_store().update(cx, |user_store, cx| {
-            user_store.update_model_request_usage(
-                ModelRequestUsage(RequestUsage {
-                    amount: amount as i32,
-                    limit,
-                }),
-                cx,
-            )
-        });
-    }
 }
 
 #[derive(Debug, Default)]

crates/assistant_text_thread/src/text_thread_store.rs 🔗

@@ -383,7 +383,6 @@ impl TextThreadStore {
         let context = cx.new(|cx| {
             TextThread::local(
                 self.languages.clone(),
-                Some(self.project.clone()),
                 self.prompt_builder.clone(),
                 self.slash_commands.clone(),
                 cx,
@@ -405,7 +404,6 @@ impl TextThreadStore {
         let replica_id = project.replica_id();
         let capability = project.capability();
         let language_registry = self.languages.clone();
-        let project = self.project.clone();
 
         let prompt_builder = self.prompt_builder.clone();
         let slash_commands = self.slash_commands.clone();
@@ -422,7 +420,6 @@ impl TextThreadStore {
                     language_registry,
                     prompt_builder,
                     slash_commands,
-                    Some(project),
                     cx,
                 )
             });
@@ -459,7 +456,6 @@ impl TextThreadStore {
 
         let fs = self.fs.clone();
         let languages = self.languages.clone();
-        let project = self.project.clone();
         let load = cx.background_spawn({
             let path = path.clone();
             async move {
@@ -479,7 +475,6 @@ impl TextThreadStore {
                     languages,
                     prompt_builder,
                     slash_commands,
-                    Some(project),
                     cx,
                 )
             });
@@ -598,7 +593,6 @@ impl TextThreadStore {
         let replica_id = project.replica_id();
         let capability = project.capability();
         let language_registry = self.languages.clone();
-        let project = self.project.clone();
         let request = self.client.request(proto::OpenContext {
             project_id,
             context_id: text_thread_id.to_proto(),
@@ -616,7 +610,6 @@ impl TextThreadStore {
                     language_registry,
                     prompt_builder,
                     slash_commands,
-                    Some(project),
                     cx,
                 )
             });

crates/client/src/test.rs 🔗

@@ -1,7 +1,7 @@
 use crate::{Client, Connection, Credentials, EstablishConnectionError, UserStore};
 use anyhow::{Context as _, Result, anyhow};
 use cloud_api_client::{AuthenticatedUser, GetAuthenticatedUserResponse, PlanInfo};
-use cloud_llm_client::{CurrentUsage, PlanV1, UsageData, UsageLimit};
+use cloud_llm_client::{CurrentUsage, PlanV2, UsageData, UsageLimit};
 use futures::{StreamExt, stream::BoxStream};
 use gpui::{AppContext as _, Entity, TestAppContext};
 use http_client::{AsyncBody, Method, Request, http};
@@ -264,21 +264,15 @@ pub fn make_get_authenticated_user_response(
         },
         feature_flags: vec![],
         plan: PlanInfo {
-            plan: PlanV1::ZedPro,
-            plan_v2: None,
+            plan_v2: PlanV2::ZedPro,
             subscription_period: None,
             usage: CurrentUsage {
-                model_requests: UsageData {
-                    used: 0,
-                    limit: UsageLimit::Limited(500),
-                },
                 edit_predictions: UsageData {
                     used: 250,
                     limit: UsageLimit::Unlimited,
                 },
             },
             trial_started_at: None,
-            is_usage_based_billing_enabled: false,
             is_account_too_young: false,
             has_overdue_invoices: false,
         },

crates/client/src/user.rs 🔗

@@ -4,8 +4,7 @@ use chrono::{DateTime, Utc};
 use cloud_api_client::websocket_protocol::MessageToClient;
 use cloud_api_client::{GetAuthenticatedUserResponse, PlanInfo};
 use cloud_llm_client::{
-    EDIT_PREDICTIONS_USAGE_AMOUNT_HEADER_NAME, EDIT_PREDICTIONS_USAGE_LIMIT_HEADER_NAME,
-    MODEL_REQUESTS_USAGE_AMOUNT_HEADER_NAME, MODEL_REQUESTS_USAGE_LIMIT_HEADER_NAME, Plan,
+    EDIT_PREDICTIONS_USAGE_AMOUNT_HEADER_NAME, EDIT_PREDICTIONS_USAGE_LIMIT_HEADER_NAME, Plan,
     UsageLimit,
 };
 use collections::{HashMap, HashSet, hash_map::Entry};
@@ -108,7 +107,6 @@ pub struct UserStore {
     by_github_login: HashMap<SharedString, u64>,
     participant_indices: HashMap<u64, ParticipantIndex>,
     update_contacts_tx: mpsc::UnboundedSender<UpdateContacts>,
-    model_request_usage: Option<ModelRequestUsage>,
     edit_prediction_usage: Option<EditPredictionUsage>,
     plan_info: Option<PlanInfo>,
     current_user: watch::Receiver<Option<Arc<User>>>,
@@ -154,9 +152,6 @@ enum UpdateContacts {
     Clear(postage::barrier::Sender),
 }
 
-#[derive(Debug, Clone, Copy, Deref)]
-pub struct ModelRequestUsage(pub RequestUsage);
-
 #[derive(Debug, Clone, Copy, Deref)]
 pub struct EditPredictionUsage(pub RequestUsage);
 
@@ -185,7 +180,6 @@ impl UserStore {
             by_github_login: Default::default(),
             current_user: current_user_rx,
             plan_info: None,
-            model_request_usage: None,
             edit_prediction_usage: None,
             contacts: Default::default(),
             incoming_contact_requests: Default::default(),
@@ -675,12 +669,12 @@ impl UserStore {
     pub fn plan(&self) -> Option<Plan> {
         #[cfg(debug_assertions)]
         if let Ok(plan) = std::env::var("ZED_SIMULATE_PLAN").as_ref() {
-            use cloud_llm_client::PlanV1;
+            use cloud_llm_client::PlanV2;
 
             return match plan.as_str() {
-                "free" => Some(Plan::V1(PlanV1::ZedFree)),
-                "trial" => Some(Plan::V1(PlanV1::ZedProTrial)),
-                "pro" => Some(Plan::V1(PlanV1::ZedPro)),
+                "free" => Some(Plan::V2(PlanV2::ZedFree)),
+                "trial" => Some(Plan::V2(PlanV2::ZedProTrial)),
+                "pro" => Some(Plan::V2(PlanV2::ZedPro)),
                 _ => {
                     panic!("ZED_SIMULATE_PLAN must be one of 'free', 'trial', or 'pro'");
                 }
@@ -725,26 +719,6 @@ impl UserStore {
             .unwrap_or_default()
     }
 
-    pub fn is_usage_based_billing_enabled(&self) -> bool {
-        self.plan_info
-            .as_ref()
-            .map(|plan| plan.is_usage_based_billing_enabled)
-            .unwrap_or_default()
-    }
-
-    pub fn model_request_usage(&self) -> Option<ModelRequestUsage> {
-        if self.plan().is_some_and(|plan| plan.is_v2()) {
-            return None;
-        }
-
-        self.model_request_usage
-    }
-
-    pub fn update_model_request_usage(&mut self, usage: ModelRequestUsage, cx: &mut Context<Self>) {
-        self.model_request_usage = Some(usage);
-        cx.notify();
-    }
-
     pub fn edit_prediction_usage(&self) -> Option<EditPredictionUsage> {
         self.edit_prediction_usage
     }
@@ -760,7 +734,6 @@ impl UserStore {
 
     pub fn clear_plan_and_usage(&mut self) {
         self.plan_info = None;
-        self.model_request_usage = None;
         self.edit_prediction_usage = None;
     }
 
@@ -777,10 +750,6 @@ impl UserStore {
                 .set_authenticated_user_info(Some(response.user.metrics_id.clone()), staff);
         }
 
-        self.model_request_usage = Some(ModelRequestUsage(RequestUsage {
-            limit: response.plan.usage.model_requests.limit,
-            amount: response.plan.usage.model_requests.used as i32,
-        }));
         self.edit_prediction_usage = Some(EditPredictionUsage(RequestUsage {
             limit: response.plan.usage.edit_predictions.limit,
             amount: response.plan.usage.edit_predictions.used as i32,
@@ -964,16 +933,6 @@ impl RequestUsage {
     }
 }
 
-impl ModelRequestUsage {
-    pub fn from_headers(headers: &HeaderMap<HeaderValue>) -> Result<Self> {
-        Ok(Self(RequestUsage::from_headers(
-            MODEL_REQUESTS_USAGE_LIMIT_HEADER_NAME,
-            MODEL_REQUESTS_USAGE_AMOUNT_HEADER_NAME,
-            headers,
-        )?))
-    }
-}
-
 impl EditPredictionUsage {
     pub fn from_headers(headers: &HeaderMap<HeaderValue>) -> Result<Self> {
         Ok(Self(RequestUsage::from_headers(

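With `PlanV1` removed, the debug-only `ZED_SIMULATE_PLAN` override maps straight onto `PlanV2`. A minimal standalone sketch of the mapping performed in `UserStore::plan()` above (the helper name `simulated_plan` is illustrative only, not part of the codebase):

```rust
use cloud_llm_client::{Plan, PlanV2};

// Mirrors the debug-only ZED_SIMULATE_PLAN handling: "free" | "trial" | "pro"
// now resolve to V2 plans; any other value panics in the real code path.
fn simulated_plan(value: &str) -> Option<Plan> {
    match value {
        "free" => Some(Plan::V2(PlanV2::ZedFree)),
        "trial" => Some(Plan::V2(PlanV2::ZedProTrial)),
        "pro" => Some(Plan::V2(PlanV2::ZedPro)),
        _ => None,
    }
}
```
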
crates/cloud_api_types/src/cloud_api_types.rs 🔗

@@ -28,20 +28,17 @@ pub struct AuthenticatedUser {
 
 #[derive(Debug, PartialEq, Serialize, Deserialize)]
 pub struct PlanInfo {
-    pub plan: cloud_llm_client::PlanV1,
-    #[serde(default)]
-    pub plan_v2: Option<cloud_llm_client::PlanV2>,
+    pub plan_v2: cloud_llm_client::PlanV2,
     pub subscription_period: Option<SubscriptionPeriod>,
     pub usage: cloud_llm_client::CurrentUsage,
     pub trial_started_at: Option<Timestamp>,
-    pub is_usage_based_billing_enabled: bool,
     pub is_account_too_young: bool,
     pub has_overdue_invoices: bool,
 }
 
 impl PlanInfo {
     pub fn plan(&self) -> Plan {
-        self.plan_v2.map(Plan::V2).unwrap_or(Plan::V1(self.plan))
+        Plan::V2(self.plan_v2)
     }
 }
 

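Since `plan_v2` is now a required field and `plan`/`is_usage_based_billing_enabled` are gone, building a `PlanInfo` looks like the test fixture in `crates/client/src/test.rs` above. A hedged sketch, assuming the struct has exactly the fields shown in this hunk:

```rust
use cloud_api_client::PlanInfo;
use cloud_llm_client::{CurrentUsage, Plan, PlanV2, UsageData, UsageLimit};

// Every PlanInfo now carries a V2 plan and only edit-prediction usage.
fn example_plan_info() -> PlanInfo {
    PlanInfo {
        plan_v2: PlanV2::ZedPro,
        subscription_period: None,
        usage: CurrentUsage {
            edit_predictions: UsageData {
                used: 0,
                limit: UsageLimit::Unlimited,
            },
        },
        trial_started_at: None,
        is_account_too_young: false,
        has_overdue_invoices: false,
    }
}

// PlanInfo::plan() now unconditionally wraps the V2 plan.
fn example_plan(info: &PlanInfo) -> Plan {
    info.plan()
}
```
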
crates/cloud_llm_client/src/cloud_llm_client.rs 🔗

@@ -17,30 +17,14 @@ pub const ZED_VERSION_HEADER_NAME: &str = "x-zed-version";
 /// The client may use this as a signal to refresh the token.
 pub const EXPIRED_LLM_TOKEN_HEADER_NAME: &str = "x-zed-expired-token";
 
-/// The name of the header used to indicate what plan the user is currently on.
-pub const CURRENT_PLAN_HEADER_NAME: &str = "x-zed-plan";
-
-/// The name of the header used to indicate the usage limit for model requests.
-pub const MODEL_REQUESTS_USAGE_LIMIT_HEADER_NAME: &str = "x-zed-model-requests-usage-limit";
-
-/// The name of the header used to indicate the usage amount for model requests.
-pub const MODEL_REQUESTS_USAGE_AMOUNT_HEADER_NAME: &str = "x-zed-model-requests-usage-amount";
-
 /// The name of the header used to indicate the usage limit for edit predictions.
 pub const EDIT_PREDICTIONS_USAGE_LIMIT_HEADER_NAME: &str = "x-zed-edit-predictions-usage-limit";
 
 /// The name of the header used to indicate the usage amount for edit predictions.
 pub const EDIT_PREDICTIONS_USAGE_AMOUNT_HEADER_NAME: &str = "x-zed-edit-predictions-usage-amount";
 
-/// The name of the header used to indicate the resource for which the subscription limit has been reached.
-pub const SUBSCRIPTION_LIMIT_RESOURCE_HEADER_NAME: &str = "x-zed-subscription-limit-resource";
-
-pub const MODEL_REQUESTS_RESOURCE_HEADER_VALUE: &str = "model_requests";
 pub const EDIT_PREDICTIONS_RESOURCE_HEADER_VALUE: &str = "edit_predictions";
 
-/// The name of the header used to indicate that the maximum number of consecutive tool uses has been reached.
-pub const TOOL_USE_LIMIT_REACHED_HEADER_NAME: &str = "x-zed-tool-use-limit-reached";
-
 /// The name of the header used to indicate the minimum required Zed version.
 ///
 /// This can be used to force a Zed upgrade in order to continue communicating
@@ -84,7 +68,6 @@ impl FromStr for UsageLimit {
 
 #[derive(Debug, Clone, Copy, PartialEq)]
 pub enum Plan {
-    V1(PlanV1),
     V2(PlanV2),
 }
 
@@ -94,31 +77,6 @@ impl Plan {
     }
 }
 
-#[derive(Debug, Clone, Copy, Default, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "snake_case")]
-pub enum PlanV1 {
-    #[default]
-    #[serde(alias = "Free")]
-    ZedFree,
-    #[serde(alias = "ZedPro")]
-    ZedPro,
-    #[serde(alias = "ZedProTrial")]
-    ZedProTrial,
-}
-
-impl FromStr for PlanV1 {
-    type Err = anyhow::Error;
-
-    fn from_str(value: &str) -> Result<Self, Self::Err> {
-        match value {
-            "zed_free" => Ok(Self::ZedFree),
-            "zed_pro" => Ok(Self::ZedPro),
-            "zed_pro_trial" => Ok(Self::ZedProTrial),
-            plan => Err(anyhow::anyhow!("invalid plan: {plan:?}")),
-        }
-    }
-}
-
 #[derive(Debug, Clone, Copy, Default, PartialEq, Serialize, Deserialize)]
 #[serde(rename_all = "snake_case")]
 pub enum PlanV2 {
@@ -386,15 +344,8 @@ pub struct ListModelsResponse {
     pub recommended_models: Vec<LanguageModelId>,
 }
 
-#[derive(Debug, Serialize, Deserialize)]
-pub struct GetSubscriptionResponse {
-    pub plan: PlanV1,
-    pub usage: Option<CurrentUsage>,
-}
-
 #[derive(Debug, PartialEq, Serialize, Deserialize)]
 pub struct CurrentUsage {
-    pub model_requests: UsageData,
     pub edit_predictions: UsageData,
 }
 
@@ -411,30 +362,6 @@ mod tests {
 
     use super::*;
 
-    #[test]
-    fn test_plan_v1_deserialize_snake_case() {
-        let plan = serde_json::from_value::<PlanV1>(json!("zed_free")).unwrap();
-        assert_eq!(plan, PlanV1::ZedFree);
-
-        let plan = serde_json::from_value::<PlanV1>(json!("zed_pro")).unwrap();
-        assert_eq!(plan, PlanV1::ZedPro);
-
-        let plan = serde_json::from_value::<PlanV1>(json!("zed_pro_trial")).unwrap();
-        assert_eq!(plan, PlanV1::ZedProTrial);
-    }
-
-    #[test]
-    fn test_plan_v1_deserialize_aliases() {
-        let plan = serde_json::from_value::<PlanV1>(json!("Free")).unwrap();
-        assert_eq!(plan, PlanV1::ZedFree);
-
-        let plan = serde_json::from_value::<PlanV1>(json!("ZedPro")).unwrap();
-        assert_eq!(plan, PlanV1::ZedPro);
-
-        let plan = serde_json::from_value::<PlanV1>(json!("ZedProTrial")).unwrap();
-        assert_eq!(plan, PlanV1::ZedProTrial);
-    }
-
     #[test]
     fn test_plan_v2_deserialize_snake_case() {
         let plan = serde_json::from_value::<PlanV2>(json!("zed_free")).unwrap();

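With `PlanV1` and its `FromStr`/serde support deleted, `Plan` is left with a single `V2` case, so exhaustive matches downstream shrink to three arms (compare the `title_bar` hunk later in this diff). A minimal sketch:

```rust
use cloud_llm_client::{Plan, PlanV2};

// Exhaustive over the remaining plans; no V1 arms are needed anymore.
fn plan_label(plan: Option<Plan>) -> &'static str {
    match plan {
        None | Some(Plan::V2(PlanV2::ZedFree)) => "Free",
        Some(Plan::V2(PlanV2::ZedProTrial)) => "Pro Trial",
        Some(Plan::V2(PlanV2::ZedPro)) => "Pro",
    }
}
```
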
crates/eval/src/instance.rs 🔗

@@ -1265,9 +1265,7 @@ pub fn response_events_to_markdown(
             }
             Ok(
                 LanguageModelCompletionEvent::UsageUpdate(_)
-                | LanguageModelCompletionEvent::ToolUseLimitReached
                 | LanguageModelCompletionEvent::StartMessage { .. }
-                | LanguageModelCompletionEvent::UsageUpdated { .. }
                 | LanguageModelCompletionEvent::Queued { .. }
                 | LanguageModelCompletionEvent::Started
                 | LanguageModelCompletionEvent::ReasoningDetails(_),
@@ -1359,9 +1357,7 @@ impl ThreadDialog {
                 | Ok(LanguageModelCompletionEvent::ReasoningDetails(_))
                 | Ok(LanguageModelCompletionEvent::Stop(_))
                 | Ok(LanguageModelCompletionEvent::Queued { .. })
-                | Ok(LanguageModelCompletionEvent::Started)
-                | Ok(LanguageModelCompletionEvent::UsageUpdated { .. })
-                | Ok(LanguageModelCompletionEvent::ToolUseLimitReached) => {}
+                | Ok(LanguageModelCompletionEvent::Started) => {}
 
                 Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
                     json_parse_error,

crates/git_ui/src/git_panel.rs 🔗

@@ -43,8 +43,7 @@ use gpui::{
 use itertools::Itertools;
 use language::{Buffer, File};
 use language_model::{
-    ConfiguredModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
-    Role, ZED_CLOUD_PROVIDER_ID,
+    ConfiguredModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role,
 };
 use menu;
 use multi_buffer::ExcerptInfo;
@@ -2572,18 +2571,7 @@ impl GitPanel {
         }
     }
 
-    async fn load_commit_message_prompt(
-        is_using_legacy_zed_pro: bool,
-        cx: &mut AsyncApp,
-    ) -> String {
-        // Remove this once we stop supporting legacy Zed Pro
-        // In legacy Zed Pro, Git commit summary generation did not count as a
-        // prompt. If the user changes the prompt, our classification will fail,
-        // meaning that users will be charged for generating commit messages.
-        if is_using_legacy_zed_pro {
-            return BuiltInPrompt::CommitMessage.default_content().to_string();
-        }
-
+    async fn load_commit_message_prompt(cx: &mut AsyncApp) -> String {
         let load = async {
             let store = cx.update(|cx| PromptStore::global(cx)).await.ok()?;
             store
@@ -2627,13 +2615,6 @@ impl GitPanel {
         let project = self.project.clone();
         let repo_work_dir = repo.read(cx).work_directory_abs_path.clone();
 
-        // Remove this once we stop supporting legacy Zed Pro
-        let is_using_legacy_zed_pro = provider.id() == ZED_CLOUD_PROVIDER_ID
-            && self.workspace.upgrade().map_or(false, |workspace| {
-                workspace.read(cx).user_store().read(cx).plan()
-                    == Some(cloud_llm_client::Plan::V1(cloud_llm_client::PlanV1::ZedPro))
-            });
-
         self.generate_commit_message_task = Some(cx.spawn(async move |this, mut cx| {
              async move {
                 let _defer = cx.on_drop(&this, |this, _cx| {
@@ -2669,7 +2650,7 @@ impl GitPanel {
 
                 let rules_content = Self::load_project_rules(&project, &repo_work_dir, &mut cx).await;
 
-                let prompt = Self::load_commit_message_prompt(is_using_legacy_zed_pro, &mut cx).await;
+                let prompt = Self::load_commit_message_prompt(&mut cx).await;
 
                 let subject = this.update(cx, |this, cx| {
                     this.commit_editor.read(cx).text(cx).lines().next().map(ToOwned::to_owned).unwrap_or_default()

crates/language_model/src/language_model.rs 🔗

@@ -13,7 +13,7 @@ pub mod fake_provider;
 use anthropic::{AnthropicError, parse_prompt_too_long};
 use anyhow::{Result, anyhow};
 use client::Client;
-use cloud_llm_client::{CompletionMode, CompletionRequestStatus, UsageLimit};
+use cloud_llm_client::{CompletionMode, CompletionRequestStatus};
 use futures::FutureExt;
 use futures::{StreamExt, future::BoxFuture, stream::BoxStream};
 use gpui::{AnyView, App, AsyncApp, SharedString, Task, Window};
@@ -77,11 +77,6 @@ pub enum LanguageModelCompletionEvent {
         position: usize,
     },
     Started,
-    UsageUpdated {
-        amount: usize,
-        limit: UsageLimit,
-    },
-    ToolUseLimitReached,
     Stop(StopReason),
     Text(String),
     Thinking {
@@ -115,12 +110,10 @@ impl LanguageModelCompletionEvent {
                 Ok(LanguageModelCompletionEvent::Queued { position })
             }
             CompletionRequestStatus::Started => Ok(LanguageModelCompletionEvent::Started),
-            CompletionRequestStatus::UsageUpdated { amount, limit } => {
-                Ok(LanguageModelCompletionEvent::UsageUpdated { amount, limit })
-            }
-            CompletionRequestStatus::ToolUseLimitReached => {
-                Ok(LanguageModelCompletionEvent::ToolUseLimitReached)
-            }
+            CompletionRequestStatus::UsageUpdated { .. }
+            | CompletionRequestStatus::ToolUseLimitReached => Err(
+                LanguageModelCompletionError::Other(anyhow!("Unexpected status: {status:?}")),
+            ),
             CompletionRequestStatus::Failed {
                 code,
                 message,
@@ -689,8 +682,6 @@ pub trait LanguageModel: Send + Sync {
                             match result {
                                 Ok(LanguageModelCompletionEvent::Queued { .. }) => None,
                                 Ok(LanguageModelCompletionEvent::Started) => None,
-                                Ok(LanguageModelCompletionEvent::UsageUpdated { .. }) => None,
-                                Ok(LanguageModelCompletionEvent::ToolUseLimitReached) => None,
                                 Ok(LanguageModelCompletionEvent::StartMessage { .. }) => None,
                                 Ok(LanguageModelCompletionEvent::Text(text)) => Some(Ok(text)),
                                 Ok(LanguageModelCompletionEvent::Thinking { .. }) => None,

crates/language_model/src/model/cloud_model.rs 🔗

@@ -4,7 +4,6 @@ use std::sync::Arc;
 use anyhow::Result;
 use client::Client;
 use cloud_api_types::websocket_protocol::MessageToClient;
-use cloud_llm_client::{Plan, PlanV1};
 use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _};
 use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard};
 use thiserror::Error;
@@ -21,42 +20,6 @@ impl fmt::Display for PaymentRequiredError {
     }
 }
 
-#[derive(Error, Debug)]
-pub struct ModelRequestLimitReachedError {
-    pub plan: Plan,
-}
-
-impl fmt::Display for ModelRequestLimitReachedError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        let message = match self.plan {
-            Plan::V1(PlanV1::ZedFree) => {
-                "Model request limit reached. Upgrade to Zed Pro for more requests."
-            }
-            Plan::V1(PlanV1::ZedPro) => {
-                "Model request limit reached. Upgrade to usage-based billing for more requests."
-            }
-            Plan::V1(PlanV1::ZedProTrial) => {
-                "Model request limit reached. Upgrade to Zed Pro for more requests."
-            }
-            Plan::V2(_) => "Model request limit reached.",
-        };
-
-        write!(f, "{message}")
-    }
-}
-
-#[derive(Error, Debug)]
-pub struct ToolUseLimitReachedError;
-
-impl fmt::Display for ToolUseLimitReachedError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(
-            f,
-            "Consecutive tool use limit reached. Enable Burn Mode for unlimited tool use."
-        )
-    }
-}
-
 #[derive(Clone, Default)]
 pub struct LlmApiToken(Arc<RwLock<Option<String>>>);
 

crates/language_models/src/provider/cloud.rs 🔗

@@ -2,14 +2,12 @@ use ai_onboarding::YoungAccountBanner;
 use anthropic::AnthropicModelMode;
 use anyhow::{Context as _, Result, anyhow};
 use chrono::{DateTime, Utc};
-use client::{Client, ModelRequestUsage, UserStore, zed_urls};
+use client::{Client, UserStore, zed_urls};
 use cloud_llm_client::{
-    CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, CLIENT_SUPPORTS_X_AI_HEADER_NAME,
-    CURRENT_PLAN_HEADER_NAME, CompletionBody, CompletionEvent, CompletionRequestStatus,
-    CountTokensBody, CountTokensResponse, EXPIRED_LLM_TOKEN_HEADER_NAME, ListModelsResponse,
-    MODEL_REQUESTS_RESOURCE_HEADER_VALUE, Plan, PlanV1, PlanV2,
-    SERVER_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, SUBSCRIPTION_LIMIT_RESOURCE_HEADER_NAME,
-    TOOL_USE_LIMIT_REACHED_HEADER_NAME, ZED_VERSION_HEADER_NAME,
+    CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, CLIENT_SUPPORTS_X_AI_HEADER_NAME, CompletionBody,
+    CompletionEvent, CountTokensBody, CountTokensResponse, EXPIRED_LLM_TOKEN_HEADER_NAME,
+    ListModelsResponse, Plan, PlanV2, SERVER_SUPPORTS_STATUS_MESSAGES_HEADER_NAME,
+    ZED_VERSION_HEADER_NAME,
 };
 use feature_flags::{FeatureFlagAppExt as _, OpenAiResponsesApiFeatureFlag};
 use futures::{
@@ -24,8 +22,8 @@ use language_model::{
     LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId, LanguageModelName,
     LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
     LanguageModelProviderState, LanguageModelRequest, LanguageModelToolChoice,
-    LanguageModelToolSchemaFormat, LlmApiToken, ModelRequestLimitReachedError,
-    PaymentRequiredError, RateLimiter, RefreshLlmTokenListener,
+    LanguageModelToolSchemaFormat, LlmApiToken, PaymentRequiredError, RateLimiter,
+    RefreshLlmTokenListener,
 };
 use release_channel::AppVersion;
 use schemars::JsonSchema;
@@ -36,7 +34,6 @@ pub use settings::ZedDotDevAvailableModel as AvailableModel;
 pub use settings::ZedDotDevAvailableProvider as AvailableProvider;
 use smol::io::{AsyncReadExt, BufReader};
 use std::pin::Pin;
-use std::str::FromStr as _;
 use std::sync::Arc;
 use std::time::Duration;
 use thiserror::Error;
@@ -380,8 +377,6 @@ pub struct CloudLanguageModel {
 
 struct PerformLlmCompletionResponse {
     response: Response<AsyncBody>,
-    usage: Option<ModelRequestUsage>,
-    tool_use_limit_reached: bool,
     includes_status_messages: bool,
 }
 
@@ -417,22 +412,9 @@ impl CloudLanguageModel {
                     .get(SERVER_SUPPORTS_STATUS_MESSAGES_HEADER_NAME)
                     .is_some();
 
-                let tool_use_limit_reached = response
-                    .headers()
-                    .get(TOOL_USE_LIMIT_REACHED_HEADER_NAME)
-                    .is_some();
-
-                let usage = if includes_status_messages {
-                    None
-                } else {
-                    ModelRequestUsage::from_headers(response.headers()).ok()
-                };
-
                 return Ok(PerformLlmCompletionResponse {
                     response,
-                    usage,
                     includes_status_messages,
-                    tool_use_limit_reached,
                 });
             }
 
@@ -447,26 +429,7 @@ impl CloudLanguageModel {
                 continue;
             }
 
-            if status == StatusCode::FORBIDDEN
-                && response
-                    .headers()
-                    .get(SUBSCRIPTION_LIMIT_RESOURCE_HEADER_NAME)
-                    .is_some()
-            {
-                if let Some(MODEL_REQUESTS_RESOURCE_HEADER_VALUE) = response
-                    .headers()
-                    .get(SUBSCRIPTION_LIMIT_RESOURCE_HEADER_NAME)
-                    .and_then(|resource| resource.to_str().ok())
-                    && let Some(plan) = response
-                        .headers()
-                        .get(CURRENT_PLAN_HEADER_NAME)
-                        .and_then(|plan| plan.to_str().ok())
-                        .and_then(|plan| cloud_llm_client::PlanV1::from_str(plan).ok())
-                        .map(Plan::V1)
-                {
-                    return Err(anyhow!(ModelRequestLimitReachedError { plan }));
-                }
-            } else if status == StatusCode::PAYMENT_REQUIRED {
+            if status == StatusCode::PAYMENT_REQUIRED {
                 return Err(anyhow!(PaymentRequiredError));
             }
 
@@ -792,9 +755,7 @@ impl LanguageModel for CloudLanguageModel {
                 let future = self.request_limiter.stream(async move {
                     let PerformLlmCompletionResponse {
                         response,
-                        usage,
                         includes_status_messages,
-                        tool_use_limit_reached,
                     } = Self::perform_llm_completion(
                         client.clone(),
                         llm_api_token,
@@ -818,11 +779,7 @@ impl LanguageModel for CloudLanguageModel {
 
                     let mut mapper = AnthropicEventMapper::new();
                     Ok(map_cloud_completion_events(
-                        Box::pin(
-                            response_lines(response, includes_status_messages)
-                                .chain(usage_updated_event(usage))
-                                .chain(tool_use_limit_reached_event(tool_use_limit_reached)),
-                        ),
+                        Box::pin(response_lines(response, includes_status_messages)),
                         &provider_name,
                         move |event| mapper.map_event(event),
                     ))
@@ -845,9 +802,7 @@ impl LanguageModel for CloudLanguageModel {
                     let future = self.request_limiter.stream(async move {
                         let PerformLlmCompletionResponse {
                             response,
-                            usage,
                             includes_status_messages,
-                            tool_use_limit_reached,
                         } = Self::perform_llm_completion(
                             client.clone(),
                             llm_api_token,
@@ -867,11 +822,7 @@ impl LanguageModel for CloudLanguageModel {
 
                         let mut mapper = OpenAiResponseEventMapper::new();
                         Ok(map_cloud_completion_events(
-                            Box::pin(
-                                response_lines(response, includes_status_messages)
-                                    .chain(usage_updated_event(usage))
-                                    .chain(tool_use_limit_reached_event(tool_use_limit_reached)),
-                            ),
+                            Box::pin(response_lines(response, includes_status_messages)),
                             &provider_name,
                             move |event| mapper.map_event(event),
                         ))
@@ -889,9 +840,7 @@ impl LanguageModel for CloudLanguageModel {
                     let future = self.request_limiter.stream(async move {
                         let PerformLlmCompletionResponse {
                             response,
-                            usage,
                             includes_status_messages,
-                            tool_use_limit_reached,
                         } = Self::perform_llm_completion(
                             client.clone(),
                             llm_api_token,
@@ -911,11 +860,7 @@ impl LanguageModel for CloudLanguageModel {
 
                         let mut mapper = OpenAiEventMapper::new();
                         Ok(map_cloud_completion_events(
-                            Box::pin(
-                                response_lines(response, includes_status_messages)
-                                    .chain(usage_updated_event(usage))
-                                    .chain(tool_use_limit_reached_event(tool_use_limit_reached)),
-                            ),
+                            Box::pin(response_lines(response, includes_status_messages)),
                             &provider_name,
                             move |event| mapper.map_event(event),
                         ))
@@ -937,9 +882,7 @@ impl LanguageModel for CloudLanguageModel {
                 let future = self.request_limiter.stream(async move {
                     let PerformLlmCompletionResponse {
                         response,
-                        usage,
                         includes_status_messages,
-                        tool_use_limit_reached,
                     } = Self::perform_llm_completion(
                         client.clone(),
                         llm_api_token,
@@ -959,11 +902,7 @@ impl LanguageModel for CloudLanguageModel {
 
                     let mut mapper = OpenAiEventMapper::new();
                     Ok(map_cloud_completion_events(
-                        Box::pin(
-                            response_lines(response, includes_status_messages)
-                                .chain(usage_updated_event(usage))
-                                .chain(tool_use_limit_reached_event(tool_use_limit_reached)),
-                        ),
+                        Box::pin(response_lines(response, includes_status_messages)),
                         &provider_name,
                         move |event| mapper.map_event(event),
                     ))
@@ -978,9 +917,7 @@ impl LanguageModel for CloudLanguageModel {
                 let future = self.request_limiter.stream(async move {
                     let PerformLlmCompletionResponse {
                         response,
-                        usage,
                         includes_status_messages,
-                        tool_use_limit_reached,
                     } = Self::perform_llm_completion(
                         client.clone(),
                         llm_api_token,
@@ -1000,11 +937,7 @@ impl LanguageModel for CloudLanguageModel {
 
                     let mut mapper = GoogleEventMapper::new();
                     Ok(map_cloud_completion_events(
-                        Box::pin(
-                            response_lines(response, includes_status_messages)
-                                .chain(usage_updated_event(usage))
-                                .chain(tool_use_limit_reached_event(tool_use_limit_reached)),
-                        ),
+                        Box::pin(response_lines(response, includes_status_messages)),
                         &provider_name,
                         move |event| mapper.map_event(event),
                     ))
@@ -1058,29 +991,6 @@ fn provider_name(provider: &cloud_llm_client::LanguageModelProvider) -> Language
     }
 }
 
-fn usage_updated_event<T>(
-    usage: Option<ModelRequestUsage>,
-) -> impl Stream<Item = Result<CompletionEvent<T>>> {
-    futures::stream::iter(usage.map(|usage| {
-        Ok(CompletionEvent::Status(
-            CompletionRequestStatus::UsageUpdated {
-                amount: usage.amount as usize,
-                limit: usage.limit,
-            },
-        ))
-    }))
-}
-
-fn tool_use_limit_reached_event<T>(
-    tool_use_limit_reached: bool,
-) -> impl Stream<Item = Result<CompletionEvent<T>>> {
-    futures::stream::iter(tool_use_limit_reached.then(|| {
-        Ok(CompletionEvent::Status(
-            CompletionRequestStatus::ToolUseLimitReached,
-        ))
-    }))
-}
-
 fn response_lines<T: DeserializeOwned>(
     response: Response<AsyncBody>,
     includes_status_messages: bool,
@@ -1118,19 +1028,16 @@ struct ZedAiConfiguration {
 
 impl RenderOnce for ZedAiConfiguration {
     fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
-        let is_pro = self.plan.is_some_and(|plan| {
-            matches!(plan, Plan::V1(PlanV1::ZedPro) | Plan::V2(PlanV2::ZedPro))
-        });
+        let is_pro = self
+            .plan
+            .is_some_and(|plan| plan == Plan::V2(PlanV2::ZedPro));
         let subscription_text = match (self.plan, self.subscription_period) {
-            (Some(Plan::V1(PlanV1::ZedPro) | Plan::V2(PlanV2::ZedPro)), Some(_)) => {
+            (Some(Plan::V2(PlanV2::ZedPro)), Some(_)) => {
                 "You have access to Zed's hosted models through your Pro subscription."
             }
-            (Some(Plan::V1(PlanV1::ZedProTrial) | Plan::V2(PlanV2::ZedProTrial)), Some(_)) => {
+            (Some(Plan::V2(PlanV2::ZedProTrial)), Some(_)) => {
                 "You have access to Zed's hosted models through your Pro trial."
             }
-            (Some(Plan::V1(PlanV1::ZedFree)), Some(_)) => {
-                "You have basic access to Zed's hosted models through the Free plan."
-            }
             (Some(Plan::V2(PlanV2::ZedFree)), Some(_)) => {
                 if self.eligible_for_trial {
                     "Subscribe for access to Zed's hosted models. Start with a 14 day free trial."
@@ -1294,15 +1201,15 @@ impl Component for ZedAiConfiguration {
                     ),
                     single_example(
                         "Free Plan",
-                        configuration(true, Some(Plan::V1(PlanV1::ZedFree)), true, false),
+                        configuration(true, Some(Plan::V2(PlanV2::ZedFree)), true, false),
                     ),
                     single_example(
                         "Zed Pro Trial Plan",
-                        configuration(true, Some(Plan::V1(PlanV1::ZedProTrial)), true, false),
+                        configuration(true, Some(Plan::V2(PlanV2::ZedProTrial)), true, false),
                     ),
                     single_example(
                         "Zed Pro Plan",
-                        configuration(true, Some(Plan::V1(PlanV1::ZedPro)), true, false),
+                        configuration(true, Some(Plan::V2(PlanV2::ZedPro)), true, false),
                     ),
                 ])
                 .into_any_element(),

crates/title_bar/src/title_bar.rs 🔗

@@ -24,7 +24,7 @@ use crate::application_menu::{
 use auto_update::AutoUpdateStatus;
 use call::ActiveCall;
 use client::{Client, UserStore, zed_urls};
-use cloud_llm_client::{Plan, PlanV1, PlanV2};
+use cloud_llm_client::{Plan, PlanV2};
 use gpui::{
     Action, AnyElement, App, Context, Corner, Element, Entity, FocusHandle, Focusable,
     InteractiveElement, IntoElement, MouseButton, ParentElement, Render,
@@ -964,15 +964,13 @@ impl TitleBar {
                     let user_login = user_login.clone();
 
                     let (plan_name, label_color, bg_color) = match plan {
-                        None | Some(Plan::V1(PlanV1::ZedFree) | Plan::V2(PlanV2::ZedFree)) => {
+                        None | Some(Plan::V2(PlanV2::ZedFree)) => {
                             ("Free", Color::Default, free_chip_bg)
                         }
-                        Some(Plan::V1(PlanV1::ZedProTrial) | Plan::V2(PlanV2::ZedProTrial)) => {
+                        Some(Plan::V2(PlanV2::ZedProTrial)) => {
                             ("Pro Trial", Color::Accent, pro_chip_bg)
                         }
-                        Some(Plan::V1(PlanV1::ZedPro) | Plan::V2(PlanV2::ZedPro)) => {
-                            ("Pro", Color::Accent, pro_chip_bg)
-                        }
+                        Some(Plan::V2(PlanV2::ZedPro)) => ("Pro", Color::Accent, pro_chip_bg),
                     };
 
                     menu.when(is_signed_in, |this| {