Add GPT 5.1 to Zed BYOK (#43492)

Created by Mikayla Maki

Release Notes:

- Added support for OpenAI's GPT 5.1 model to BYOK

Change summary

Cargo.lock                                     |  4 ++--
Cargo.toml                                     |  2 +-
crates/language_models/src/provider/open_ai.rs | 12 ++++++------
crates/open_ai/src/open_ai.rs                  |  9 ++++++++-
4 files changed, 17 insertions(+), 10 deletions(-)

Detailed changes

Cargo.lock 🔗

@@ -17313,8 +17313,8 @@ dependencies = [
 
 [[package]]
 name = "tiktoken-rs"
-version = "0.8.0"
-source = "git+https://github.com/zed-industries/tiktoken-rs?rev=30c32a4522751699adeda0d5840c71c3b75ae73d#30c32a4522751699adeda0d5840c71c3b75ae73d"
+version = "0.9.1"
+source = "git+https://github.com/zed-industries/tiktoken-rs?rev=7249f999c5fdf9bf3cc5c288c964454e4dac0c00#7249f999c5fdf9bf3cc5c288c964454e4dac0c00"
 dependencies = [
  "anyhow",
  "base64 0.22.1",

Cargo.toml 🔗

@@ -655,7 +655,7 @@ sysinfo = "0.37.0"
 take-until = "0.2.0"
 tempfile = "3.20.0"
 thiserror = "2.0.12"
-tiktoken-rs = { git = "https://github.com/zed-industries/tiktoken-rs", rev = "30c32a4522751699adeda0d5840c71c3b75ae73d" }
+tiktoken-rs = { git = "https://github.com/zed-industries/tiktoken-rs", rev = "7249f999c5fdf9bf3cc5c288c964454e4dac0c00" }
 time = { version = "0.3", features = [
     "macros",
     "parsing",

crates/language_models/src/provider/open_ai.rs 🔗

@@ -277,6 +277,7 @@ impl LanguageModel for OpenAiLanguageModel {
             | Model::Five
             | Model::FiveMini
             | Model::FiveNano
+            | Model::FivePointOne
             | Model::O1
             | Model::O3
             | Model::O4Mini => true,
@@ -644,7 +645,6 @@ pub fn count_open_ai_tokens(
 ) -> BoxFuture<'static, Result<u64>> {
     cx.background_spawn(async move {
         let messages = collect_tiktoken_messages(request);
-
         match model {
             Model::Custom { max_tokens, .. } => {
                 let model = if max_tokens >= 100_000 {
@@ -672,11 +672,11 @@ pub fn count_open_ai_tokens(
             | Model::O1
             | Model::O3
             | Model::O3Mini
-            | Model::O4Mini => tiktoken_rs::num_tokens_from_messages(model.id(), &messages),
-            // GPT-5 models don't have tiktoken support yet; fall back on gpt-4o tokenizer
-            Model::Five | Model::FiveMini | Model::FiveNano => {
-                tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages)
-            }
+            | Model::O4Mini
+            | Model::Five
+            | Model::FiveMini
+            | Model::FiveNano => tiktoken_rs::num_tokens_from_messages(model.id(), &messages),
+            Model::FivePointOne => tiktoken_rs::num_tokens_from_messages("gpt-5", &messages), // GPT-5.1 doesn't have tiktoken support yet; fall back on the gpt-5 tokenizer
         }
         .map(|tokens| tokens as u64)
     })

crates/open_ai/src/open_ai.rs 🔗

@@ -85,7 +85,8 @@ pub enum Model {
     FiveMini,
     #[serde(rename = "gpt-5-nano")]
     FiveNano,
-
+    #[serde(rename = "gpt-5.1")]
+    FivePointOne,
     #[serde(rename = "custom")]
     Custom {
         name: String,
@@ -121,6 +122,7 @@ impl Model {
             "gpt-5" => Ok(Self::Five),
             "gpt-5-mini" => Ok(Self::FiveMini),
             "gpt-5-nano" => Ok(Self::FiveNano),
+            "gpt-5.1" => Ok(Self::FivePointOne),
             invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"),
         }
     }
@@ -142,6 +144,7 @@ impl Model {
             Self::Five => "gpt-5",
             Self::FiveMini => "gpt-5-mini",
             Self::FiveNano => "gpt-5-nano",
+            Self::FivePointOne => "gpt-5.1",
             Self::Custom { name, .. } => name,
         }
     }
@@ -163,6 +166,7 @@ impl Model {
             Self::Five => "gpt-5",
             Self::FiveMini => "gpt-5-mini",
             Self::FiveNano => "gpt-5-nano",
+            Self::FivePointOne => "gpt-5.1",
             Self::Custom {
                 name, display_name, ..
             } => display_name.as_ref().unwrap_or(name),
@@ -186,6 +190,7 @@ impl Model {
             Self::Five => 272_000,
             Self::FiveMini => 272_000,
             Self::FiveNano => 272_000,
+            Self::FivePointOne => 400_000,
             Self::Custom { max_tokens, .. } => *max_tokens,
         }
     }
@@ -210,6 +215,7 @@ impl Model {
             Self::Five => Some(128_000),
             Self::FiveMini => Some(128_000),
             Self::FiveNano => Some(128_000),
+            Self::FivePointOne => Some(128_000),
         }
     }
 
@@ -237,6 +243,7 @@ impl Model {
             | Self::FourPointOneNano
             | Self::Five
             | Self::FiveMini
+            | Self::FivePointOne
             | Self::FiveNano => true,
             Self::O1 | Self::O3 | Self::O3Mini | Self::O4Mini | Model::Custom { .. } => false,
         }