Add GPT-5.3-Codex BYOK model under the OpenAI provider (#50122)

Created by Richard Feldman

Adds `gpt-5.3-codex` as a built-in model under the OpenAI provider for
BYOK usage.

Model specs:
- 400,000 context window
- 128,000 max output tokens
- Reasoning token support (default medium effort)
- Uses the Responses API (like other codex models)
- Token counting falls back to the gpt-5 tokenizer

Closes AI-59

Release Notes:

- Added support for GPT-5.3-Codex as a bring-your-own-key model in the
OpenAI provider.

Change summary

crates/language_models/src/provider/open_ai.rs |  8 ++++++--
crates/open_ai/src/open_ai.rs                  | 11 ++++++++++-
2 files changed, 16 insertions(+), 3 deletions(-)

Detailed changes

crates/language_models/src/provider/open_ai.rs 🔗

@@ -309,6 +309,7 @@ impl LanguageModel for OpenAiLanguageModel {
             | Model::FivePointOne
             | Model::FivePointTwo
             | Model::FivePointTwoCodex
+            | Model::FivePointThreeCodex
             | Model::O1
             | Model::O3 => true,
             Model::ThreePointFiveTurbo
@@ -1180,8 +1181,11 @@ pub fn count_open_ai_tokens(
             | Model::FiveCodex
             | Model::FiveMini
             | Model::FiveNano => tiktoken_rs::num_tokens_from_messages(model.id(), &messages),
-            // GPT-5.1, 5.2, and 5.2-codex don't have dedicated tiktoken support; use gpt-5 tokenizer
-            Model::FivePointOne | Model::FivePointTwo | Model::FivePointTwoCodex => {
+            // GPT-5.1, 5.2, 5.2-codex, and 5.3-codex don't have dedicated tiktoken support; use gpt-5 tokenizer
+            Model::FivePointOne
+            | Model::FivePointTwo
+            | Model::FivePointTwoCodex
+            | Model::FivePointThreeCodex => {
                 tiktoken_rs::num_tokens_from_messages("gpt-5", &messages)
             }
         }

crates/open_ai/src/open_ai.rs 🔗

@@ -88,6 +88,8 @@ pub enum Model {
     FivePointTwo,
     #[serde(rename = "gpt-5.2-codex")]
     FivePointTwoCodex,
+    #[serde(rename = "gpt-5.3-codex")]
+    FivePointThreeCodex,
     #[serde(rename = "custom")]
     Custom {
         name: String,
@@ -128,6 +130,7 @@ impl Model {
             "gpt-5.1" => Ok(Self::FivePointOne),
             "gpt-5.2" => Ok(Self::FivePointTwo),
             "gpt-5.2-codex" => Ok(Self::FivePointTwoCodex),
+            "gpt-5.3-codex" => Ok(Self::FivePointThreeCodex),
             invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"),
         }
     }
@@ -149,6 +152,7 @@ impl Model {
             Self::FivePointOne => "gpt-5.1",
             Self::FivePointTwo => "gpt-5.2",
             Self::FivePointTwoCodex => "gpt-5.2-codex",
+            Self::FivePointThreeCodex => "gpt-5.3-codex",
             Self::Custom { name, .. } => name,
         }
     }
@@ -170,6 +174,7 @@ impl Model {
             Self::FivePointOne => "gpt-5.1",
             Self::FivePointTwo => "gpt-5.2",
             Self::FivePointTwoCodex => "gpt-5.2-codex",
+            Self::FivePointThreeCodex => "gpt-5.3-codex",
             Self::Custom { display_name, .. } => display_name.as_deref().unwrap_or(&self.id()),
         }
     }
@@ -191,6 +196,7 @@ impl Model {
             Self::FivePointOne => 400_000,
             Self::FivePointTwo => 400_000,
             Self::FivePointTwoCodex => 400_000,
+            Self::FivePointThreeCodex => 400_000,
             Self::Custom { max_tokens, .. } => *max_tokens,
         }
     }
@@ -215,6 +221,7 @@ impl Model {
             Self::FivePointOne => Some(128_000),
             Self::FivePointTwo => Some(128_000),
             Self::FivePointTwoCodex => Some(128_000),
+            Self::FivePointThreeCodex => Some(128_000),
         }
     }
 
@@ -223,6 +230,7 @@ impl Model {
             Self::Custom {
                 reasoning_effort, ..
             } => reasoning_effort.to_owned(),
+            Self::FivePointThreeCodex => Some(ReasoningEffort::Medium),
             _ => None,
         }
     }
@@ -233,7 +241,7 @@ impl Model {
                 supports_chat_completions,
                 ..
             } => *supports_chat_completions,
-            Self::FiveCodex | Self::FivePointTwoCodex => false,
+            Self::FiveCodex | Self::FivePointTwoCodex | Self::FivePointThreeCodex => false,
             _ => true,
         }
     }
@@ -254,6 +262,7 @@ impl Model {
             | Self::FivePointOne
             | Self::FivePointTwo
             | Self::FivePointTwoCodex
+            | Self::FivePointThreeCodex
             | Self::FiveNano => true,
             Self::O1 | Self::O3 | Self::O3Mini | Model::Custom { .. } => false,
         }