@@ -309,6 +309,9 @@ impl LanguageModel for OpenAiLanguageModel {
| Model::FivePointOne
| Model::FivePointTwo
| Model::FivePointTwoCodex
+ | Model::FivePointThreeCodex
+ | Model::FivePointFour
+ | Model::FivePointFourPro
| Model::O1
| Model::O3 => true,
Model::ThreePointFiveTurbo
@@ -1180,10 +1183,13 @@ pub fn count_open_ai_tokens(
| Model::FiveCodex
| Model::FiveMini
| Model::FiveNano => tiktoken_rs::num_tokens_from_messages(model.id(), &messages),
- // GPT-5.1, 5.2, and 5.2-codex don't have dedicated tiktoken support; use gpt-5 tokenizer
- Model::FivePointOne | Model::FivePointTwo | Model::FivePointTwoCodex => {
- tiktoken_rs::num_tokens_from_messages("gpt-5", &messages)
- }
+ // GPT-5.1, 5.2, 5.2-codex, 5.3-codex, 5.4, and 5.4-pro don't have dedicated tiktoken support; use gpt-5 tokenizer
+ Model::FivePointOne
+ | Model::FivePointTwo
+ | Model::FivePointTwoCodex
+ | Model::FivePointThreeCodex
+ | Model::FivePointFour
+ | Model::FivePointFourPro => tiktoken_rs::num_tokens_from_messages("gpt-5", &messages),
}
.map(|tokens| tokens as u64)
})
@@ -88,6 +88,12 @@ pub enum Model {
FivePointTwo,
#[serde(rename = "gpt-5.2-codex")]
FivePointTwoCodex,
+ #[serde(rename = "gpt-5.3-codex")]
+ FivePointThreeCodex,
+ #[serde(rename = "gpt-5.4")]
+ FivePointFour,
+ #[serde(rename = "gpt-5.4-pro")]
+ FivePointFourPro,
#[serde(rename = "custom")]
Custom {
name: String,
@@ -128,6 +134,9 @@ impl Model {
"gpt-5.1" => Ok(Self::FivePointOne),
"gpt-5.2" => Ok(Self::FivePointTwo),
"gpt-5.2-codex" => Ok(Self::FivePointTwoCodex),
+ "gpt-5.3-codex" => Ok(Self::FivePointThreeCodex),
+ "gpt-5.4" => Ok(Self::FivePointFour),
+ "gpt-5.4-pro" => Ok(Self::FivePointFourPro),
invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"),
}
}
@@ -149,6 +158,9 @@ impl Model {
Self::FivePointOne => "gpt-5.1",
Self::FivePointTwo => "gpt-5.2",
Self::FivePointTwoCodex => "gpt-5.2-codex",
+ Self::FivePointThreeCodex => "gpt-5.3-codex",
+ Self::FivePointFour => "gpt-5.4",
+ Self::FivePointFourPro => "gpt-5.4-pro",
Self::Custom { name, .. } => name,
}
}
@@ -170,6 +182,9 @@ impl Model {
Self::FivePointOne => "gpt-5.1",
Self::FivePointTwo => "gpt-5.2",
Self::FivePointTwoCodex => "gpt-5.2-codex",
+ Self::FivePointThreeCodex => "gpt-5.3-codex",
+ Self::FivePointFour => "gpt-5.4",
+ Self::FivePointFourPro => "gpt-5.4-pro",
Self::Custom { display_name, .. } => display_name.as_deref().unwrap_or(&self.id()),
}
}
@@ -186,11 +201,14 @@ impl Model {
Self::O3 => 200_000,
Self::Five => 272_000,
Self::FiveCodex => 272_000,
Self::FiveMini => 272_000,
Self::FiveNano => 272_000,
Self::FivePointOne => 400_000,
Self::FivePointTwo => 400_000,
Self::FivePointTwoCodex => 400_000,
+ Self::FivePointThreeCodex => 400_000,
+ Self::FivePointFour => 1_050_000,
+ Self::FivePointFourPro => 1_050_000,
Self::Custom { max_tokens, .. } => *max_tokens,
}
}
@@ -215,6 +233,9 @@ impl Model {
Self::FivePointOne => Some(128_000),
Self::FivePointTwo => Some(128_000),
Self::FivePointTwoCodex => Some(128_000),
+ Self::FivePointThreeCodex => Some(128_000),
+ Self::FivePointFour => Some(128_000),
+ Self::FivePointFourPro => Some(128_000),
}
}
@@ -223,6 +244,7 @@ impl Model {
Self::Custom {
reasoning_effort, ..
} => reasoning_effort.to_owned(),
+ Self::FivePointThreeCodex | Self::FivePointFourPro => Some(ReasoningEffort::Medium),
_ => None,
}
}
@@ -233,7 +255,10 @@ impl Model {
supports_chat_completions,
..
} => *supports_chat_completions,
- Self::FiveCodex | Self::FivePointTwoCodex => false,
+ Self::FiveCodex
+ | Self::FivePointTwoCodex
+ | Self::FivePointThreeCodex
+ | Self::FivePointFourPro => false,
_ => true,
}
}
@@ -254,6 +279,9 @@ impl Model {
| Self::FivePointOne
| Self::FivePointTwo
| Self::FivePointTwoCodex
+ | Self::FivePointThreeCodex
+ | Self::FivePointFour
+ | Self::FivePointFourPro
| Self::FiveNano => true,
Self::O1 | Self::O3 | Self::O3Mini | Model::Custom { .. } => false,
}