diff --git a/llm/oai/oai.go b/llm/oai/oai.go
index 02b46f1d33a387346d79b530c1789c89be3fe89e..937ac8d85394bce5e75a5c0679290d74227e0250 100644
--- a/llm/oai/oai.go
+++ b/llm/oai/oai.go
@@ -306,6 +306,13 @@ var (
 		APIKeyEnv: OpenAIAPIKeyEnv,
 	}
 
+	GPT53Codex = Model{
+		UserName:  "gpt-5.3-codex",
+		ModelName: "gpt-5.3-codex",
+		URL:       OpenAIURL,
+		APIKeyEnv: OpenAIAPIKeyEnv,
+	}
+
 	// Skaband-specific model names.
 	// Provider details (URL and APIKeyEnv) are handled by skaband
 	Qwen = Model{
@@ -344,6 +351,7 @@ var ModelsRegistry = []Model{
 	GPT5Nano,
 	GPT5Codex,
 	GPT52Codex,
+	GPT53Codex,
 	O3,
 	O4Mini,
 	Gemini25Flash,
diff --git a/llm/oai/oai_responses.go b/llm/oai/oai_responses.go
index 4b7808344e2b145374d0b08a7724337d6b64a552..dcb8eaf222bc1fdba0b9813c57ec5ba5b4720dde 100644
--- a/llm/oai/oai_responses.go
+++ b/llm/oai/oai_responses.go
@@ -341,6 +341,8 @@ func (s *ResponsesService) TokenContextWindow() int {
 
 	// Use the same context window logic as the regular service
 	switch model.ModelName {
+	case "gpt-5.3-codex":
+		return 288000 // 288k for gpt-5.3-codex
 	case "gpt-5.2-codex":
 		return 272000 // 272k for gpt-5.2-codex
 	case "gpt-5.1-codex":
diff --git a/llm/oai/oai_responses_test.go b/llm/oai/oai_responses_test.go
index d47492b11410004c9da94c0a6c1e62ce001e6693..c51002895472b791eb78ce9cd8bb4a12237aea68 100644
--- a/llm/oai/oai_responses_test.go
+++ b/llm/oai/oai_responses_test.go
@@ -284,6 +284,7 @@ func TestResponsesServiceTokenContextWindow(t *testing.T) {
 		model    Model
 		expected int
 	}{
+		{model: GPT53Codex, expected: 288000},
 		{model: GPT52Codex, expected: 272000},
 		{model: GPT5Codex, expected: 256000},
 		{model: GPT41, expected: 200000},
diff --git a/models/models.go b/models/models.go
index 0c3db1c8d510dc2c8e49f342df9929850a99a5d8..8026fc3c6f098250b6aea62ea9fc93dae6733ad5 100644
--- a/models/models.go
+++ b/models/models.go
@@ -241,6 +241,23 @@ func All() []Model {
 				return svc, nil
 			},
 		},
+		{
+			ID:              "gpt-5.3-codex",
+			Provider:        ProviderOpenAI,
+			Description:     "GPT-5.3 Codex",
+			RequiredEnvVars: []string{"OPENAI_API_KEY"},
+			GatewayEnabled:  true,
+			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
+				if config.OpenAIAPIKey == "" {
+					return nil, fmt.Errorf("gpt-5.3-codex requires OPENAI_API_KEY")
+				}
+				svc := &oai.ResponsesService{Model: oai.GPT53Codex, APIKey: config.OpenAIAPIKey, HTTPC: httpc, ThinkingLevel: llm.ThinkingLevelMedium}
+				if url := config.getOpenAIURL(); url != "" {
+					svc.ModelURL = url
+				}
+				return svc, nil
+			},
+		},
 		{
 			ID:       "gpt-5.2-codex",
 			Provider: ProviderOpenAI,