shelley: add gpt-5.3-codex model support

Created by Philip Zeyliger and Shelley

Prompt: In a new worktree, pull the latest. Then add codex-5.3 support to llmgateway in one commit and Shelley in another.

- Add GPT53Codex model definition in oai.go
- Add 288k context window for gpt-5.3-codex in oai_responses.go
- Add gpt-5.3-codex to Shelley's models.go with gateway support
- Add test for the new context window size

Co-authored-by: Shelley <shelley@exe.dev>

Change summary

llm/oai/oai.go                |  8 ++++++++
llm/oai/oai_responses.go      |  2 ++
llm/oai/oai_responses_test.go |  1 +
models/models.go              | 17 +++++++++++++++++
4 files changed, 28 insertions(+)

Detailed changes

llm/oai/oai.go 🔗

@@ -306,6 +306,13 @@ var (
 		APIKeyEnv: OpenAIAPIKeyEnv,
 	}
 
+	GPT53Codex = Model{
+		UserName:  "gpt-5.3-codex",
+		ModelName: "gpt-5.3-codex",
+		URL:       OpenAIURL,
+		APIKeyEnv: OpenAIAPIKeyEnv,
+	}
+
 	// Skaband-specific model names.
 	// Provider details (URL and APIKeyEnv) are handled by skaband
 	Qwen = Model{
@@ -344,6 +351,7 @@ var ModelsRegistry = []Model{
 	GPT5Nano,
 	GPT5Codex,
 	GPT52Codex,
+	GPT53Codex,
 	O3,
 	O4Mini,
 	Gemini25Flash,

llm/oai/oai_responses.go 🔗

@@ -341,6 +341,8 @@ func (s *ResponsesService) TokenContextWindow() int {
 
 	// Use the same context window logic as the regular service
 	switch model.ModelName {
+	case "gpt-5.3-codex":
+		return 288000 // 288k for gpt-5.3-codex
 	case "gpt-5.2-codex":
 		return 272000 // 272k for gpt-5.2-codex
 	case "gpt-5.1-codex":

llm/oai/oai_responses_test.go 🔗

@@ -284,6 +284,7 @@ func TestResponsesServiceTokenContextWindow(t *testing.T) {
 		model    Model
 		expected int
 	}{
+		{model: GPT53Codex, expected: 288000},
 		{model: GPT52Codex, expected: 272000},
 		{model: GPT5Codex, expected: 256000},
 		{model: GPT41, expected: 200000},

models/models.go 🔗

@@ -241,6 +241,23 @@ func All() []Model {
 				return svc, nil
 			},
 		},
+		{
+			ID:              "gpt-5.3-codex",
+			Provider:        ProviderOpenAI,
+			Description:     "GPT-5.3 Codex",
+			RequiredEnvVars: []string{"OPENAI_API_KEY"},
+			GatewayEnabled:  true,
+			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
+				if config.OpenAIAPIKey == "" {
+					return nil, fmt.Errorf("gpt-5.3-codex requires OPENAI_API_KEY")
+				}
+				svc := &oai.ResponsesService{Model: oai.GPT53Codex, APIKey: config.OpenAIAPIKey, HTTPC: httpc, ThinkingLevel: llm.ThinkingLevelMedium}
+				if url := config.getOpenAIURL(); url != "" {
+					svc.ModelURL = url
+				}
+				return svc, nil
+			},
+		},
 		{
 			ID:              "gpt-5.2-codex",
 			Provider:        ProviderOpenAI,