feat: aihubmix cmd to update the models

Created by Carlos Alexandro Becker

Signed-off-by: Carlos Alexandro Becker <caarlos0@users.noreply.github.com>

Change summary

.github/workflows/update.yml             |    1 
CRUSH.md                                 |    2 
cmd/aihubmix/main.go                     |  204 ++
internal/providers/configs/aihubmix.json | 1964 ++++++++++++++++++++++++-
4 files changed, 2,071 insertions(+), 100 deletions(-)

Detailed changes

.github/workflows/update.yml

@@ -21,6 +21,7 @@ jobs:
         run: |
           go run ./cmd/openrouter/main.go
           go run ./cmd/synthetic/main.go
+          go run ./cmd/aihubmix/main.go
           # we need to add this back when we know that the providers/models all work
           # go run ./cmd/huggingface/main.go
       - uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v5

CRUSH.md

@@ -4,10 +4,12 @@
 
 - `go build` - Build the main HTTP server
 - `go build ./cmd/openrouter` - Build OpenRouter config generator
+- `go build ./cmd/aihubmix` - Build AIHubMix config generator
 - `go test ./...` - Run all tests
 - `go test -run TestName ./pkg/...` - Run specific test
 - `go run main.go` - Start HTTP server on :8080
 - `go run ./cmd/openrouter/main.go` - Generate OpenRouter config
+- `go run ./cmd/aihubmix/main.go` - Generate AIHubMix config
 
 ## Code Style Guidelines
 

cmd/aihubmix/main.go

@@ -0,0 +1,204 @@
+// Package main provides a command-line tool to fetch models from AIHubMix
+// and generate a configuration file for the provider.
+package main
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"io"
+	"log"
+	"net/http"
+	"os"
+	"slices"
+	"strings"
+	"time"
+
+	"github.com/charmbracelet/catwalk/pkg/catwalk"
+)
+
+// APIModel represents a model from the AIHubMix API.
+type APIModel struct {
+	ModelID         string  `json:"model_id"`
+	Desc            string  `json:"desc"`
+	Pricing         Pricing `json:"pricing"`
+	Types           string  `json:"types"`
+	Features        string  `json:"features"`
+	InputModalities string  `json:"input_modalities"`
+	MaxOutput       int64   `json:"max_output"`
+	ContextLength   int64   `json:"context_length"`
+}
+
+// Pricing contains the pricing information from the API.
+type Pricing struct {
+	Input      *float64 `json:"input"`
+	Output     *float64 `json:"output"`
+	CacheRead  *float64 `json:"cache_read"`
+	CacheWrite *float64 `json:"cache_write"`
+}
+
+// ModelsResponse is the response structure for the models API.
+type ModelsResponse struct {
+	Data    []APIModel `json:"data"`
+	Message string     `json:"message"`
+	Success bool       `json:"success"`
+}
+
+func fetchAIHubMixModels() (*ModelsResponse, error) {
+	client := &http.Client{Timeout: 30 * time.Second}
+	req, _ := http.NewRequestWithContext(
+		context.Background(),
+		"GET",
+		"https://aihubmix.com/api/v1/models?type=llm",
+		nil,
+	)
+	req.Header.Set("User-Agent", "Crush-Client/1.0")
+	resp, err := client.Do(req)
+	if err != nil {
+		return nil, err //nolint:wrapcheck
+	}
+	defer resp.Body.Close() //nolint:errcheck
+	if resp.StatusCode != 200 {
+		body, _ := io.ReadAll(resp.Body)
+		return nil, fmt.Errorf("status %d: %s", resp.StatusCode, body)
+	}
+	var mr ModelsResponse
+	if err := json.NewDecoder(resp.Body).Decode(&mr); err != nil {
+		return nil, err //nolint:wrapcheck
+	}
+	return &mr, nil
+}
+
+func hasFeature(features, feature string) bool {
+	if features == "" {
+		return false
+	}
+	for f := range strings.SplitSeq(features, ",") {
+		if strings.TrimSpace(f) == feature {
+			return true
+		}
+	}
+	return false
+}
+
+func hasModality(modalities, modality string) bool {
+	if modalities == "" {
+		return false
+	}
+	for m := range strings.SplitSeq(modalities, ",") {
+		if strings.TrimSpace(m) == modality {
+			return true
+		}
+	}
+	return false
+}
+
+func parseFloat(p *float64) float64 {
+	if p == nil {
+		return 0.0
+	}
+	return *p
+}
+
+func main() {
+	modelsResp, err := fetchAIHubMixModels()
+	if err != nil {
+		log.Fatal("Error fetching AIHubMix models:", err)
+	}
+
+	aiHubMixProvider := catwalk.Provider{
+		Name:        "AIHubMix",
+		ID:          catwalk.InferenceAIHubMix,
+		APIKey:      "$AIHUBMIX_API_KEY",
+		APIEndpoint: "https://aihubmix.com/v1",
+		Type:        catwalk.TypeOpenAICompat,
+		DefaultLargeModelID: "gpt-5",
+		DefaultSmallModelID: "gpt-5-nano",
+		Models:      []catwalk.Model{},
+		DefaultHeaders: map[string]string{
+			"APP-Code": "IUFF7106",
+		},
+	}
+
+	for _, model := range modelsResp.Data {
+		// Skip models with context window < 20000
+		if model.ContextLength < 20000 {
+			continue
+		}
+
+		// Parse pricing - API returns price per 1K tokens in USD
+		// Convert to price per 1M tokens
+		costIn := parseFloat(model.Pricing.Input) * 1000
+		costOut := parseFloat(model.Pricing.Output) * 1000
+		costInCached := parseFloat(model.Pricing.CacheWrite) * 1000
+		costOutCached := parseFloat(model.Pricing.CacheRead) * 1000
+
+		// Skip models with zero pricing or missing pricing
+		if costIn == 0 || costOut == 0 {
+			continue
+		}
+
+		// Check for text I/O support
+		if !hasModality(model.InputModalities, "text") {
+			continue
+		}
+
+		// Check reasoning capability
+		canReason := hasFeature(model.Features, "thinking")
+
+		// Check image support
+		supportsImages := hasModality(model.InputModalities, "image")
+
+		// Set default reasoning levels for models that can reason
+		var reasoningLevels []string
+		var defaultReasoning string
+		if canReason {
+			reasoningLevels = []string{"low", "medium", "high"}
+			defaultReasoning = "medium"
+		}
+
+		// Calculate default max tokens
+		defaultMaxTokens := model.MaxOutput
+		if defaultMaxTokens == 0 || defaultMaxTokens > model.ContextLength/2 {
+			defaultMaxTokens = model.ContextLength / 10
+		}
+
+		catwalkModel := catwalk.Model{
+			ID:                     model.ModelID,
+			Name:                   model.ModelID,
+			CostPer1MIn:            costIn,
+			CostPer1MOut:           costOut,
+			CostPer1MInCached:      costInCached,
+			CostPer1MOutCached:     costOutCached,
+			ContextWindow:          model.ContextLength,
+			DefaultMaxTokens:       defaultMaxTokens,
+			CanReason:              canReason,
+			ReasoningLevels:        reasoningLevels,
+			DefaultReasoningEffort: defaultReasoning,
+			SupportsImages:         supportsImages,
+		}
+
+		aiHubMixProvider.Models = append(aiHubMixProvider.Models, catwalkModel)
+		fmt.Printf("Added model %s with context window %d\n",
+			model.ModelID, model.ContextLength)
+	}
+
+	if len(aiHubMixProvider.Models) == 0 {
+		log.Fatal("No models found or no models met the criteria")
+	}
+
+	slices.SortFunc(aiHubMixProvider.Models, func(a, b catwalk.Model) int {
+		return strings.Compare(a.Name, b.Name)
+	})
+
+	data, err := json.MarshalIndent(aiHubMixProvider, "", "  ")
+	if err != nil {
+		log.Fatal("Error marshaling AIHubMix provider:", err)
+	}
+
+	if err := os.WriteFile("internal/providers/configs/aihubmix.json", data, 0o600); err != nil {
+		log.Fatal("Error writing AIHubMix provider config:", err)
+	}
+
+	fmt.Printf("\nSuccessfully wrote %d models to internal/providers/configs/aihubmix.json\n", len(aiHubMixProvider.Models))
+}
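
The generator's filtering hinges on the comma-separated feature and modality checks and on the per-1K to per-1M conversion (multiply by 1000). A minimal test sketch, hypothetical and not part of this change, exercising those helpers in the same package (file name and sample values are assumed):

// helpers_test.go (hypothetical sketch, not part of this change)
package main

import "testing"

func TestFeatureAndModalityChecks(t *testing.T) {
	if !hasFeature("vision, thinking", "thinking") {
		t.Error("expected the thinking feature to be detected")
	}
	if hasFeature("", "thinking") {
		t.Error("an empty feature list should match nothing")
	}
	if !hasModality("text,image", "image") {
		t.Error("expected the image modality to be detected")
	}
}

func TestPricingConversion(t *testing.T) {
	perThousand := 0.25 // assumed per-1K price as returned by the API
	if got := parseFloat(&perThousand) * 1000; got != 250 {
		t.Errorf("expected 250 per 1M tokens, got %v", got)
	}
	if parseFloat(nil) != 0 {
		t.Error("missing pricing should convert to 0")
	}
}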

internal/providers/configs/aihubmix.json

@@ -4,150 +4,1914 @@
   "api_key": "$AIHUBMIX_API_KEY",
   "api_endpoint": "https://aihubmix.com/v1",
   "type": "openai-compat",
-  "default_large_model_id": "claude-sonnet-4-5",
-  "default_small_model_id": "claude-3-5-haiku",
-  "default_headers": {
-    "APP-Code": "IUFF7106"
-  },
+  "default_large_model_id": "gpt-5",
+  "default_small_model_id": "gpt-5-nano",
   "models": [
     {
-      "id": "claude-sonnet-4-5",
-      "name": "Claude Sonnet 4.5",
-      "cost_per_1m_in": 3,
-      "cost_per_1m_out": 15,
-      "cost_per_1m_in_cached": 3.75,
-      "cost_per_1m_out_cached": 0.3,
-      "context_window": 200000,
-      "default_max_tokens": 50000,
+      "id": "AiHubmix-Phi-4-mini-reasoning",
+      "name": "AiHubmix-Phi-4-mini-reasoning",
+      "cost_per_1m_in": 120,
+      "cost_per_1m_out": 120,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 4000,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "AiHubmix-Phi-4-reasoning",
+      "name": "AiHubmix-Phi-4-reasoning",
+      "cost_per_1m_in": 200,
+      "cost_per_1m_out": 200,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 4000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "ByteDance-Seed/Seed-OSS-36B-Instruct",
+      "name": "ByteDance-Seed/Seed-OSS-36B-Instruct",
+      "cost_per_1m_in": 200,
+      "cost_per_1m_out": 534,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 256000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-R1",
+      "name": "DeepSeek-R1",
+      "cost_per_1m_in": 400,
+      "cost_per_1m_out": 2000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1638000,
+      "default_max_tokens": 163800,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-V3",
+      "name": "DeepSeek-V3",
+      "cost_per_1m_in": 272,
+      "cost_per_1m_out": 1088,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1638000,
+      "default_max_tokens": 163800,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-V3-Fast",
+      "name": "DeepSeek-V3-Fast",
+      "cost_per_1m_in": 560,
+      "cost_per_1m_out": 2240,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 32000,
+      "default_max_tokens": 3200,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-V3.1-Fast",
+      "name": "DeepSeek-V3.1-Fast",
+      "cost_per_1m_in": 1096,
+      "cost_per_1m_out": 3288,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 163000,
+      "default_max_tokens": 16300,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-V3.1-Terminus",
+      "name": "DeepSeek-V3.1-Terminus",
+      "cost_per_1m_in": 560,
+      "cost_per_1m_out": 1680,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 160000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-V3.1-Think",
+      "name": "DeepSeek-V3.1-Think",
+      "cost_per_1m_in": 560,
+      "cost_per_1m_out": 1680,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 32000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-V3.2-Exp",
+      "name": "DeepSeek-V3.2-Exp",
+      "cost_per_1m_in": 274,
+      "cost_per_1m_out": 411,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 27.400000000000002,
+      "context_window": 163000,
+      "default_max_tokens": 16300,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-V3.2-Exp-Think",
+      "name": "DeepSeek-V3.2-Exp-Think",
+      "cost_per_1m_in": 274,
+      "cost_per_1m_out": 411,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 27.400000000000002,
+      "context_window": 131000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "ERNIE-X1.1-Preview",
+      "name": "ERNIE-X1.1-Preview",
+      "cost_per_1m_in": 136,
+      "cost_per_1m_out": 544,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 119000,
+      "default_max_tokens": 11900,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "Kimi-K2-0905",
+      "name": "Kimi-K2-0905",
+      "cost_per_1m_in": 548,
+      "cost_per_1m_out": 2192,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 262144,
+      "default_max_tokens": 26214,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "aihub-Phi-4-mini-instruct",
+      "name": "aihub-Phi-4-mini-instruct",
+      "cost_per_1m_in": 120,
+      "cost_per_1m_out": 480,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 4000,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "aihub-Phi-4-multimodal-instruct",
+      "name": "aihub-Phi-4-multimodal-instruct",
+      "cost_per_1m_in": 120,
+      "cost_per_1m_out": 480,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 4000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-3-5-haiku",
+      "name": "claude-3-5-haiku",
+      "cost_per_1m_in": 1100,
+      "cost_per_1m_out": 5500,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 200000,
+      "default_max_tokens": 8192,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-3-5-sonnet",
+      "name": "claude-3-5-sonnet",
+      "cost_per_1m_in": 3300,
+      "cost_per_1m_out": 16500,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 200000,
+      "default_max_tokens": 8192,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-3-5-sonnet-20240620",
+      "name": "claude-3-5-sonnet-20240620",
+      "cost_per_1m_in": 3300,
+      "cost_per_1m_out": 16500,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 200000,
+      "default_max_tokens": 8192,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-3-7-sonnet",
+      "name": "claude-3-7-sonnet",
+      "cost_per_1m_in": 3300,
+      "cost_per_1m_out": 16500,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 200000,
+      "default_max_tokens": 20000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-haiku-4-5",
+      "name": "claude-haiku-4-5",
+      "cost_per_1m_in": 1100,
+      "cost_per_1m_out": 5500,
+      "cost_per_1m_in_cached": 1375,
+      "cost_per_1m_out_cached": 110,
+      "context_window": 204800,
+      "default_max_tokens": 20480,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-opus-4-0",
+      "name": "claude-opus-4-0",
+      "cost_per_1m_in": 16500,
+      "cost_per_1m_out": 82500,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 200000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-opus-4-1",
+      "name": "claude-opus-4-1",
+      "cost_per_1m_in": 16500,
+      "cost_per_1m_out": 82500,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 200000,
+      "default_max_tokens": 32000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-opus-4-5",
+      "name": "claude-opus-4-5",
+      "cost_per_1m_in": 5000,
+      "cost_per_1m_out": 25000,
+      "cost_per_1m_in_cached": 6250,
+      "cost_per_1m_out_cached": 500,
+      "context_window": 200000,
+      "default_max_tokens": 32000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-opus-4-5-think",
+      "name": "claude-opus-4-5-think",
+      "cost_per_1m_in": 5000,
+      "cost_per_1m_out": 25000,
+      "cost_per_1m_in_cached": 6250,
+      "cost_per_1m_out_cached": 500,
+      "context_window": 200000,
+      "default_max_tokens": 32000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-sonnet-4-0",
+      "name": "claude-sonnet-4-0",
+      "cost_per_1m_in": 3300,
+      "cost_per_1m_out": 16500,
+      "cost_per_1m_in_cached": 4125,
+      "cost_per_1m_out_cached": 330,
+      "context_window": 1000000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-sonnet-4-5",
+      "name": "claude-sonnet-4-5",
+      "cost_per_1m_in": 3300,
+      "cost_per_1m_out": 16500,
+      "cost_per_1m_in_cached": 4125,
+      "cost_per_1m_out_cached": 330,
+      "context_window": 1000000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-sonnet-4-5-think",
+      "name": "claude-sonnet-4-5-think",
+      "cost_per_1m_in": 3300,
+      "cost_per_1m_out": 16500,
+      "cost_per_1m_in_cached": 4125,
+      "cost_per_1m_out_cached": 330,
+      "context_window": 1000000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "coding-minimax-m2",
+      "name": "coding-minimax-m2",
+      "cost_per_1m_in": 200,
+      "cost_per_1m_out": 200,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 204800,
+      "default_max_tokens": 13100,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "coding-minimax-m2.1",
+      "name": "coding-minimax-m2.1",
+      "cost_per_1m_in": 200,
+      "cost_per_1m_out": 200,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 204800,
+      "default_max_tokens": 13100,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "deepseek-math-v2",
+      "name": "deepseek-math-v2",
+      "cost_per_1m_in": 492,
+      "cost_per_1m_out": 1968,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 98.4,
+      "context_window": 163000,
+      "default_max_tokens": 16300,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "deepseek-v3.2",
+      "name": "deepseek-v3.2",
+      "cost_per_1m_in": 302,
+      "cost_per_1m_out": 453,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 30.200000000000003,
+      "context_window": 128000,
+      "default_max_tokens": 64000,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "deepseek-v3.2-fast",
+      "name": "deepseek-v3.2-fast",
+      "cost_per_1m_in": 1096,
+      "cost_per_1m_out": 3288,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 1096,
+      "context_window": 128000,
+      "default_max_tokens": 12800,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "deepseek-v3.2-speciale",
+      "name": "deepseek-v3.2-speciale",
+      "cost_per_1m_in": 580,
+      "cost_per_1m_out": 1680.028,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 12800,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "deepseek-v3.2-think",
+      "name": "deepseek-v3.2-think",
+      "cost_per_1m_in": 302,
+      "cost_per_1m_out": 453,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 30.200000000000003,
+      "context_window": 128000,
+      "default_max_tokens": 64000,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "doubao-seed-1-6",
+      "name": "doubao-seed-1-6",
+      "cost_per_1m_in": 180,
+      "cost_per_1m_out": 1800,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 36,
+      "context_window": 256000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "doubao-seed-1-6-flash",
+      "name": "doubao-seed-1-6-flash",
+      "cost_per_1m_in": 44,
+      "cost_per_1m_out": 440,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 8.8,
+      "context_window": 256000,
+      "default_max_tokens": 33000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "doubao-seed-1-6-lite",
+      "name": "doubao-seed-1-6-lite",
+      "cost_per_1m_in": 82,
+      "cost_per_1m_out": 656,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 16.400000000000002,
+      "context_window": 256000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "doubao-seed-1-6-thinking",
+      "name": "doubao-seed-1-6-thinking",
+      "cost_per_1m_in": 180,
+      "cost_per_1m_out": 1800,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 36,
+      "context_window": 256000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "doubao-seed-1-8",
+      "name": "doubao-seed-1-8",
+      "cost_per_1m_in": 109.59,
+      "cost_per_1m_out": 273.975,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 21.918,
+      "context_window": 256000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "ernie-4.5",
+      "name": "ernie-4.5",
+      "cost_per_1m_in": 68,
+      "cost_per_1m_out": 272,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 160000,
+      "default_max_tokens": 64000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "ernie-4.5-turbo-latest",
+      "name": "ernie-4.5-turbo-latest",
+      "cost_per_1m_in": 110,
+      "cost_per_1m_out": 440,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 135000,
+      "default_max_tokens": 12000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "ernie-4.5-turbo-vl",
+      "name": "ernie-4.5-turbo-vl",
+      "cost_per_1m_in": 400,
+      "cost_per_1m_out": 1200,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 139000,
+      "default_max_tokens": 16000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "ernie-5.0-thinking-exp",
+      "name": "ernie-5.0-thinking-exp",
+      "cost_per_1m_in": 821.92,
+      "cost_per_1m_out": 3287.68,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 821.92,
+      "context_window": 119000,
+      "default_max_tokens": 11900,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "ernie-5.0-thinking-preview",
+      "name": "ernie-5.0-thinking-preview",
+      "cost_per_1m_in": 822,
+      "cost_per_1m_out": 3288,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 822,
+      "context_window": 183000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "ernie-x1-turbo",
+      "name": "ernie-x1-turbo",
+      "cost_per_1m_in": 136,
+      "cost_per_1m_out": 544,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 50500,
+      "default_max_tokens": 5050,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.0-flash",
+      "name": "gemini-2.0-flash",
+      "cost_per_1m_in": 100,
+      "cost_per_1m_out": 400,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 25,
+      "context_window": 1048576,
+      "default_max_tokens": 8192,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash",
+      "name": "gemini-2.5-flash",
+      "cost_per_1m_in": 300,
+      "cost_per_1m_out": 2499,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 30,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash-lite",
+      "name": "gemini-2.5-flash-lite",
+      "cost_per_1m_in": 100,
+      "cost_per_1m_out": 400,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 10,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash-lite-preview-09-2025",
+      "name": "gemini-2.5-flash-lite-preview-09-2025",
+      "cost_per_1m_in": 100,
+      "cost_per_1m_out": 400,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 10,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash-nothink",
+      "name": "gemini-2.5-flash-nothink",
+      "cost_per_1m_in": 300,
+      "cost_per_1m_out": 2499,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 30,
+      "context_window": 1047576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash-preview-05-20-nothink",
+      "name": "gemini-2.5-flash-preview-05-20-nothink",
+      "cost_per_1m_in": 300,
+      "cost_per_1m_out": 2499,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 30,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash-preview-05-20-search",
+      "name": "gemini-2.5-flash-preview-05-20-search",
+      "cost_per_1m_in": 300,
+      "cost_per_1m_out": 2499,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 30,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash-preview-09-2025",
+      "name": "gemini-2.5-flash-preview-09-2025",
+      "cost_per_1m_in": 300,
+      "cost_per_1m_out": 2499,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 30,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash-search",
+      "name": "gemini-2.5-flash-search",
+      "cost_per_1m_in": 300,
+      "cost_per_1m_out": 2499,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 30,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-pro",
+      "name": "gemini-2.5-pro",
+      "cost_per_1m_in": 1250,
+      "cost_per_1m_out": 10000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 125,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-pro-preview-05-06",
+      "name": "gemini-2.5-pro-preview-05-06",
+      "cost_per_1m_in": 1250,
+      "cost_per_1m_out": 10000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 125,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-pro-preview-06-05",
+      "name": "gemini-2.5-pro-preview-06-05",
+      "cost_per_1m_in": 1250,
+      "cost_per_1m_out": 10000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 125,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-pro-search",
+      "name": "gemini-2.5-pro-search",
+      "cost_per_1m_in": 1250,
+      "cost_per_1m_out": 10000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 125,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-3-flash-preview",
+      "name": "gemini-3-flash-preview",
+      "cost_per_1m_in": 500,
+      "cost_per_1m_out": 3000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 50,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "glm-4.5v",
+      "name": "glm-4.5v",
+      "cost_per_1m_in": 274,
+      "cost_per_1m_out": 822,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 274,
+      "context_window": 64000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "glm-4.6v",
+      "name": "glm-4.6v",
+      "cost_per_1m_in": 137,
+      "cost_per_1m_out": 411,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 27.400000000000002,
+      "context_window": 128000,
+      "default_max_tokens": 12800,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "glm-4.7",
+      "name": "glm-4.7",
+      "cost_per_1m_in": 273.974,
+      "cost_per_1m_out": 1095.896,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 54.795,
+      "context_window": 200000,
+      "default_max_tokens": 20000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "gpt-4.1",
+      "name": "gpt-4.1",
+      "cost_per_1m_in": 2000,
+      "cost_per_1m_out": 8000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 500,
+      "context_window": 1047576,
+      "default_max_tokens": 32768,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4.1-mini",
+      "name": "gpt-4.1-mini",
+      "cost_per_1m_in": 400,
+      "cost_per_1m_out": 1600,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 100,
+      "context_window": 1047576,
+      "default_max_tokens": 32768,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4.1-nano",
+      "name": "gpt-4.1-nano",
+      "cost_per_1m_in": 100,
+      "cost_per_1m_out": 400,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 25,
+      "context_window": 1047576,
+      "default_max_tokens": 32768,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4o",
+      "name": "gpt-4o",
+      "cost_per_1m_in": 2500,
+      "cost_per_1m_out": 10000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 1250,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4o-2024-11-20",
+      "name": "gpt-4o-2024-11-20",
+      "cost_per_1m_in": 2500,
+      "cost_per_1m_out": 10000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 1250,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4o-audio-preview",
+      "name": "gpt-4o-audio-preview",
+      "cost_per_1m_in": 2500,
+      "cost_per_1m_out": 10000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "gpt-4o-mini",
+      "name": "gpt-4o-mini",
+      "cost_per_1m_in": 150,
+      "cost_per_1m_out": 600,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 75,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4o-mini-search-preview",
+      "name": "gpt-4o-mini-search-preview",
+      "cost_per_1m_in": 150,
+      "cost_per_1m_out": 600,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 75,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4o-search-preview",
+      "name": "gpt-4o-search-preview",
+      "cost_per_1m_in": 2500,
+      "cost_per_1m_out": 10000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 1250,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5",
+      "name": "gpt-5",
+      "cost_per_1m_in": 1250,
+      "cost_per_1m_out": 10000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 125,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5-chat-latest",
+      "name": "gpt-5-chat-latest",
+      "cost_per_1m_in": 1250,
+      "cost_per_1m_out": 10000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 125,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5-codex",
+      "name": "gpt-5-codex",
+      "cost_per_1m_in": 1250,
+      "cost_per_1m_out": 10000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 125,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5-mini",
+      "name": "gpt-5-mini",
+      "cost_per_1m_in": 250,
+      "cost_per_1m_out": 2000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 25,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
       "can_reason": true,
-      "supports_attachments": true
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "claude-opus-4-1",
-      "name": "Claude Opus 4.1",
-      "cost_per_1m_in": 15,
-      "cost_per_1m_out": 75,
-      "cost_per_1m_in_cached": 18.75,
-      "cost_per_1m_out_cached": 1.5,
-      "context_window": 200000,
-      "default_max_tokens": 32000,
+      "id": "gpt-5-nano",
+      "name": "gpt-5-nano",
+      "cost_per_1m_in": 50,
+      "cost_per_1m_out": 400,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 5,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
       "can_reason": true,
-      "supports_attachments": true
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "claude-3-5-haiku",
-      "name": "Claude 3.5 Haiku",
-      "cost_per_1m_in": 0.7999999999999999,
-      "cost_per_1m_out": 4,
-      "cost_per_1m_in_cached": 1,
-      "cost_per_1m_out_cached": 0.08,
-      "context_window": 200000,
-      "default_max_tokens": 5000,
+      "id": "gpt-5-pro",
+      "name": "gpt-5-pro",
+      "cost_per_1m_in": 15000,
+      "cost_per_1m_out": 120000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.1",
+      "name": "gpt-5.1",
+      "cost_per_1m_in": 1250,
+      "cost_per_1m_out": 10000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 125,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.1-chat-latest",
+      "name": "gpt-5.1-chat-latest",
+      "cost_per_1m_in": 1250,
+      "cost_per_1m_out": 10000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 125,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
       "can_reason": false,
-      "supports_attachments": true
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "gemini-2.5-pro",
-      "name": "Gemini 2.5 Pro",
-      "cost_per_1m_in": 1.25,
-      "cost_per_1m_out": 10,
-      "cost_per_1m_in_cached": 1.625,
-      "cost_per_1m_out_cached": 0.31,
-      "context_window": 1048576,
-      "default_max_tokens": 50000,
+      "id": "gpt-5.1-codex",
+      "name": "gpt-5.1-codex",
+      "cost_per_1m_in": 1250,
+      "cost_per_1m_out": 10000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 125,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
       "can_reason": true,
-      "supports_attachments": true
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "gemini-2.5-flash",
-      "name": "Gemini 2.5 Flash",
-      "cost_per_1m_in": 0.3,
-      "cost_per_1m_out": 2.5,
-      "cost_per_1m_in_cached": 0.3833,
-      "cost_per_1m_out_cached": 0.075,
-      "context_window": 1048576,
-      "default_max_tokens": 50000,
+      "id": "gpt-5.1-codex-max",
+      "name": "gpt-5.1-codex-max",
+      "cost_per_1m_in": 1250,
+      "cost_per_1m_out": 10000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 125,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
       "can_reason": true,
-      "supports_attachments": true
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "gpt-5",
-      "name": "GPT-5",
-      "cost_per_1m_in": 1.25,
-      "cost_per_1m_out": 10,
-      "cost_per_1m_in_cached": 0.25,
-      "cost_per_1m_out_cached": 0.25,
+      "id": "gpt-5.1-codex-mini",
+      "name": "gpt-5.1-codex-mini",
+      "cost_per_1m_in": 250,
+      "cost_per_1m_out": 2000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 25,
       "context_window": 400000,
       "default_max_tokens": 128000,
       "can_reason": true,
-      "has_reasoning_efforts": true,
-      "default_reasoning_effort": "minimal",
-      "supports_attachments": true
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "gpt-5-mini",
-      "name": "GPT-5 Mini",
-      "cost_per_1m_in": 0.25,
-      "cost_per_1m_out": 2,
-      "cost_per_1m_in_cached": 0.025,
-      "cost_per_1m_out_cached": 0.025,
+      "id": "gpt-5.2",
+      "name": "gpt-5.2",
+      "cost_per_1m_in": 1750,
+      "cost_per_1m_out": 14000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 175,
       "context_window": 400000,
       "default_max_tokens": 128000,
       "can_reason": true,
-      "has_reasoning_efforts": true,
-      "default_reasoning_effort": "low",
-      "supports_attachments": true
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "gpt-5-nano",
-      "name": "GPT-5 Nano",
-      "cost_per_1m_in": 0.05,
-      "cost_per_1m_out": 0.4,
-      "cost_per_1m_in_cached": 0.005,
-      "cost_per_1m_out_cached": 0.005,
+      "id": "gpt-5.2-chat-latest",
+      "name": "gpt-5.2-chat-latest",
+      "cost_per_1m_in": 1750,
+      "cost_per_1m_out": 14000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 175,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.2-codex",
+      "name": "gpt-5.2-codex",
+      "cost_per_1m_in": 1750,
+      "cost_per_1m_out": 14000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 175,
       "context_window": 400000,
       "default_max_tokens": 128000,
       "can_reason": true,
-      "has_reasoning_efforts": true,
-      "default_reasoning_effort": "low",
-      "supports_attachments": true
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "Kimi-K2-0905",
-      "name": "Kimi K2 0905",
-      "cost_per_1m_in": 0.55,
-      "cost_per_1m_out": 2.19,
+      "id": "gpt-5.2-high",
+      "name": "gpt-5.2-high",
+      "cost_per_1m_in": 1750,
+      "cost_per_1m_out": 14000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 175,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.2-low",
+      "name": "gpt-5.2-low",
+      "cost_per_1m_in": 1750,
+      "cost_per_1m_out": 14000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 175,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.2-pro",
+      "name": "gpt-5.2-pro",
+      "cost_per_1m_in": 21000,
+      "cost_per_1m_out": 168000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 2100,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-oss-120b",
+      "name": "gpt-oss-120b",
+      "cost_per_1m_in": 180,
+      "cost_per_1m_out": 900,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 131072,
+      "default_max_tokens": 32768,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "gpt-oss-20b",
+      "name": "gpt-oss-20b",
+      "cost_per_1m_in": 110,
+      "cost_per_1m_out": 550,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 12800,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "grok-4-1-fast-non-reasoning",
+      "name": "grok-4-1-fast-non-reasoning",
+      "cost_per_1m_in": 200,
+      "cost_per_1m_out": 500,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 50,
+      "context_window": 2000000,
+      "default_max_tokens": 200000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "grok-4-1-fast-reasoning",
+      "name": "grok-4-1-fast-reasoning",
+      "cost_per_1m_in": 200,
+      "cost_per_1m_out": 500,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 50,
+      "context_window": 2000000,
+      "default_max_tokens": 200000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "grok-code-fast-1",
+      "name": "grok-code-fast-1",
+      "cost_per_1m_in": 200,
+      "cost_per_1m_out": 500,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 50,
+      "context_window": 2000000,
+      "default_max_tokens": 200000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "jina-deepsearch-v1",
+      "name": "jina-deepsearch-v1",
+      "cost_per_1m_in": 50,
+      "cost_per_1m_out": 50,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1000000,
+      "default_max_tokens": 100000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "kat-dev",
+      "name": "kat-dev",
+      "cost_per_1m_in": 137,
+      "cost_per_1m_out": 548,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 12800,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "kimi-k2-0711",
+      "name": "kimi-k2-0711",
+      "cost_per_1m_in": 540,
+      "cost_per_1m_out": 2160,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 131000,
+      "default_max_tokens": 13100,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "kimi-k2-thinking",
+      "name": "kimi-k2-thinking",
+      "cost_per_1m_in": 548,
+      "cost_per_1m_out": 2192,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 137,
+      "context_window": 262144,
+      "default_max_tokens": 26214,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "kimi-k2-turbo-preview",
+      "name": "kimi-k2-turbo-preview",
+      "cost_per_1m_in": 1200,
+      "cost_per_1m_out": 4800,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 300,
+      "context_window": 262144,
+      "default_max_tokens": 26214,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "llama-4-maverick",
+      "name": "llama-4-maverick",
+      "cost_per_1m_in": 200,
+      "cost_per_1m_out": 200,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1048576,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "llama-4-scout",
+      "name": "llama-4-scout",
+      "cost_per_1m_in": 200,
+      "cost_per_1m_out": 200,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 131000,
+      "default_max_tokens": 13100,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "mistral-large-3",
+      "name": "mistral-large-3",
+      "cost_per_1m_in": 500,
+      "cost_per_1m_out": 1500,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 256000,
-      "default_max_tokens": 10000,
+      "default_max_tokens": 25600,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "o3",
+      "name": "o3",
+      "cost_per_1m_in": 2000,
+      "cost_per_1m_out": 8000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 500,
+      "context_window": 200000,
+      "default_max_tokens": 100000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "o3-mini",
+      "name": "o3-mini",
+      "cost_per_1m_in": 1100,
+      "cost_per_1m_out": 4400,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 550,
+      "context_window": 200000,
+      "default_max_tokens": 100000,
       "can_reason": true,
-      "has_reasoning_efforts": true,
-      "default_reasoning_effort": "medium"
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "o3-pro",
+      "name": "o3-pro",
+      "cost_per_1m_in": 20000,
+      "cost_per_1m_out": 80000,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 20000,
+      "context_window": 200000,
+      "default_max_tokens": 100000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "o4-mini",
+      "name": "o4-mini",
+      "cost_per_1m_in": 1100,
+      "cost_per_1m_out": 4400,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 275,
+      "context_window": 200000,
+      "default_max_tokens": 100000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "glm-4.6",
-      "name": "GLM-4.6",
-      "cost_per_1m_in": 0.6,
-      "cost_per_1m_out": 2.2,
-      "cost_per_1m_in_cached": 0.11,
+      "id": "qwen3-235b-a22b",
+      "name": "qwen3-235b-a22b",
+      "cost_per_1m_in": 280,
+      "cost_per_1m_out": 1120,
+      "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 204800,
-      "default_max_tokens": 131072,
+      "context_window": 131100,
+      "default_max_tokens": 13110,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-235b-a22b-instruct-2507",
+      "name": "qwen3-235b-a22b-instruct-2507",
+      "cost_per_1m_in": 280,
+      "cost_per_1m_out": 1120,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 262144,
+      "default_max_tokens": 26214,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-235b-a22b-thinking-2507",
+      "name": "qwen3-235b-a22b-thinking-2507",
+      "cost_per_1m_in": 280,
+      "cost_per_1m_out": 2800,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 262144,
+      "default_max_tokens": 26214,
       "can_reason": true,
-      "has_reasoning_efforts": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
       "default_reasoning_effort": "medium",
-      "supports_attachments": false
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-coder-30b-a3b-instruct",
+      "name": "qwen3-coder-30b-a3b-instruct",
+      "cost_per_1m_in": 200,
+      "cost_per_1m_out": 800,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 200,
+      "context_window": 2000000,
+      "default_max_tokens": 262000,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
     },
     {
       "id": "qwen3-coder-480b-a35b-instruct",
-      "name": "Qwen 3 480B Coder",
-      "cost_per_1m_in": 0.82,
-      "cost_per_1m_out": 3.29,
-      "context_window": 131072,
+      "name": "qwen3-coder-480b-a35b-instruct",
+      "cost_per_1m_in": 820,
+      "cost_per_1m_out": 3280,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 820,
+      "context_window": 262000,
+      "default_max_tokens": 26200,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "qwen3-coder-flash",
+      "name": "qwen3-coder-flash",
+      "cost_per_1m_in": 136,
+      "cost_per_1m_out": 544,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 136,
+      "context_window": 256000,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "qwen3-coder-plus",
+      "name": "qwen3-coder-plus",
+      "cost_per_1m_in": 540,
+      "cost_per_1m_out": 2160,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 108,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "qwen3-coder-plus-2025-07-22",
+      "name": "qwen3-coder-plus-2025-07-22",
+      "cost_per_1m_in": 540,
+      "cost_per_1m_out": 2160,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 540,
+      "context_window": 128000,
+      "default_max_tokens": 12800,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "qwen3-max",
+      "name": "qwen3-max",
+      "cost_per_1m_in": 822,
+      "cost_per_1m_out": 3288,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 822,
+      "context_window": 262144,
       "default_max_tokens": 65536,
       "can_reason": false,
-      "supports_attachments": false
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-next-80b-a3b-instruct",
+      "name": "qwen3-next-80b-a3b-instruct",
+      "cost_per_1m_in": 138,
+      "cost_per_1m_out": 552,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 256000,
+      "default_max_tokens": 25600,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-next-80b-a3b-thinking",
+      "name": "qwen3-next-80b-a3b-thinking",
+      "cost_per_1m_in": 138,
+      "cost_per_1m_out": 1380,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 256000,
+      "default_max_tokens": 25600,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-vl-235b-a22b-instruct",
+      "name": "qwen3-vl-235b-a22b-instruct",
+      "cost_per_1m_in": 274,
+      "cost_per_1m_out": 1096,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 131000,
+      "default_max_tokens": 33000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-vl-235b-a22b-thinking",
+      "name": "qwen3-vl-235b-a22b-thinking",
+      "cost_per_1m_in": 274,
+      "cost_per_1m_out": 2740,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 131000,
+      "default_max_tokens": 33000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-vl-30b-a3b-instruct",
+      "name": "qwen3-vl-30b-a3b-instruct",
+      "cost_per_1m_in": 102.8,
+      "cost_per_1m_out": 411.2,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-vl-30b-a3b-thinking",
+      "name": "qwen3-vl-30b-a3b-thinking",
+      "cost_per_1m_in": 102.8,
+      "cost_per_1m_out": 1028,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 32000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-vl-plus",
+      "name": "qwen3-vl-plus",
+      "cost_per_1m_in": 137,
+      "cost_per_1m_out": 1370,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 27.400000000000002,
+      "context_window": 256000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
     }
-  ]
-}
+  ],
+  "default_headers": {
+    "APP-Code": "IUFF7106"
+  }
+}
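
To make the generated config concrete, here is a minimal sketch (not part of this change) of how a consumer might load internal/providers/configs/aihubmix.json and attach the new default_headers to an outgoing request. The providerConfig/model struct names, the top-level "models" key, and the chat endpoint URL are illustrative assumptions; only the per-model JSON keys and the default_headers block come from the file above.

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"os"
)

// model mirrors the per-model keys visible in the generated JSON above.
type model struct {
	ID                     string   `json:"id"`
	Name                   string   `json:"name"`
	CostPer1MIn            float64  `json:"cost_per_1m_in"`
	CostPer1MOut           float64  `json:"cost_per_1m_out"`
	ContextWindow          int64    `json:"context_window"`
	DefaultMaxTokens       int64    `json:"default_max_tokens"`
	CanReason              bool     `json:"can_reason"`
	ReasoningLevels        []string `json:"reasoning_levels"`
	DefaultReasoningEffort string   `json:"default_reasoning_effort"`
	SupportsAttachments    bool     `json:"supports_attachments"`
}

// providerConfig assumes the models live under a top-level "models" key,
// as in the other provider configs; "default_headers" comes from the block
// added at the end of this file.
type providerConfig struct {
	Models         []model           `json:"models"`
	DefaultHeaders map[string]string `json:"default_headers"`
}

func main() {
	raw, err := os.ReadFile("internal/providers/configs/aihubmix.json")
	if err != nil {
		log.Fatal(err)
	}
	var cfg providerConfig
	if err := json.Unmarshal(raw, &cfg); err != nil {
		log.Fatal(err)
	}

	// A consumer would presumably copy every default header (here the
	// APP-Code) onto each request it sends to the provider. The endpoint
	// URL below is a placeholder, not taken from this change.
	req, err := http.NewRequest(http.MethodPost, "https://aihubmix.com/v1/chat/completions", nil)
	if err != nil {
		log.Fatal(err)
	}
	for k, v := range cfg.DefaultHeaders {
		req.Header.Set(k, v)
	}
	fmt.Printf("loaded %d models; APP-Code header: %q\n", len(cfg.Models), req.Header.Get("APP-Code"))
}

Note that every reasoning-capable entry in this file advertises reasoning_levels of low/medium/high with a "medium" default, so reading can_reason, reasoning_levels, and default_reasoning_effort is enough to drive effort selection on the consumer side.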