feat: aihubmix cmd to update the models (#158)

Created by Carlos Alexandro Becker and Andrey Nering

Co-authored-by: Andrey Nering <andreynering@users.noreply.github.com>

Change summary

.github/workflows/update.yml             |    1 
CRUSH.md                                 |    2 
Taskfile.yaml                            |    6 
cmd/aihubmix/main.go                     |  189 +
internal/providers/configs/aihubmix.json | 2514 ++++++++++++++++++++++++-
5 files changed, 2,607 insertions(+), 105 deletions(-)

Detailed changes

.github/workflows/update.yml 🔗

@@ -19,6 +19,7 @@ jobs:
           go-version-file: go.mod
       - name: Generate provider configurations
         run: |
+          go run ./cmd/aihubmix/main.go
           go run ./cmd/huggingface/main.go
           go run ./cmd/ionet/main.go
           go run ./cmd/openrouter/main.go

CRUSH.md 🔗

@@ -19,6 +19,7 @@
 - HTTP: Always set timeouts, use context, defer close response bodies
 - JSON: Use `json.MarshalIndent` for pretty output, validate unmarshaling
 - File permissions: Use 0o600 for sensitive config files
+- Always format code with `gofumpt`
 
 ## Adding more provider commands
 
@@ -36,4 +37,3 @@
 For `zai`, we'll need to grab the model list and capabilities from `https://docs.z.ai/guides/overview/overview`.
 
 That page does not contain the exact `context_window` and `default_max_tokens` though. We can grab the exact value from `./internal/providers/configs/openrouter.json`.
-

Taskfile.yaml 🔗

@@ -57,6 +57,7 @@ tasks:
     desc: Generate all provider configurations
     aliases: [generate, gen]
     cmds:
+      - task: gen:aihubmix
       - task: gen:copilot
       - task: gen:huggingface
       - task: gen:ionet
@@ -64,6 +65,11 @@ tasks:
       - task: gen:synthetic
       - task: gen:vercel
 
+  gen:aihubmix:
+    desc: Generate aihubmix provider configurations
+    cmds:
+      - go run cmd/aihubmix/main.go
+
   gen:copilot:
     desc: Generate copilot provider configurations
     cmds:

cmd/aihubmix/main.go 🔗

@@ -0,0 +1,189 @@
+// Package main provides a command-line tool to fetch models from AIHubMix
+// and generate a configuration file for the provider.
+package main
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"io"
+	"log"
+	"net/http"
+	"os"
+	"slices"
+	"strings"
+	"time"
+
+	"charm.land/catwalk/pkg/catwalk"
+)
+
+// APIModel represents a model from the AIHubMix API.
+type APIModel struct {
+	ModelID         string  `json:"model_id"`
+	ModelName       string  `json:"model_name"`
+	Desc            string  `json:"desc"`
+	Pricing         Pricing `json:"pricing"`
+	Types           string  `json:"types"`
+	Features        string  `json:"features"`
+	InputModalities string  `json:"input_modalities"`
+	MaxOutput       int64   `json:"max_output"`
+	ContextLength   int64   `json:"context_length"`
+}
+
+// Pricing contains the pricing information from the API.
+type Pricing struct {
+	Input      *float64 `json:"input"`
+	Output     *float64 `json:"output"`
+	CacheRead  *float64 `json:"cache_read"`
+	CacheWrite *float64 `json:"cache_write"`
+}
+
+const (
+	minContextWindow  = 20000
+	defaultLargeModel = "gpt-5"
+	defaultSmallModel = "gpt-5-nano"
+	maxTokensFactor   = 10
+)
+
+// ModelsResponse is the response structure for the models API.
+type ModelsResponse struct {
+	Data    []APIModel `json:"data"`
+	Message string     `json:"message"`
+	Success bool       `json:"success"`
+}
+
+func fetchAIHubMixModels() (*ModelsResponse, error) {
+	req, err := http.NewRequestWithContext(
+		context.Background(),
+		"GET",
+		"https://aihubmix.com/api/v1/models?type=llm",
+		nil,
+	)
+	if err != nil {
+		return nil, fmt.Errorf("creating request: %w", err)
+	}
+	req.Header.Set("User-Agent", "Crush-Client/1.0")
+
+	client := &http.Client{Timeout: 30 * time.Second}
+	resp, err := client.Do(req)
+	if err != nil {
+		return nil, fmt.Errorf("fetching models: %w", err)
+	}
+	defer resp.Body.Close() //nolint:errcheck
+
+	if resp.StatusCode != http.StatusOK {
+		body, _ := io.ReadAll(resp.Body)
+		return nil, fmt.Errorf("unexpected status %d: %s", resp.StatusCode, body)
+	}
+
+	var mr ModelsResponse
+	if err := json.NewDecoder(resp.Body).Decode(&mr); err != nil {
+		return nil, fmt.Errorf("parsing response: %w", err)
+	}
+	return &mr, nil
+}
+
+func hasField(s, field string) bool {
+	if s == "" {
+		return false
+	}
+	for item := range strings.SplitSeq(s, ",") {
+		if strings.TrimSpace(item) == field {
+			return true
+		}
+	}
+	return false
+}
+
+func parseFloat(p *float64) float64 {
+	if p == nil {
+		return 0
+	}
+	return *p
+}
+
+func calculateMaxTokens(contextLength, maxOutput, factor int64) int64 {
+	if maxOutput == 0 || maxOutput > contextLength/2 {
+		return contextLength / factor
+	}
+	return maxOutput
+}
+
+func buildReasoningConfig(canReason bool) ([]string, string) {
+	if !canReason {
+		return nil, ""
+	}
+	return []string{"low", "medium", "high"}, "medium"
+}
+
+func main() {
+	modelsResp, err := fetchAIHubMixModels()
+	if err != nil {
+		log.Fatal("Error fetching AIHubMix models:", err)
+	}
+
+	aiHubMixProvider := catwalk.Provider{
+		Name:                "AIHubMix",
+		ID:                  catwalk.InferenceAIHubMix,
+		APIKey:              "$AIHUBMIX_API_KEY",
+		APIEndpoint:         "https://aihubmix.com/v1",
+		Type:                catwalk.TypeOpenAICompat,
+		DefaultLargeModelID: defaultLargeModel,
+		DefaultSmallModelID: defaultSmallModel,
+		DefaultHeaders: map[string]string{
+			"APP-Code": "IUFF7106",
+		},
+	}
+
+	for _, model := range modelsResp.Data {
+		if model.ContextLength < minContextWindow {
+			continue
+		}
+		if !hasField(model.InputModalities, "text") {
+			continue
+		}
+
+		canReason := hasField(model.Features, "thinking")
+		supportsImages := hasField(model.InputModalities, "image")
+
+		reasoningLevels, defaultReasoning := buildReasoningConfig(canReason)
+		maxTokens := calculateMaxTokens(model.ContextLength, model.MaxOutput, maxTokensFactor)
+
+		aiHubMixProvider.Models = append(aiHubMixProvider.Models, catwalk.Model{
+			ID:                     model.ModelID,
+			Name:                   model.ModelName,
+			CostPer1MIn:            parseFloat(model.Pricing.Input),
+			CostPer1MOut:           parseFloat(model.Pricing.Output),
+			CostPer1MInCached:      parseFloat(model.Pricing.CacheWrite),
+			CostPer1MOutCached:     parseFloat(model.Pricing.CacheRead),
+			ContextWindow:          model.ContextLength,
+			DefaultMaxTokens:       maxTokens,
+			CanReason:              canReason,
+			ReasoningLevels:        reasoningLevels,
+			DefaultReasoningEffort: defaultReasoning,
+			SupportsImages:         supportsImages,
+		})
+
+		fmt.Printf("Added model %s with context window %d\n",
+			model.ModelID, model.ContextLength)
+	}
+
+	if len(aiHubMixProvider.Models) == 0 {
+		log.Fatal("No models found or no models met the criteria")
+	}
+
+	slices.SortFunc(aiHubMixProvider.Models, func(a, b catwalk.Model) int {
+		return strings.Compare(a.ID, b.ID)
+	})
+
+	data, err := json.MarshalIndent(aiHubMixProvider, "", "  ")
+	if err != nil {
+		log.Fatal("Error marshaling AIHubMix provider:", err)
+	}
+
+	if err := os.WriteFile("internal/providers/configs/aihubmix.json", data, 0o600); err != nil {
+		log.Fatal("Error writing AIHubMix provider config:", err)
+	}
+
+	fmt.Printf("\nSuccessfully wrote %d models to internal/providers/configs/aihubmix.json\n", len(aiHubMixProvider.Models))
+}

internal/providers/configs/aihubmix.json 🔗

@@ -4,150 +4,2456 @@
   "api_key": "$AIHUBMIX_API_KEY",
   "api_endpoint": "https://aihubmix.com/v1",
   "type": "openai-compat",
-  "default_large_model_id": "claude-sonnet-4-5",
-  "default_small_model_id": "claude-3-5-haiku",
-  "default_headers": {
-    "APP-Code": "IUFF7106"
-  },
+  "default_large_model_id": "gpt-5",
+  "default_small_model_id": "gpt-5-nano",
   "models": [
     {
-      "id": "claude-sonnet-4-5",
-      "name": "Claude Sonnet 4.5",
-      "cost_per_1m_in": 3,
-      "cost_per_1m_out": 15,
-      "cost_per_1m_in_cached": 3.75,
-      "cost_per_1m_out_cached": 0.3,
+      "id": "AiHubmix-Phi-4-mini-reasoning",
+      "name": "Aihubmix Phi 4 Mini (reasoning)",
+      "cost_per_1m_in": 0.12,
+      "cost_per_1m_out": 0.12,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 4000,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "AiHubmix-Phi-4-reasoning",
+      "name": "Aihubmix Phi 4 (reasoning)",
+      "cost_per_1m_in": 0.2,
+      "cost_per_1m_out": 0.2,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 4000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "ByteDance-Seed/Seed-OSS-36B-Instruct",
+      "name": "Seed Oss 36B Instruct",
+      "cost_per_1m_in": 0.2,
+      "cost_per_1m_out": 0.534,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 256000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-R1",
+      "name": "DeepSeek R1",
+      "cost_per_1m_in": 0.4,
+      "cost_per_1m_out": 2,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1638000,
+      "default_max_tokens": 163800,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-V3",
+      "name": "DeepSeek V3",
+      "cost_per_1m_in": 0.272,
+      "cost_per_1m_out": 1.088,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1638000,
+      "default_max_tokens": 163800,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-V3-Fast",
+      "name": "DeepSeek V3 Fast",
+      "cost_per_1m_in": 0.56,
+      "cost_per_1m_out": 2.24,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 32000,
+      "default_max_tokens": 3200,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-V3.1-Fast",
+      "name": "DeepSeek V3.1 Fast",
+      "cost_per_1m_in": 1.096,
+      "cost_per_1m_out": 3.288,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 163000,
+      "default_max_tokens": 16300,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-V3.1-Terminus",
+      "name": "DeepSeek V3.1 Terminus",
+      "cost_per_1m_in": 0.56,
+      "cost_per_1m_out": 1.68,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 160000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-V3.1-Think",
+      "name": "DeepSeek V3.1 Thinking",
+      "cost_per_1m_in": 0.56,
+      "cost_per_1m_out": 1.68,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 32000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-V3.2-Exp",
+      "name": "DeepSeek V3.2 Exp",
+      "cost_per_1m_in": 0.274,
+      "cost_per_1m_out": 0.411,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.0274,
+      "context_window": 163000,
+      "default_max_tokens": 16300,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "DeepSeek-V3.2-Exp-Think",
+      "name": "DeepSeek V3.2 Exp Thinking",
+      "cost_per_1m_in": 0.274,
+      "cost_per_1m_out": 0.411,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.0274,
+      "context_window": 131000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "ERNIE-X1.1-Preview",
+      "name": "ERNIE X1.1 Preview",
+      "cost_per_1m_in": 0.136,
+      "cost_per_1m_out": 0.544,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 119000,
+      "default_max_tokens": 11900,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "Kimi-K2-0905",
+      "name": "Kimi K2 0905",
+      "cost_per_1m_in": 0.548,
+      "cost_per_1m_out": 2.192,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 262144,
+      "default_max_tokens": 26214,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "aihub-Phi-4-mini-instruct",
+      "name": "Aihub Phi 4 Mini Instruct",
+      "cost_per_1m_in": 0.12,
+      "cost_per_1m_out": 0.48,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 4000,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "aihub-Phi-4-multimodal-instruct",
+      "name": "Aihub Phi 4 Multimodal Instruct",
+      "cost_per_1m_in": 0.12,
+      "cost_per_1m_out": 0.48,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 4000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-3-5-haiku",
+      "name": "Claude 3.5 Haiku",
+      "cost_per_1m_in": 1.1,
+      "cost_per_1m_out": 5.5,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
       "context_window": 200000,
-      "default_max_tokens": 50000,
+      "default_max_tokens": 8192,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-3-5-sonnet",
+      "name": "Claude 3.5 Sonnet",
+      "cost_per_1m_in": 3.3,
+      "cost_per_1m_out": 16.5,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 200000,
+      "default_max_tokens": 8192,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-3-5-sonnet-20240620",
+      "name": "Claude 3.5 Sonnet 20240620",
+      "cost_per_1m_in": 3.3,
+      "cost_per_1m_out": 16.5,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 200000,
+      "default_max_tokens": 8192,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-3-7-sonnet",
+      "name": "Claude 3.7 Sonnet",
+      "cost_per_1m_in": 3.3,
+      "cost_per_1m_out": 16.5,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 200000,
+      "default_max_tokens": 20000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-haiku-4-5",
+      "name": "Claude Haiku 4.5",
+      "cost_per_1m_in": 1.1,
+      "cost_per_1m_out": 5.5,
+      "cost_per_1m_in_cached": 1.375,
+      "cost_per_1m_out_cached": 0.11,
+      "context_window": 204800,
+      "default_max_tokens": 20480,
       "can_reason": true,
-      "supports_attachments": true
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-opus-4-0",
+      "name": "Claude Opus 4.0",
+      "cost_per_1m_in": 16.5,
+      "cost_per_1m_out": 82.5,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 200000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
     },
     {
       "id": "claude-opus-4-1",
       "name": "Claude Opus 4.1",
-      "cost_per_1m_in": 15,
-      "cost_per_1m_out": 75,
-      "cost_per_1m_in_cached": 18.75,
-      "cost_per_1m_out_cached": 1.5,
+      "cost_per_1m_in": 16.5,
+      "cost_per_1m_out": 82.5,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
       "context_window": 200000,
       "default_max_tokens": 32000,
       "can_reason": true,
-      "supports_attachments": true
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "claude-3-5-haiku",
-      "name": "Claude 3.5 Haiku",
-      "cost_per_1m_in": 0.7999999999999999,
-      "cost_per_1m_out": 4,
-      "cost_per_1m_in_cached": 1,
-      "cost_per_1m_out_cached": 0.08,
+      "id": "claude-opus-4-5",
+      "name": "Claude Opus 4.5",
+      "cost_per_1m_in": 5,
+      "cost_per_1m_out": 25,
+      "cost_per_1m_in_cached": 6.25,
+      "cost_per_1m_out_cached": 0.5,
       "context_window": 200000,
-      "default_max_tokens": 5000,
-      "can_reason": false,
-      "supports_attachments": true
+      "default_max_tokens": 32000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "gemini-2.5-pro",
-      "name": "Gemini 2.5 Pro",
-      "cost_per_1m_in": 1.25,
-      "cost_per_1m_out": 10,
-      "cost_per_1m_in_cached": 1.625,
-      "cost_per_1m_out_cached": 0.31,
-      "context_window": 1048576,
-      "default_max_tokens": 50000,
+      "id": "claude-opus-4-5-think",
+      "name": "Claude Opus 4.5 Thinking",
+      "cost_per_1m_in": 5,
+      "cost_per_1m_out": 25,
+      "cost_per_1m_in_cached": 6.25,
+      "cost_per_1m_out_cached": 0.5,
+      "context_window": 200000,
+      "default_max_tokens": 32000,
       "can_reason": true,
-      "supports_attachments": true
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "gemini-2.5-flash",
-      "name": "Gemini 2.5 Flash",
-      "cost_per_1m_in": 0.3,
-      "cost_per_1m_out": 2.5,
-      "cost_per_1m_in_cached": 0.3833,
-      "cost_per_1m_out_cached": 0.075,
-      "context_window": 1048576,
-      "default_max_tokens": 50000,
+      "id": "claude-opus-4-6",
+      "name": "Claude Opus 4.6",
+      "cost_per_1m_in": 5,
+      "cost_per_1m_out": 25,
+      "cost_per_1m_in_cached": 6.25,
+      "cost_per_1m_out_cached": 0.5,
+      "context_window": 200000,
+      "default_max_tokens": 32000,
       "can_reason": true,
-      "supports_attachments": true
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "gpt-5",
-      "name": "GPT-5",
-      "cost_per_1m_in": 1.25,
-      "cost_per_1m_out": 10,
-      "cost_per_1m_in_cached": 0.25,
-      "cost_per_1m_out_cached": 0.25,
-      "context_window": 400000,
-      "default_max_tokens": 128000,
+      "id": "claude-opus-4-6-think",
+      "name": "Claude Opus 4.6 Thinking",
+      "cost_per_1m_in": 5,
+      "cost_per_1m_out": 25,
+      "cost_per_1m_in_cached": 6.25,
+      "cost_per_1m_out_cached": 0.5,
+      "context_window": 200000,
+      "default_max_tokens": 32000,
       "can_reason": true,
-      "has_reasoning_efforts": true,
-      "default_reasoning_effort": "minimal",
-      "supports_attachments": true
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "gpt-5-mini",
-      "name": "GPT-5 Mini",
-      "cost_per_1m_in": 0.25,
-      "cost_per_1m_out": 2,
-      "cost_per_1m_in_cached": 0.025,
-      "cost_per_1m_out_cached": 0.025,
-      "context_window": 400000,
-      "default_max_tokens": 128000,
+      "id": "claude-sonnet-4-0",
+      "name": "Claude Sonnet 4.0",
+      "cost_per_1m_in": 3.3,
+      "cost_per_1m_out": 16.5,
+      "cost_per_1m_in_cached": 4.125,
+      "cost_per_1m_out_cached": 0.33,
+      "context_window": 1000000,
+      "default_max_tokens": 64000,
       "can_reason": true,
-      "has_reasoning_efforts": true,
-      "default_reasoning_effort": "low",
-      "supports_attachments": true
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "gpt-5-nano",
-      "name": "GPT-5 Nano",
-      "cost_per_1m_in": 0.05,
-      "cost_per_1m_out": 0.4,
-      "cost_per_1m_in_cached": 0.005,
-      "cost_per_1m_out_cached": 0.005,
-      "context_window": 400000,
-      "default_max_tokens": 128000,
+      "id": "claude-sonnet-4-5",
+      "name": "Claude Sonnet 4.5",
+      "cost_per_1m_in": 3.3,
+      "cost_per_1m_out": 16.5,
+      "cost_per_1m_in_cached": 4.125,
+      "cost_per_1m_out_cached": 0.33,
+      "context_window": 1000000,
+      "default_max_tokens": 64000,
       "can_reason": true,
-      "has_reasoning_efforts": true,
-      "default_reasoning_effort": "low",
-      "supports_attachments": true
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
     },
     {
-      "id": "Kimi-K2-0905",
-      "name": "Kimi K2 0905",
-      "cost_per_1m_in": 0.55,
-      "cost_per_1m_out": 2.19,
+      "id": "claude-sonnet-4-5-think",
+      "name": "Claude Sonnet 4.5 Thinking",
+      "cost_per_1m_in": 3.3,
+      "cost_per_1m_out": 16.5,
+      "cost_per_1m_in_cached": 4.125,
+      "cost_per_1m_out_cached": 0.33,
+      "context_window": 1000000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-sonnet-4-6",
+      "name": "Claude Sonnet 4.6",
+      "cost_per_1m_in": 3,
+      "cost_per_1m_out": 15,
+      "cost_per_1m_in_cached": 3.75,
+      "cost_per_1m_out_cached": 0.3,
+      "context_window": 1000000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "claude-sonnet-4-6-think",
+      "name": "Claude Sonnet 4.6 Thinking",
+      "cost_per_1m_in": 3,
+      "cost_per_1m_out": 15,
+      "cost_per_1m_in_cached": 3.75,
+      "cost_per_1m_out_cached": 0.3,
+      "context_window": 200000,
+      "default_max_tokens": 32000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "coding-glm-4.6-free",
+      "name": "Coding GLM 4.6 (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 256000,
-      "default_max_tokens": 10000,
+      "context_window": 200000,
+      "default_max_tokens": 20000,
       "can_reason": true,
-      "has_reasoning_efforts": true,
-      "default_reasoning_effort": "medium"
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
     },
     {
-      "id": "glm-4.6",
-      "name": "GLM-4.6",
-      "cost_per_1m_in": 0.6,
-      "cost_per_1m_out": 2.2,
-      "cost_per_1m_in_cached": 0.11,
+      "id": "coding-minimax-m2",
+      "name": "Coding MiniMax M2",
+      "cost_per_1m_in": 0.2,
+      "cost_per_1m_out": 0.2,
+      "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 204800,
-      "default_max_tokens": 131072,
+      "default_max_tokens": 13100,
       "can_reason": true,
-      "has_reasoning_efforts": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
       "default_reasoning_effort": "medium",
-      "supports_attachments": false
+      "supports_attachments": false,
+      "options": {}
     },
     {
-      "id": "qwen3-coder-480b-a35b-instruct",
-      "name": "Qwen 3 480B Coder",
-      "cost_per_1m_in": 0.82,
-      "cost_per_1m_out": 3.29,
-      "context_window": 131072,
-      "default_max_tokens": 65536,
+      "id": "coding-minimax-m2-free",
+      "name": "Coding MiniMax M2 (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 204800,
+      "default_max_tokens": 13100,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "coding-minimax-m2.1",
+      "name": "Coding MiniMax M2.1",
+      "cost_per_1m_in": 0.2,
+      "cost_per_1m_out": 0.2,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 204800,
+      "default_max_tokens": 13100,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "coding-minimax-m2.5",
+      "name": "Coding MiniMax M2.5",
+      "cost_per_1m_in": 0.2,
+      "cost_per_1m_out": 0.2,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 204800,
+      "default_max_tokens": 13100,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "deepseek-math-v2",
+      "name": "DeepSeek Math V2",
+      "cost_per_1m_in": 0.492,
+      "cost_per_1m_out": 1.968,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.0984,
+      "context_window": 163000,
+      "default_max_tokens": 16300,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "deepseek-v3.2",
+      "name": "DeepSeek V3.2",
+      "cost_per_1m_in": 0.302,
+      "cost_per_1m_out": 0.453,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.0302,
+      "context_window": 128000,
+      "default_max_tokens": 64000,
       "can_reason": false,
-      "supports_attachments": false
-    }
-  ]
-}
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "deepseek-v3.2-fast",
+      "name": "DeepSeek V3.2 Fast",
+      "cost_per_1m_in": 1.096,
+      "cost_per_1m_out": 3.288,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 1.096,
+      "context_window": 128000,
+      "default_max_tokens": 12800,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "deepseek-v3.2-speciale",
+      "name": "DeepSeek V3.2 Speciale",
+      "cost_per_1m_in": 0.58,
+      "cost_per_1m_out": 1.680028,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 12800,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "deepseek-v3.2-think",
+      "name": "DeepSeek V3.2 Thinking",
+      "cost_per_1m_in": 0.302,
+      "cost_per_1m_out": 0.453,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.0302,
+      "context_window": 128000,
+      "default_max_tokens": 64000,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "doubao-seed-1-6",
+      "name": "Doubao Seed 1.6",
+      "cost_per_1m_in": 0.18,
+      "cost_per_1m_out": 1.8,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.036,
+      "context_window": 256000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "doubao-seed-1-6-flash",
+      "name": "Doubao Seed 1.6 Flash",
+      "cost_per_1m_in": 0.044,
+      "cost_per_1m_out": 0.44,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.0088,
+      "context_window": 256000,
+      "default_max_tokens": 33000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "doubao-seed-1-6-lite",
+      "name": "Doubao Seed 1.6 Lite",
+      "cost_per_1m_in": 0.082,
+      "cost_per_1m_out": 0.656,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.0164,
+      "context_window": 256000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "doubao-seed-1-6-thinking",
+      "name": "Doubao Seed 1.6 Thinking",
+      "cost_per_1m_in": 0.18,
+      "cost_per_1m_out": 1.8,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.036,
+      "context_window": 256000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "doubao-seed-1-8",
+      "name": "Doubao Seed 1.8",
+      "cost_per_1m_in": 0.10959,
+      "cost_per_1m_out": 0.273975,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.021918,
+      "context_window": 256000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "doubao-seed-2-0-code-preview",
+      "name": "Doubao Seed 2.0 Code Preview",
+      "cost_per_1m_in": 0.4822,
+      "cost_per_1m_out": 2.411,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.09644,
+      "context_window": 256000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "doubao-seed-2-0-lite",
+      "name": "Doubao Seed 2.0 Lite",
+      "cost_per_1m_in": 0.09041,
+      "cost_per_1m_out": 0.54246,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.018082,
+      "context_window": 256000,
+      "default_max_tokens": 32000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "doubao-seed-2-0-mini",
+      "name": "Doubao Seed 2.0 Mini",
+      "cost_per_1m_in": 0.030136,
+      "cost_per_1m_out": 0.30136,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.006027,
+      "context_window": 256000,
+      "default_max_tokens": 32000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "doubao-seed-2-0-pro",
+      "name": "Doubao Seed 2.0 Pro",
+      "cost_per_1m_in": 0.4822,
+      "cost_per_1m_out": 2.411,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.09644,
+      "context_window": 256000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "ernie-4.5",
+      "name": "ERNIE 4.5",
+      "cost_per_1m_in": 0.068,
+      "cost_per_1m_out": 0.272,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 160000,
+      "default_max_tokens": 64000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "ernie-4.5-turbo-latest",
+      "name": "ERNIE 4.5 Turbo",
+      "cost_per_1m_in": 0.11,
+      "cost_per_1m_out": 0.44,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 135000,
+      "default_max_tokens": 12000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "ernie-4.5-turbo-vl",
+      "name": "ERNIE 4.5 Turbo VL",
+      "cost_per_1m_in": 0.4,
+      "cost_per_1m_out": 1.2,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 139000,
+      "default_max_tokens": 16000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "ernie-5.0-thinking-exp",
+      "name": "ERNIE 5.0 Thinking Exp",
+      "cost_per_1m_in": 0.82192,
+      "cost_per_1m_out": 3.28768,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.82192,
+      "context_window": 119000,
+      "default_max_tokens": 11900,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "ernie-5.0-thinking-preview",
+      "name": "ERNIE 5.0 Thinking Preview",
+      "cost_per_1m_in": 0.822,
+      "cost_per_1m_out": 3.288,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.822,
+      "context_window": 183000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "ernie-x1-turbo",
+      "name": "ERNIE X1 Turbo",
+      "cost_per_1m_in": 0.136,
+      "cost_per_1m_out": 0.544,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 50500,
+      "default_max_tokens": 5050,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.0-flash",
+      "name": "Gemini 2.0 Flash",
+      "cost_per_1m_in": 0.1,
+      "cost_per_1m_out": 0.4,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.025,
+      "context_window": 1048576,
+      "default_max_tokens": 8192,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.0-flash-free",
+      "name": "Gemini 2.0 Flash (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1048576,
+      "default_max_tokens": 8192,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash",
+      "name": "Gemini 2.5 Flash",
+      "cost_per_1m_in": 0.3,
+      "cost_per_1m_out": 2.499,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.03,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash-lite",
+      "name": "Gemini 2.5 Flash Lite",
+      "cost_per_1m_in": 0.1,
+      "cost_per_1m_out": 0.4,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.01,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash-lite-preview-09-2025",
+      "name": "Gemini 2.5 Flash Lite Preview 09 2025",
+      "cost_per_1m_in": 0.1,
+      "cost_per_1m_out": 0.4,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.01,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash-nothink",
+      "name": "Gemini 2.5 Flash (no think)",
+      "cost_per_1m_in": 0.3,
+      "cost_per_1m_out": 2.499,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.03,
+      "context_window": 1047576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash-preview-05-20-nothink",
+      "name": "Gemini 2.5 Flash Preview 05-20 (no think)",
+      "cost_per_1m_in": 0.3,
+      "cost_per_1m_out": 2.499,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.03,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash-preview-05-20-search",
+      "name": "Gemini 2.5 Flash Preview 05-20 Search",
+      "cost_per_1m_in": 0.3,
+      "cost_per_1m_out": 2.499,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.03,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash-preview-09-2025",
+      "name": "Gemini 2.5 Flash Preview 09 2025",
+      "cost_per_1m_in": 0.3,
+      "cost_per_1m_out": 2.499,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.03,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-flash-search",
+      "name": "Gemini 2.5 Flash Search",
+      "cost_per_1m_in": 0.3,
+      "cost_per_1m_out": 2.499,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.03,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-pro",
+      "name": "Gemini 2.5 Pro",
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 10,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.125,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-pro-preview-05-06",
+      "name": "Gemini 2.5 Pro Preview 05-06",
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 10,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.125,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-pro-preview-06-05",
+      "name": "Gemini 2.5 Pro Preview 06-05",
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 10,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.125,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-2.5-pro-search",
+      "name": "Gemini 2.5 Pro Search",
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 10,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.125,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-3-flash-preview",
+      "name": "Gemini 3 Flash Preview",
+      "cost_per_1m_in": 0.5,
+      "cost_per_1m_out": 3,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.05,
+      "context_window": 1048576,
+      "default_max_tokens": 104857,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-3-flash-preview-free",
+      "name": "Gemini 3 Flash Preview (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-3-flash-preview-search",
+      "name": "Gemini 3 Flash Preview Search",
+      "cost_per_1m_in": 0.5,
+      "cost_per_1m_out": 3,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.05,
+      "context_window": 1048576,
+      "default_max_tokens": 104857,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-3.1-pro-preview",
+      "name": "Gemini 3.1 Pro Preview",
+      "cost_per_1m_in": 2,
+      "cost_per_1m_out": 12,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.2,
+      "context_window": 1000000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gemini-3.1-pro-preview-customtools",
+      "name": "Gemini 3.1 Pro Preview Customtools",
+      "cost_per_1m_in": 2,
+      "cost_per_1m_out": 12,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.2,
+      "context_window": 1000000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "glm-4.5v",
+      "name": "GLM 4.5 Vision",
+      "cost_per_1m_in": 0.274,
+      "cost_per_1m_out": 0.822,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.274,
+      "context_window": 64000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "glm-4.6",
+      "name": "GLM 4.6",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 204800,
+      "default_max_tokens": 20480,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "glm-4.6v",
+      "name": "GLM 4.6 Vision",
+      "cost_per_1m_in": 0.137,
+      "cost_per_1m_out": 0.411,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.0274,
+      "context_window": 128000,
+      "default_max_tokens": 12800,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "glm-4.7",
+      "name": "GLM 4.7",
+      "cost_per_1m_in": 0.273974,
+      "cost_per_1m_out": 1.095896,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.054795,
+      "context_window": 200000,
+      "default_max_tokens": 20000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "gpt-4.1",
+      "name": "GPT 4.1",
+      "cost_per_1m_in": 2,
+      "cost_per_1m_out": 8,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.5,
+      "context_window": 1047576,
+      "default_max_tokens": 32768,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4.1-free",
+      "name": "GPT 4.1 (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1047576,
+      "default_max_tokens": 32768,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4.1-mini",
+      "name": "GPT 4.1 Mini",
+      "cost_per_1m_in": 0.4,
+      "cost_per_1m_out": 1.6,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.1,
+      "context_window": 1047576,
+      "default_max_tokens": 32768,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4.1-mini-free",
+      "name": "GPT 4.1 Mini (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1047576,
+      "default_max_tokens": 32768,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4.1-nano",
+      "name": "GPT 4.1 Nano",
+      "cost_per_1m_in": 0.1,
+      "cost_per_1m_out": 0.4,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.025,
+      "context_window": 1047576,
+      "default_max_tokens": 32768,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4.1-nano-free",
+      "name": "GPT 4.1 Nano (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1047576,
+      "default_max_tokens": 32768,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4o",
+      "name": "GPT 4o",
+      "cost_per_1m_in": 2.5,
+      "cost_per_1m_out": 10,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 1.25,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4o-2024-11-20",
+      "name": "GPT 4o 2024 11-20",
+      "cost_per_1m_in": 2.5,
+      "cost_per_1m_out": 10,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 1.25,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4o-audio-preview",
+      "name": "GPT 4o Audio Preview",
+      "cost_per_1m_in": 2.5,
+      "cost_per_1m_out": 10,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "gpt-4o-free",
+      "name": "GPT 4o (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1047576,
+      "default_max_tokens": 32768,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4o-mini",
+      "name": "GPT 4o Mini",
+      "cost_per_1m_in": 0.15,
+      "cost_per_1m_out": 0.6,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.075,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4o-mini-search-preview",
+      "name": "GPT 4o Mini Search Preview",
+      "cost_per_1m_in": 0.15,
+      "cost_per_1m_out": 0.6,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.075,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-4o-search-preview",
+      "name": "GPT 4o Search Preview",
+      "cost_per_1m_in": 2.5,
+      "cost_per_1m_out": 10,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 1.25,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5",
+      "name": "GPT 5",
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 10,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.125,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5-chat-latest",
+      "name": "GPT 5 Chat",
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 10,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.125,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5-codex",
+      "name": "GPT-5-Codex",
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 10,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.125,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5-mini",
+      "name": "GPT 5 Mini",
+      "cost_per_1m_in": 0.25,
+      "cost_per_1m_out": 2,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.025,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5-nano",
+      "name": "GPT 5 Nano",
+      "cost_per_1m_in": 0.05,
+      "cost_per_1m_out": 0.4,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.005,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5-pro",
+      "name": "GPT 5 Pro",
+      "cost_per_1m_in": 15,
+      "cost_per_1m_out": 120,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.1",
+      "name": "GPT 5.1",
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 10,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.125,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.1-chat-latest",
+      "name": "GPT 5.1 Chat",
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 10,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.125,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.1-codex",
+      "name": "GPT-5.1-Codex",
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 10,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.125,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.1-codex-max",
+      "name": "GPT-5.1-Codex Max",
+      "cost_per_1m_in": 1.25,
+      "cost_per_1m_out": 10,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.125,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.1-codex-mini",
+      "name": "GPT-5.1-Codex Mini",
+      "cost_per_1m_in": 0.25,
+      "cost_per_1m_out": 2,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.025,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.2",
+      "name": "GPT 5.2",
+      "cost_per_1m_in": 1.75,
+      "cost_per_1m_out": 14,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.175,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.2-chat-latest",
+      "name": "GPT 5.2 Chat",
+      "cost_per_1m_in": 1.75,
+      "cost_per_1m_out": 14,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.175,
+      "context_window": 128000,
+      "default_max_tokens": 16384,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.2-codex",
+      "name": "GPT-5.2-Codex",
+      "cost_per_1m_in": 1.75,
+      "cost_per_1m_out": 14,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.175,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.2-high",
+      "name": "GPT 5.2 High",
+      "cost_per_1m_in": 1.75,
+      "cost_per_1m_out": 14,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.175,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.2-low",
+      "name": "GPT 5.2 Low",
+      "cost_per_1m_in": 1.75,
+      "cost_per_1m_out": 14,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.175,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-5.2-pro",
+      "name": "GPT 5.2 Pro",
+      "cost_per_1m_in": 21,
+      "cost_per_1m_out": 168,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 2.1,
+      "context_window": 400000,
+      "default_max_tokens": 128000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "gpt-oss-120b",
+      "name": "gpt-oss-120b",
+      "cost_per_1m_in": 0.18,
+      "cost_per_1m_out": 0.9,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 131072,
+      "default_max_tokens": 32768,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "gpt-oss-20b",
+      "name": "gpt-oss-20b",
+      "cost_per_1m_in": 0.11,
+      "cost_per_1m_out": 0.55,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 12800,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "grok-4",
+      "name": "Grok 4",
+      "cost_per_1m_in": 3.3,
+      "cost_per_1m_out": 16.5,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.825,
+      "context_window": 256000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "grok-4-1-fast-non-reasoning",
+      "name": "Grok 4.1 Fast",
+      "cost_per_1m_in": 0.2,
+      "cost_per_1m_out": 0.5,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.05,
+      "context_window": 2000000,
+      "default_max_tokens": 200000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "grok-4-1-fast-reasoning",
+      "name": "Grok 4.1 Fast (reasoning)",
+      "cost_per_1m_in": 0.2,
+      "cost_per_1m_out": 0.5,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.05,
+      "context_window": 2000000,
+      "default_max_tokens": 200000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "grok-code-fast-1",
+      "name": "Grok Code Fast 1",
+      "cost_per_1m_in": 0.2,
+      "cost_per_1m_out": 0.5,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.05,
+      "context_window": 256000,
+      "default_max_tokens": 10000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "jina-deepsearch-v1",
+      "name": "Jina Deepsearch V1",
+      "cost_per_1m_in": 0.05,
+      "cost_per_1m_out": 0.05,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1000000,
+      "default_max_tokens": 100000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "kat-dev",
+      "name": "Kat Dev",
+      "cost_per_1m_in": 0.137,
+      "cost_per_1m_out": 0.548,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 12800,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "kimi-for-coding-free",
+      "name": "Kimi For Coding (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 256000,
+      "default_max_tokens": 25600,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "kimi-k2-0711",
+      "name": "Kimi K2 0711",
+      "cost_per_1m_in": 0.54,
+      "cost_per_1m_out": 2.16,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 131000,
+      "default_max_tokens": 13100,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "kimi-k2-thinking",
+      "name": "Kimi K2 Thinking",
+      "cost_per_1m_in": 0.548,
+      "cost_per_1m_out": 2.192,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.137,
+      "context_window": 262144,
+      "default_max_tokens": 26214,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "kimi-k2-turbo-preview",
+      "name": "Kimi K2 Turbo Preview",
+      "cost_per_1m_in": 1.2,
+      "cost_per_1m_out": 4.8,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.3,
+      "context_window": 262144,
+      "default_max_tokens": 26214,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "kimi-k2.5",
+      "name": "Kimi K2.5",
+      "cost_per_1m_in": 0.6,
+      "cost_per_1m_out": 3,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.105,
+      "context_window": 256000,
+      "default_max_tokens": 25600,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "llama-4-maverick",
+      "name": "Llama 4 Maverick",
+      "cost_per_1m_in": 0.2,
+      "cost_per_1m_out": 0.2,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 1048576,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "llama-4-scout",
+      "name": "Llama 4 Scout",
+      "cost_per_1m_in": 0.2,
+      "cost_per_1m_out": 0.2,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 131000,
+      "default_max_tokens": 13100,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "mimo-v2-flash-free",
+      "name": "MiMo V2 Flash (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 256000,
+      "default_max_tokens": 25600,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "mistral-large-3",
+      "name": "Mistral Large 3",
+      "cost_per_1m_in": 0.5,
+      "cost_per_1m_out": 1.5,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 256000,
+      "default_max_tokens": 25600,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "o3",
+      "name": "O3",
+      "cost_per_1m_in": 2,
+      "cost_per_1m_out": 8,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.5,
+      "context_window": 200000,
+      "default_max_tokens": 100000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "o3-mini",
+      "name": "O3 Mini",
+      "cost_per_1m_in": 1.1,
+      "cost_per_1m_out": 4.4,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.55,
+      "context_window": 200000,
+      "default_max_tokens": 100000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "o3-pro",
+      "name": "O3 Pro",
+      "cost_per_1m_in": 20,
+      "cost_per_1m_out": 80,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 20,
+      "context_window": 200000,
+      "default_max_tokens": 100000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "o4-mini",
+      "name": "O4 Mini",
+      "cost_per_1m_in": 1.1,
+      "cost_per_1m_out": 4.4,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.275,
+      "context_window": 200000,
+      "default_max_tokens": 100000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-235b-a22b",
+      "name": "Qwen3 235B A22B",
+      "cost_per_1m_in": 0.28,
+      "cost_per_1m_out": 1.12,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 131100,
+      "default_max_tokens": 13110,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-235b-a22b-instruct-2507",
+      "name": "Qwen3 235B A22B Instruct 2507",
+      "cost_per_1m_in": 0.28,
+      "cost_per_1m_out": 1.12,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 262144,
+      "default_max_tokens": 26214,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-235b-a22b-thinking-2507",
+      "name": "Qwen3 235B A22B Thinking 2507",
+      "cost_per_1m_in": 0.28,
+      "cost_per_1m_out": 2.8,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 262144,
+      "default_max_tokens": 26214,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-coder-30b-a3b-instruct",
+      "name": "Qwen3 Coder 30B A3B Instruct",
+      "cost_per_1m_in": 0.2,
+      "cost_per_1m_out": 0.8,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.2,
+      "context_window": 2000000,
+      "default_max_tokens": 262000,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "qwen3-coder-480b-a35b-instruct",
+      "name": "Qwen3 Coder 480B A35B Instruct",
+      "cost_per_1m_in": 0.82,
+      "cost_per_1m_out": 3.28,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.82,
+      "context_window": 262000,
+      "default_max_tokens": 26200,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "qwen3-coder-flash",
+      "name": "Qwen3 Coder Flash",
+      "cost_per_1m_in": 0.136,
+      "cost_per_1m_out": 0.544,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.136,
+      "context_window": 256000,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "qwen3-coder-plus",
+      "name": "Qwen3 Coder Plus",
+      "cost_per_1m_in": 0.54,
+      "cost_per_1m_out": 2.16,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.108,
+      "context_window": 1048576,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "qwen3-coder-plus-2025-07-22",
+      "name": "Qwen3 Coder Plus 2025-07-22",
+      "cost_per_1m_in": 0.54,
+      "cost_per_1m_out": 2.16,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.54,
+      "context_window": 128000,
+      "default_max_tokens": 12800,
+      "can_reason": false,
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "qwen3-max",
+      "name": "Qwen3 Max",
+      "cost_per_1m_in": 0.34246,
+      "cost_per_1m_out": 1.36984,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.34246,
+      "context_window": 262144,
+      "default_max_tokens": 65536,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-max-2026-01-23",
+      "name": "Qwen3 Max 2026-01-23",
+      "cost_per_1m_in": 0.34246,
+      "cost_per_1m_out": 1.36984,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.34246,
+      "context_window": 252000,
+      "default_max_tokens": 32000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": false,
+      "options": {}
+    },
+    {
+      "id": "qwen3-next-80b-a3b-instruct",
+      "name": "Qwen3 Next 80B A3B Instruct",
+      "cost_per_1m_in": 0.138,
+      "cost_per_1m_out": 0.552,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 256000,
+      "default_max_tokens": 25600,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-next-80b-a3b-thinking",
+      "name": "Qwen3 Next 80B A3B Thinking",
+      "cost_per_1m_in": 0.138,
+      "cost_per_1m_out": 1.38,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 256000,
+      "default_max_tokens": 25600,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-vl-235b-a22b-instruct",
+      "name": "Qwen3 VL 235B A22B Instruct",
+      "cost_per_1m_in": 0.274,
+      "cost_per_1m_out": 1.096,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 131000,
+      "default_max_tokens": 33000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-vl-235b-a22b-thinking",
+      "name": "Qwen3 VL 235B A22B Thinking",
+      "cost_per_1m_in": 0.274,
+      "cost_per_1m_out": 2.74,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 131000,
+      "default_max_tokens": 33000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-vl-30b-a3b-instruct",
+      "name": "Qwen3 VL 30B A3B Instruct",
+      "cost_per_1m_in": 0.1028,
+      "cost_per_1m_out": 0.4112,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-vl-30b-a3b-thinking",
+      "name": "Qwen3 VL 30B A3B Thinking",
+      "cost_per_1m_in": 0.1028,
+      "cost_per_1m_out": 1.028,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 128000,
+      "default_max_tokens": 32000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-vl-flash",
+      "name": "Qwen3 VL Flash",
+      "cost_per_1m_in": 0.0206,
+      "cost_per_1m_out": 0.206,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.00412,
+      "context_window": 254000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-vl-flash-2026-01-22",
+      "name": "Qwen3 VL Flash 2026-01-22",
+      "cost_per_1m_in": 0.0206,
+      "cost_per_1m_out": 0.206,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.0206,
+      "context_window": 254000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3-vl-plus",
+      "name": "Qwen3 VL Plus",
+      "cost_per_1m_in": 0.137,
+      "cost_per_1m_out": 1.37,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.0274,
+      "context_window": 256000,
+      "default_max_tokens": 32000,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3.5-397b-a17b",
+      "name": "Qwen3.5 397B A17B",
+      "cost_per_1m_in": 0.1644,
+      "cost_per_1m_out": 0.9864,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0.1644,
+      "context_window": 991000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "qwen3.5-plus",
+      "name": "Qwen3.5 Plus",
+      "cost_per_1m_in": 0.137,
+      "cost_per_1m_out": 1.37,
+      "cost_per_1m_in_cached": 0.17125,
+      "cost_per_1m_out_cached": 0.0137,
+      "context_window": 991000,
+      "default_max_tokens": 64000,
+      "can_reason": true,
+      "reasoning_levels": [
+        "low",
+        "medium",
+        "high"
+      ],
+      "default_reasoning_effort": "medium",
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "step-3.5-flash",
+      "name": "Step 3.5 Flash",
+      "cost_per_1m_in": 0.1,
+      "cost_per_1m_out": 0.3,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 256000,
+      "default_max_tokens": 25600,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    },
+    {
+      "id": "step-3.5-flash-free",
+      "name": "Step 3.5 Flash (free)",
+      "cost_per_1m_in": 0,
+      "cost_per_1m_out": 0,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 256000,
+      "default_max_tokens": 25600,
+      "can_reason": false,
+      "supports_attachments": true,
+      "options": {}
+    }
+  ],
+  "default_headers": {
+    "APP-Code": "IUFF7106"
+  }
+}