1// Package main provides a command-line tool to fetch models from AIHubMix
2// and generate a configuration file for the provider.
3package main
4
5import (
6 "context"
7 "encoding/json"
8 "fmt"
9 "io"
10 "log"
11 "math"
12 "net/http"
13 "os"
14 "slices"
15 "strings"
16 "time"
17
18 "charm.land/catwalk/pkg/catwalk"
19)
20
// APIModel represents a single model entry returned by the AIHubMix API.
type APIModel struct {
	ModelID   string  `json:"model_id"`   // identifier used when invoking the model
	ModelName string  `json:"model_name"` // human-readable display name
	Desc      string  `json:"desc"`       // free-form description (unused here)
	Pricing   Pricing `json:"pricing"`    // per-token pricing information
	// Types and Features are comma-separated flag lists (see hasField);
	// Features may include "thinking" for reasoning-capable models.
	Types    string `json:"types"`
	Features string `json:"features"`
	// InputModalities is a comma-separated list such as "text,image".
	InputModalities string `json:"input_modalities"`
	MaxOutput       int64  `json:"max_output"`     // max output tokens; 0 means not reported
	ContextLength   int64  `json:"context_length"` // context window size in tokens
}
33
// Pricing contains the pricing information from the API.
// Fields are pointers so that an absent price can be distinguished from an
// explicit zero; parseFloat maps nil to 0.
// NOTE(review): units are presumably USD per million tokens — confirm with
// the AIHubMix API documentation.
type Pricing struct {
	Input      *float64 `json:"input"`
	Output     *float64 `json:"output"`
	CacheRead  *float64 `json:"cache_read"`
	CacheWrite *float64 `json:"cache_write"`
}
41
const (
	// minContextWindow is the smallest context window (in tokens) a model
	// must offer to be included in the generated config.
	minContextWindow = 20000
	// defaultLargeModel is the provider's default large-model ID.
	defaultLargeModel = "gpt-5"
	// defaultSmallModel is the provider's default small-model ID.
	defaultSmallModel = "gpt-5-nano"
	// maxTokensFactor divides the context window to derive a default
	// max-token budget when the API's max_output is absent or implausible
	// (see calculateMaxTokens).
	maxTokensFactor = 10
)
48
// ModelsResponse is the response structure for the models API.
// Success/Message carry the API's in-band application-level status alongside
// the HTTP status code.
type ModelsResponse struct {
	Data    []APIModel `json:"data"`
	Message string     `json:"message"`
	Success bool       `json:"success"`
}
55
56func fetchAIHubMixModels() (*ModelsResponse, error) {
57 req, err := http.NewRequestWithContext(
58 context.Background(),
59 "GET",
60 "https://aihubmix.com/api/v1/models?type=llm",
61 nil,
62 )
63 if err != nil {
64 return nil, fmt.Errorf("creating request: %w", err)
65 }
66 req.Header.Set("User-Agent", "Crush-Client/1.0")
67
68 client := &http.Client{Timeout: 30 * time.Second}
69 resp, err := client.Do(req)
70 if err != nil {
71 return nil, fmt.Errorf("fetching models: %w", err)
72 }
73 defer resp.Body.Close() //nolint:errcheck
74
75 if resp.StatusCode != http.StatusOK {
76 body, _ := io.ReadAll(resp.Body)
77 return nil, fmt.Errorf("unexpected status %d: %s", resp.StatusCode, body)
78 }
79
80 var mr ModelsResponse
81 if err := json.NewDecoder(resp.Body).Decode(&mr); err != nil {
82 return nil, fmt.Errorf("parsing response: %w", err)
83 }
84 return &mr, nil
85}
86
// hasField reports whether field appears as one of the comma-separated,
// whitespace-trimmed entries in s. An empty s never matches.
func hasField(s, field string) bool {
	if s == "" {
		return false
	}
	return slices.ContainsFunc(strings.Split(s, ","), func(entry string) bool {
		return strings.TrimSpace(entry) == field
	})
}
98
// roundCost rounds a cost value to five decimal places.
func roundCost(v float64) float64 {
	const scale = 1e5
	return math.Round(v*scale) / scale
}
102
// parseFloat dereferences an optional price, rounding it to five decimal
// places; a nil pointer (absent price) yields 0.
func parseFloat(p *float64) float64 {
	if p != nil {
		return math.Round(*p*1e5) / 1e5
	}
	return 0
}
109
// calculateMaxTokens picks a default max-token budget: the API-reported
// maxOutput when it is present and no larger than half the context window,
// otherwise contextLength divided by factor.
func calculateMaxTokens(contextLength, maxOutput, factor int64) int64 {
	if maxOutput != 0 && maxOutput <= contextLength/2 {
		return maxOutput
	}
	return contextLength / factor
}
116
// buildReasoningConfig returns the reasoning-effort levels and the default
// effort for a reasoning-capable model, or (nil, "") otherwise.
func buildReasoningConfig(canReason bool) ([]string, string) {
	if canReason {
		return []string{"low", "medium", "high"}, "medium"
	}
	return nil, ""
}
123
124func main() {
125 modelsResp, err := fetchAIHubMixModels()
126 if err != nil {
127 log.Fatal("Error fetching AIHubMix models:", err)
128 }
129
130 aiHubMixProvider := catwalk.Provider{
131 Name: "AIHubMix",
132 ID: catwalk.InferenceAIHubMix,
133 APIKey: "$AIHUBMIX_API_KEY",
134 APIEndpoint: "https://aihubmix.com/v1",
135 Type: catwalk.TypeOpenAICompat,
136 DefaultLargeModelID: defaultLargeModel,
137 DefaultSmallModelID: defaultSmallModel,
138 DefaultHeaders: map[string]string{
139 "APP-Code": "IUFF7106",
140 },
141 }
142
143 for _, model := range modelsResp.Data {
144 if model.ContextLength < minContextWindow {
145 continue
146 }
147 if !hasField(model.InputModalities, "text") {
148 continue
149 }
150
151 canReason := hasField(model.Features, "thinking")
152 supportsImages := hasField(model.InputModalities, "image")
153
154 reasoningLevels, defaultReasoning := buildReasoningConfig(canReason)
155 maxTokens := calculateMaxTokens(model.ContextLength, model.MaxOutput, maxTokensFactor)
156
157 aiHubMixProvider.Models = append(aiHubMixProvider.Models, catwalk.Model{
158 ID: model.ModelID,
159 Name: model.ModelName,
160 CostPer1MIn: parseFloat(model.Pricing.Input),
161 CostPer1MOut: parseFloat(model.Pricing.Output),
162 CostPer1MInCached: parseFloat(model.Pricing.CacheWrite),
163 CostPer1MOutCached: parseFloat(model.Pricing.CacheRead),
164 ContextWindow: model.ContextLength,
165 DefaultMaxTokens: maxTokens,
166 CanReason: canReason,
167 ReasoningLevels: reasoningLevels,
168 DefaultReasoningEffort: defaultReasoning,
169 SupportsImages: supportsImages,
170 })
171
172 fmt.Printf("Added model %s with context window %d\n",
173 model.ModelID, model.ContextLength)
174 }
175
176 if len(aiHubMixProvider.Models) == 0 {
177 log.Fatal("No models found or no models met the criteria")
178 }
179
180 slices.SortFunc(aiHubMixProvider.Models, func(a, b catwalk.Model) int {
181 return strings.Compare(a.ID, b.ID)
182 })
183
184 data, err := json.MarshalIndent(aiHubMixProvider, "", " ")
185 if err != nil {
186 log.Fatal("Error marshaling AIHubMix provider:", err)
187 }
188
189 if err := os.WriteFile("internal/providers/configs/aihubmix.json", data, 0o600); err != nil {
190 log.Fatal("Error writing AIHubMix provider config:", err)
191 }
192
193 fmt.Printf("\nSuccessfully wrote %d models to internal/providers/configs/aihubmix.json\n", len(aiHubMixProvider.Models))
194}