// Package main implements a tool to fetch GitHub Copilot models and generate a Catwalk provider configuration.
package main

import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"os"
	"path/filepath"
	"regexp"
	"runtime"
	"slices"
	"strings"
	"time"

	"github.com/charmbracelet/catwalk/pkg/catwalk"
)

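// Response is the envelope returned by the Copilot /models endpoint.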
type Response struct {
	Object string  `json:"object"`
	Data   []Model `json:"data"`
}

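// Model describes a single model entry as reported by the Copilot API.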
type Model struct {
	ID                 string     `json:"id"`
	Name               string     `json:"name"`
	Version            string     `json:"version"`
	Vendor             string     `json:"vendor"`
	Preview            bool       `json:"preview"`
	ModelPickerEnabled bool       `json:"model_picker_enabled"`
	Capabilities       Capability `json:"capabilities"`
	Policy             *Policy    `json:"policy,omitempty"`
}

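// Capability describes a model's family, tokenizer, token limits, and feature support.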
type Capability struct {
	Family    string   `json:"family"`
	Type      string   `json:"type"`
	Tokenizer string   `json:"tokenizer"`
	Limits    Limits   `json:"limits"`
	Supports  Supports `json:"supports"`
}

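// Limits holds the token limits advertised for a model.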
type Limits struct {
	MaxContextWindowTokens int `json:"max_context_window_tokens,omitempty"`
	MaxOutputTokens        int `json:"max_output_tokens,omitempty"`
	MaxPromptTokens        int `json:"max_prompt_tokens,omitempty"`
}

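// Supports lists optional features a model supports, such as tool calls and thinking budgets.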
type Supports struct {
	ToolCalls         bool `json:"tool_calls,omitempty"`
	ParallelToolCalls bool `json:"parallel_tool_calls,omitempty"`
	MaxThinkingBudget int  `json:"max_thinking_budget,omitempty"`
	MinThinkingBudget int  `json:"min_thinking_budget,omitempty"`
}

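// Policy describes the acceptance state and terms attached to a model.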
type Policy struct {
	State string `json:"state"`
	Terms string `json:"terms"`
}

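// versionedModelRegexp matches model IDs carrying a date suffix (an ID ending in
// "-YYYY-MM-DD"); these date-pinned duplicates are filtered out in run so that only
// the unversioned alias of each model is kept.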
var versionedModelRegexp = regexp.MustCompile(`-\d{4}-\d{2}-\d{2}$`)

func main() {
	if err := run(); err != nil {
		fmt.Fprintf(os.Stderr, "Error: %v\n", err)
		os.Exit(1)
	}
}

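// run fetches the Copilot model list, drops versioned and embedding models, converts the
// remainder to Catwalk models, and writes the resulting provider configuration to
// internal/providers/configs/copilot.json.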
func run() error {
	copilotModels, err := fetchCopilotModels()
	if err != nil {
		return err
	}

	// NOTE(@andreynering): Exclude versioned models and keep only the main version of each.
	copilotModels = slices.DeleteFunc(copilotModels, func(m Model) bool {
		return m.ID != m.Version || versionedModelRegexp.MatchString(m.ID) || strings.Contains(m.ID, "embedding")
	})

	catwalkModels := modelsToCatwalk(copilotModels)
	slices.SortStableFunc(catwalkModels, func(a, b catwalk.Model) int {
		return strings.Compare(a.ID, b.ID)
	})

	provider := catwalk.Provider{
		ID:                  catwalk.InferenceProviderCopilot,
		Name:                "GitHub Copilot",
		Models:              catwalkModels,
		APIEndpoint:         "https://api.githubcopilot.com",
		Type:                catwalk.TypeOpenAICompat,
		DefaultLargeModelID: "claude-sonnet-4.5",
		DefaultSmallModelID: "claude-haiku-4.5",
	}
	data, err := json.MarshalIndent(provider, "", " ")
	if err != nil {
		return fmt.Errorf("unable to marshal json: %w", err)
	}
	if err := os.WriteFile("internal/providers/configs/copilot.json", data, 0o600); err != nil {
		return fmt.Errorf("unable to write copilot.json: %w", err)
	}
	return nil
}

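// fetchCopilotModels calls the Copilot /models endpoint and returns the decoded model list.
// The raw response body is also written to tmp/copilot-response.json for debugging.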
func fetchCopilotModels() ([]Model, error) {
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	req, err := http.NewRequestWithContext(
		ctx,
		http.MethodGet,
		"https://api.githubcopilot.com/models",
		nil,
	)
	if err != nil {
		return nil, fmt.Errorf("unable to create request: %w", err)
	}
	req.Header.Set("Accept", "application/json")
	req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", copilotToken()))

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("unable to make http request: %w", err)
	}
	defer resp.Body.Close() //nolint:errcheck

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
	}

	bts, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("unable to read response body: %w", err)
	}

	// Keep a copy of the raw response for debugging.
	_ = os.MkdirAll("tmp", 0o700)
	_ = os.WriteFile("tmp/copilot-response.json", bts, 0o600)

	var data Response
	if err := json.Unmarshal(bts, &data); err != nil {
		return nil, fmt.Errorf("unable to unmarshal json: %w", err)
	}
	return data.Data, nil
}

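// modelsToCatwalk converts the fetched Copilot models into Catwalk models.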
func modelsToCatwalk(m []Model) []catwalk.Model {
	models := make([]catwalk.Model, 0, len(m))
	for _, model := range m {
		models = append(models, modelToCatwalk(model))
	}
	return models
}

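// modelToCatwalk maps a single Copilot model onto the Catwalk model schema, deriving
// reasoning and image-attachment support from the model ID.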
func modelToCatwalk(m Model) catwalk.Model {
	canReason, reasoningLevels, defaultReasoning := detectReasoningCapabilities(m)
	supportsAttachments := detectAttachmentSupport(m)

	return catwalk.Model{
		ID:                     m.ID,
		Name:                   m.Name,
		DefaultMaxTokens:       int64(m.Capabilities.Limits.MaxOutputTokens),
		ContextWindow:          int64(m.Capabilities.Limits.MaxContextWindowTokens),
		CanReason:              canReason,
		ReasoningLevels:        reasoningLevels,
		DefaultReasoningEffort: defaultReasoning,
		SupportsImages:         supportsAttachments,
	}
}

const defaultReasoningEffort = "medium"

var defaultReasoningLevels = []string{"low", "medium", "high"}

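// detectReasoningCapabilities reports whether a model can reason and, where the model exposes
// reasoning-effort levels, which levels are available and which one to use by default. Model
// families whose reasoning is not driven by effort levels (Claude, DeepSeek R1) return nil
// levels and an empty default.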
func detectReasoningCapabilities(m Model) (canReason bool, levels []string, defaultLevel string) {
	// Claude models with reasoning support
	if m.ID == "claude-3.7-sonnet" ||
		m.ID == "claude-haiku-4.5" ||
		m.ID == "claude-opus-4.5" ||
		m.ID == "claude-sonnet-4" ||
		m.ID == "claude-sonnet-4.5" {
		return true, nil, ""
	}

	// Gemini models with reasoning support
	if strings.HasPrefix(m.ID, "gemini-2.5-") || strings.HasPrefix(m.ID, "gemini-3-") {
		return true, defaultReasoningLevels, defaultReasoningEffort
	}

	// GPT-5 series with reasoning levels
	if strings.HasPrefix(m.ID, "gpt-5") && !strings.Contains(m.ID, "chat") {
		return true, defaultReasoningLevels, defaultReasoningEffort
	}

	// OpenAI o-series with reasoning levels
	if strings.HasPrefix(m.ID, "o3-") || strings.HasPrefix(m.ID, "o4-") {
		return true, defaultReasoningLevels, defaultReasoningEffort
	}

	// DeepSeek R1 models
	if strings.HasPrefix(m.ID, "deepseek-r1") {
		return true, nil, ""
	}

	// Grok models with reasoning
	if m.ID == "grok-3-mini" || m.ID == "grok-3-mini-beta" ||
		strings.HasPrefix(m.ID, "grok-4") ||
		m.ID == "grok-code-fast-1" {
		return true, defaultReasoningLevels, defaultReasoningEffort
	}

	return false, nil, ""
}

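// detectAttachmentSupport reports whether a model accepts image attachments, based on the
// known multimodal model families.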
func detectAttachmentSupport(m Model) bool {
	// Claude models support attachments (vision/multimodal)
	if strings.HasPrefix(m.ID, "claude-") {
		return true
	}

	// Gemini models support attachments (vision/multimodal)
	if strings.HasPrefix(m.ID, "gemini-") {
		return true
	}

	// GPT-5 models support attachments (based on OpenRouter pattern)
	if strings.HasPrefix(m.ID, "gpt-5") {
		return true
	}

	// Older GPT models do not support attachments
	if strings.HasPrefix(m.ID, "gpt-4") || strings.HasPrefix(m.ID, "gpt-3.5") {
		return false
	}

	// Grok models: only grok-4 and its variants support attachments
	if m.ID == "grok-4" || strings.HasPrefix(m.ID, "grok-4-") {
		return true
	}

	return false
}

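// copilotToken returns the token used to authenticate against the Copilot API: the
// COPILOT_TOKEN environment variable if set, otherwise the OAuth token stored in the local
// github-copilot apps.json file.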
func copilotToken() string {
	if token := os.Getenv("COPILOT_TOKEN"); token != "" {
		return token
	}
	return tokenFromDisk()
}

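// tokenFromDisk reads the OAuth token for the Copilot GitHub App from the local apps.json
// file. The file is assumed to be a JSON object keyed by "<host>:<client-id>" entries, each
// carrying user, oauth_token, and githubAppId fields; an empty string is returned if the
// file is missing or cannot be parsed.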
func tokenFromDisk() string {
	data, err := os.ReadFile(tokenFilePath())
	if err != nil {
		return ""
	}
	var content map[string]struct {
		User        string `json:"user"`
		OAuthToken  string `json:"oauth_token"`
		GitHubAppID string `json:"githubAppId"`
	}
	if err := json.Unmarshal(data, &content); err != nil {
		return ""
	}
	if app, ok := content["github.com:Iv1.b507a08c87ecfe98"]; ok {
		return app.OAuthToken
	}
	return ""
}

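// tokenFilePath returns the platform-specific location of the github-copilot apps.json file.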
func tokenFilePath() string {
	switch runtime.GOOS {
	case "windows":
		return filepath.Join(os.Getenv("LOCALAPPDATA"), "github-copilot", "apps.json")
	default:
		return filepath.Join(os.Getenv("HOME"), ".config", "github-copilot", "apps.json")
	}
}