package config

import (
	"cmp"
	"context"
	"fmt"
	"log/slog"
	"maps"
	"net/http"
	"net/url"
	"os"
	"slices"
	"strings"
	"time"

	"github.com/charmbracelet/catwalk/pkg/catwalk"
	hyperp "github.com/charmbracelet/crush/internal/agent/hyper"
	"github.com/charmbracelet/crush/internal/csync"
	"github.com/charmbracelet/crush/internal/env"
	"github.com/charmbracelet/crush/internal/oauth"
	"github.com/charmbracelet/crush/internal/oauth/claude"
	"github.com/charmbracelet/crush/internal/oauth/copilot"
	"github.com/charmbracelet/crush/internal/oauth/hyper"
	"github.com/invopop/jsonschema"
	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

const (
	appName              = "crush"
	defaultDataDirectory = ".crush"
	defaultInitializeAs  = "AGENTS.md"
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"GEMINI.md",
	"gemini.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
	"AGENTS.md",
	"agents.md",
	"Agents.md",
}

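// SelectedModelType identifies which model slot a selection applies to; currently only "large" and "small" are supported.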
type SelectedModelType string

const (
	SelectedModelTypeLarge SelectedModelType = "large"
	SelectedModelTypeSmall SelectedModelType = "small"
)

const (
	AgentCoder string = "coder"
	AgentTask  string = "task"
)

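// SelectedModel captures a user's model selection for a model slot, along with optional per-model overrides.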
type SelectedModel struct {
	// The model ID as used by the provider API.
	// Required.
	Model string `json:"model" jsonschema:"required,description=The model ID as used by the provider API,example=gpt-4o"`
	// The model provider, same as the key/ID used in the providers config.
	// Required.
	Provider string `json:"provider" jsonschema:"required,description=The model provider ID that matches a key in the providers config,example=openai"`

	// Only used by models that use the openai provider and need this set.
	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"description=Reasoning effort level for OpenAI models that support it,enum=low,enum=medium,enum=high"`

	// Used by Anthropic models that can reason to indicate whether the model should think.
	Think bool `json:"think,omitempty" jsonschema:"description=Enable thinking mode for Anthropic models that support reasoning"`

	// Overrides the default model configuration.
	MaxTokens int64 `json:"max_tokens,omitempty" jsonschema:"description=Maximum number of tokens for model responses,maximum=200000,example=4096"`
	Temperature *float64 `json:"temperature,omitempty" jsonschema:"description=Sampling temperature,minimum=0,maximum=1,example=0.7"`
	TopP *float64 `json:"top_p,omitempty" jsonschema:"description=Top-p (nucleus) sampling parameter,minimum=0,maximum=1,example=0.9"`
	TopK *int64 `json:"top_k,omitempty" jsonschema:"description=Top-k sampling parameter"`
	FrequencyPenalty *float64 `json:"frequency_penalty,omitempty" jsonschema:"description=Frequency penalty to reduce repetition"`
	PresencePenalty *float64 `json:"presence_penalty,omitempty" jsonschema:"description=Presence penalty to increase topic diversity"`

	// Overrides provider-specific options.
	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for the model"`
}

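// ProviderConfig describes a single AI provider: its endpoint, credentials, and the models it exposes.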
type ProviderConfig struct {
	// The provider's ID.
	ID string `json:"id,omitempty" jsonschema:"description=Unique identifier for the provider,example=openai"`
	// The provider's name, used for display purposes.
	Name string `json:"name,omitempty" jsonschema:"description=Human-readable name for the provider,example=OpenAI"`
	// The provider's API endpoint.
	BaseURL string `json:"base_url,omitempty" jsonschema:"description=Base URL for the provider's API,format=uri,example=https://api.openai.com/v1"`
	// The provider type, e.g. "openai", "anthropic", etc. If empty, it defaults to "openai".
	Type catwalk.Type `json:"type,omitempty" jsonschema:"description=Provider type that determines the API format,enum=openai,enum=openai-compat,enum=anthropic,enum=gemini,enum=azure,enum=vertexai,default=openai"`
	// The provider's API key.
	APIKey string `json:"api_key,omitempty" jsonschema:"description=API key for authentication with the provider,example=$OPENAI_API_KEY"`
	// The original API key template before resolution (for re-resolution on auth errors).
	APIKeyTemplate string `json:"-"`
	// OAuthToken for providers that use OAuth2 authentication.
	OAuthToken *oauth.Token `json:"oauth,omitempty" jsonschema:"description=OAuth2 token for authentication with the provider"`
	// Marks the provider as disabled.
	Disable bool `json:"disable,omitempty" jsonschema:"description=Whether this provider is disabled,default=false"`

	// Custom system prompt prefix.
	SystemPromptPrefix string `json:"system_prompt_prefix,omitempty" jsonschema:"description=Custom prefix to add to system prompts for this provider"`

	// Extra headers to send with each request to the provider.
	ExtraHeaders map[string]string `json:"extra_headers,omitempty" jsonschema:"description=Additional HTTP headers to send with requests"`
	// Extra fields to include in request bodies; only used by OpenAI-compatible providers.
	ExtraBody map[string]any `json:"extra_body,omitempty" jsonschema:"description=Additional fields to include in request bodies, only works with openai-compatible providers"`

	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for this provider"`

	// Used to pass extra parameters to the provider.
	ExtraParams map[string]string `json:"-"`

	// The provider's models.
	Models []catwalk.Model `json:"models,omitempty" jsonschema:"description=List of models available from this provider"`
}

// ToProvider converts the [ProviderConfig] to a [catwalk.Provider].
func (pc *ProviderConfig) ToProvider() catwalk.Provider {
	// Convert config provider to provider.Provider format
	provider := catwalk.Provider{
		Name: pc.Name,
		ID: catwalk.InferenceProvider(pc.ID),
		Models: make([]catwalk.Model, len(pc.Models)),
	}

	// Convert models
	for i, model := range pc.Models {
		provider.Models[i] = catwalk.Model{
			ID: model.ID,
			Name: model.Name,
			CostPer1MIn: model.CostPer1MIn,
			CostPer1MOut: model.CostPer1MOut,
			CostPer1MInCached: model.CostPer1MInCached,
			CostPer1MOutCached: model.CostPer1MOutCached,
			ContextWindow: model.ContextWindow,
			DefaultMaxTokens: model.DefaultMaxTokens,
			CanReason: model.CanReason,
			ReasoningLevels: model.ReasoningLevels,
			DefaultReasoningEffort: model.DefaultReasoningEffort,
			SupportsImages: model.SupportsImages,
		}
	}

	return provider
}

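// SetupClaudeCode configures the provider for Claude Code OAuth access by setting the system prompt prefix and the required Anthropic headers.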
func (pc *ProviderConfig) SetupClaudeCode() {
	pc.SystemPromptPrefix = "You are Claude Code, Anthropic's official CLI for Claude."
	if pc.ExtraHeaders == nil {
		pc.ExtraHeaders = make(map[string]string)
	}
	pc.ExtraHeaders["anthropic-version"] = "2023-06-01"

	value := pc.ExtraHeaders["anthropic-beta"]
	const want = "oauth-2025-04-20"
	if !strings.Contains(value, want) {
		if value != "" {
			value += ","
		}
		value += want
	}
	pc.ExtraHeaders["anthropic-beta"] = value
}

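// SetupGitHubCopilot adds the extra headers required by the GitHub Copilot API.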
func (pc *ProviderConfig) SetupGitHubCopilot() {
	if pc.ExtraHeaders == nil {
		pc.ExtraHeaders = make(map[string]string)
	}
	maps.Copy(pc.ExtraHeaders, copilot.Headers())
}

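// MCPType identifies the transport used to communicate with an MCP server.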
type MCPType string

const (
	MCPStdio MCPType = "stdio"
	MCPSSE MCPType = "sse"
	MCPHttp MCPType = "http"
)

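// MCPConfig describes how to start or connect to a single MCP server.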
type MCPConfig struct {
	Command string `json:"command,omitempty" jsonschema:"description=Command to execute for stdio MCP servers,example=npx"`
	Env map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the MCP server"`
	Args []string `json:"args,omitempty" jsonschema:"description=Arguments to pass to the MCP server command"`
	Type MCPType `json:"type" jsonschema:"required,description=Type of MCP connection,enum=stdio,enum=sse,enum=http,default=stdio"`
	URL string `json:"url,omitempty" jsonschema:"description=URL for HTTP or SSE MCP servers,format=uri,example=http://localhost:3000/mcp"`
	Disabled bool `json:"disabled,omitempty" jsonschema:"description=Whether this MCP server is disabled,default=false"`
	DisabledTools []string `json:"disabled_tools,omitempty" jsonschema:"description=List of tools from this MCP server to disable,example=get-library-doc"`
	Timeout int `json:"timeout,omitempty" jsonschema:"description=Timeout in seconds for MCP server connections,default=15,example=30,example=60,example=120"`

	// TODO: maybe make it possible to get the value from the env
	Headers map[string]string `json:"headers,omitempty" jsonschema:"description=HTTP headers for HTTP/SSE MCP servers"`
}

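// LSPConfig describes how to start a single LSP server and which files it handles.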
type LSPConfig struct {
	Disabled bool `json:"disabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
	Command string `json:"command,omitempty" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
	Args []string `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
	Env map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the LSP server command"`
	FileTypes []string `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
	RootMarkers []string `json:"root_markers,omitempty" jsonschema:"description=Files or directories that indicate the project root,example=go.mod,example=package.json,example=Cargo.toml"`
	InitOptions map[string]any `json:"init_options,omitempty" jsonschema:"description=Initialization options passed to the LSP server during initialize request"`
	Options map[string]any `json:"options,omitempty" jsonschema:"description=LSP server-specific settings passed during initialization"`
}

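// TUIOptions holds settings for the terminal user interface.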
type TUIOptions struct {
	CompactMode bool `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
	DiffMode string `json:"diff_mode,omitempty" jsonschema:"description=Diff mode for the TUI interface,enum=unified,enum=split"`
	// Here we can add themes later or any other TUI-related options.

	Completions Completions `json:"completions,omitzero" jsonschema:"description=Completions UI options"`
}

// Completions defines options for the completions UI.
type Completions struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

func (c Completions) Limits() (depth, items int) {
	return ptrValOr(c.MaxDepth, 0), ptrValOr(c.MaxItems, 0)
}

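// Permissions controls which tools may run without prompting the user.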
type Permissions struct {
	AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"description=List of tools that don't require permission prompts,example=bash,example=view"` // Tools that don't require permission prompts
	SkipRequests bool `json:"-"` // Automatically accept all permissions (YOLO mode)
}

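// TrailerStyle selects the attribution trailer added to generated commits.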
type TrailerStyle string

const (
	TrailerStyleNone TrailerStyle = "none"
	TrailerStyleCoAuthoredBy TrailerStyle = "co-authored-by"
	TrailerStyleAssistedBy TrailerStyle = "assisted-by"
)

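// Attribution controls how Crush credits itself in generated commits, issues, and PRs.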
type Attribution struct {
	TrailerStyle TrailerStyle `json:"trailer_style,omitempty" jsonschema:"description=Style of attribution trailer to add to commits,enum=none,enum=co-authored-by,enum=assisted-by,default=assisted-by"`
	CoAuthoredBy *bool `json:"co_authored_by,omitempty" jsonschema:"description=Deprecated: use trailer_style instead"`
	GeneratedWith bool `json:"generated_with,omitempty" jsonschema:"description=Add Generated with Crush line to commit messages and issues and PRs,default=true"`
}

// JSONSchemaExtend marks the co_authored_by field as deprecated in the schema.
func (Attribution) JSONSchemaExtend(schema *jsonschema.Schema) {
	if schema.Properties != nil {
		if prop, ok := schema.Properties.Get("co_authored_by"); ok {
			prop.Deprecated = true
		}
	}
}

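// Options holds general application options.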
type Options struct {
	ContextPaths []string `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
	TUI *TUIOptions `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
	Debug bool `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
	DebugLSP bool `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
	DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
	DataDirectory string `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
	DisabledTools []string `json:"disabled_tools,omitempty" jsonschema:"description=List of built-in tools to disable and hide from the agent,example=bash,example=sourcegraph"`
	DisableProviderAutoUpdate bool `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
	Attribution *Attribution `json:"attribution,omitempty" jsonschema:"description=Attribution settings for generated content"`
	DisableMetrics bool `json:"disable_metrics,omitempty" jsonschema:"description=Disable sending metrics,default=false"`
	InitializeAs string `json:"initialize_as,omitempty" jsonschema:"description=Name of the context file to create/update during project initialization,default=AGENTS.md,example=AGENTS.md,example=CRUSH.md,example=CLAUDE.md,example=docs/LLMs.md"`
}

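// MCPs maps MCP server names to their configurations.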
type MCPs map[string]MCPConfig

type MCP struct {
	Name string `json:"name"`
	MCP MCPConfig `json:"mcp"`
}

func (m MCPs) Sorted() []MCP {
	sorted := make([]MCP, 0, len(m))
	for k, v := range m {
		sorted = append(sorted, MCP{
			Name: k,
			MCP: v,
		})
	}
	slices.SortFunc(sorted, func(a, b MCP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

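// LSPs maps LSP server names to their configurations.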
type LSPs map[string]LSPConfig

type LSP struct {
	Name string `json:"name"`
	LSP LSPConfig `json:"lsp"`
}

func (l LSPs) Sorted() []LSP {
	sorted := make([]LSP, 0, len(l))
	for k, v := range l {
		sorted = append(sorted, LSP{
			Name: k,
			LSP: v,
		})
	}
	slices.SortFunc(sorted, func(a, b LSP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

func (l LSPConfig) ResolvedEnv() []string {
	return resolveEnvs(l.Env)
}

func (m MCPConfig) ResolvedEnv() []string {
	return resolveEnvs(m.Env)
}

func (m MCPConfig) ResolvedHeaders() map[string]string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range m.Headers {
		var err error
		m.Headers[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving header variable", "error", err, "variable", e, "value", v)
			continue
		}
	}
	return m.Headers
}

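// Agent describes a single agent: its identity, model slot, and the tools and MCP servers it may use.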
type Agent struct {
	ID string `json:"id,omitempty"`
	Name string `json:"name,omitempty"`
	Description string `json:"description,omitempty"`
	// Marks the agent as disabled.
	Disabled bool `json:"disabled,omitempty"`

	Model SelectedModelType `json:"model" jsonschema:"required,description=The model type to use for this agent,enum=large,enum=small,default=large"`

	// The tools available to the agent.
	// If this is nil, all tools are available.
	AllowedTools []string `json:"allowed_tools,omitempty"`

	// Which MCP servers are available to this agent.
	// If this is empty, all MCP servers are available.
	// Each string slice lists the tools from that MCP server the agent may use;
	// if the slice is nil, all of that server's tools are available.
	AllowedMCP map[string][]string `json:"allowed_mcp,omitempty"`

	// Overrides the context paths for this agent.
	ContextPaths []string `json:"context_paths,omitempty"`
}

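// Tools holds per-tool configuration.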
type Tools struct {
	Ls ToolLs `json:"ls,omitzero"`
}

type ToolLs struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

func (t ToolLs) Limits() (depth, items int) {
	return ptrValOr(t.MaxDepth, 0), ptrValOr(t.MaxItems, 0)
}

// Config holds the configuration for crush.
type Config struct {
	Schema string `json:"$schema,omitempty"`

	// We currently only support large/small as values here.
	Models map[SelectedModelType]SelectedModel `json:"models,omitempty" jsonschema:"description=Model configurations for different model types,example={\"large\":{\"model\":\"gpt-4o\",\"provider\":\"openai\"}}"`
	// Recently used models stored in the data directory config.
	RecentModels map[SelectedModelType][]SelectedModel `json:"recent_models,omitempty" jsonschema:"description=Recently used models sorted by most recent first"`

	// The providers that are configured.
	Providers *csync.Map[string, ProviderConfig] `json:"providers,omitempty" jsonschema:"description=AI provider configurations"`

	MCP MCPs `json:"mcp,omitempty" jsonschema:"description=Model Context Protocol server configurations"`

	LSP LSPs `json:"lsp,omitempty" jsonschema:"description=Language Server Protocol configurations"`

	Options *Options `json:"options,omitempty" jsonschema:"description=General application options"`

	Permissions *Permissions `json:"permissions,omitempty" jsonschema:"description=Permission settings for tool usage"`

	Tools Tools `json:"tools,omitzero" jsonschema:"description=Tool configurations"`

	Agents map[string]Agent `json:"-"`

	// Internal
	workingDir string `json:"-"`
	// TODO: find a better way to do this; it should probably not be part of the config.
	resolver VariableResolver
	dataConfigDir string `json:"-"`
	knownProviders []catwalk.Provider `json:"-"`
}

func (c *Config) WorkingDir() string {
	return c.workingDir
}

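// EnabledProviders returns all providers that are not disabled.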
func (c *Config) EnabledProviders() []ProviderConfig {
	var enabled []ProviderConfig
	for p := range c.Providers.Seq() {
		if !p.Disable {
			enabled = append(enabled, p)
		}
	}
	return enabled
}

// IsConfigured returns true if at least one provider is configured.
func (c *Config) IsConfigured() bool {
	return len(c.EnabledProviders()) > 0
}

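// GetModel returns the model with the given ID from the given provider, or nil if it is not configured.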
func (c *Config) GetModel(provider, model string) *catwalk.Model {
	if providerConfig, ok := c.Providers.Get(provider); ok {
		for _, m := range providerConfig.Models {
			if m.ID == model {
				return &m
			}
		}
	}
	return nil
}

func (c *Config) GetProviderForModel(modelType SelectedModelType) *ProviderConfig {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	if providerConfig, ok := c.Providers.Get(model.Provider); ok {
		return &providerConfig
	}
	return nil
}

func (c *Config) GetModelByType(modelType SelectedModelType) *catwalk.Model {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) LargeModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeLarge]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) SmallModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeSmall]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

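// SetCompactMode toggles compact mode for the TUI and persists the change to the data directory config.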
func (c *Config) SetCompactMode(enabled bool) error {
	if c.Options == nil {
		c.Options = &Options{}
	}
	if c.Options.TUI == nil {
		c.Options.TUI = &TUIOptions{}
	}
	c.Options.TUI.CompactMode = enabled
	return c.SetConfigField("options.tui.compact_mode", enabled)
}

func (c *Config) Resolve(key string) (string, error) {
	if c.resolver == nil {
		return "", fmt.Errorf("no variable resolver configured")
	}
	return c.resolver.ResolveValue(key)
}

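// UpdatePreferredModel sets the preferred model for the given model type, persists it, and records it in the recent models list.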
func (c *Config) UpdatePreferredModel(modelType SelectedModelType, model SelectedModel) error {
	c.Models[modelType] = model
	if err := c.SetConfigField(fmt.Sprintf("models.%s", modelType), model); err != nil {
		return fmt.Errorf("failed to update preferred model: %w", err)
	}
	if err := c.recordRecentModel(modelType, model); err != nil {
		return err
	}
	return nil
}

func (c *Config) HasConfigField(key string) bool {
	data, err := os.ReadFile(c.dataConfigDir)
	if err != nil {
		return false
	}
	return gjson.Get(string(data), key).Exists()
}

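// SetConfigField writes a single field to the data directory config file, creating the file if it does not exist.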
func (c *Config) SetConfigField(key string, value any) error {
	// read the data
	data, err := os.ReadFile(c.dataConfigDir)
	if err != nil {
		if os.IsNotExist(err) {
			data = []byte("{}")
		} else {
			return fmt.Errorf("failed to read config file: %w", err)
		}
	}

	newValue, err := sjson.Set(string(data), key, value)
	if err != nil {
		return fmt.Errorf("failed to set config field %s: %w", key, err)
	}
	if err := os.WriteFile(c.dataConfigDir, []byte(newValue), 0o600); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}
	return nil
}

// RefreshOAuthToken refreshes the OAuth token for the given provider.
func (c *Config) RefreshOAuthToken(ctx context.Context, providerID string) error {
	providerConfig, exists := c.Providers.Get(providerID)
	if !exists {
		return fmt.Errorf("provider %s not found", providerID)
	}

	if providerConfig.OAuthToken == nil {
		return fmt.Errorf("provider %s does not have an OAuth token", providerID)
	}

	var newToken *oauth.Token
	var refreshErr error
	switch providerID {
	case string(catwalk.InferenceProviderAnthropic):
		newToken, refreshErr = claude.RefreshToken(ctx, providerConfig.OAuthToken.RefreshToken)
	case string(catwalk.InferenceProviderCopilot):
		newToken, refreshErr = copilot.RefreshToken(ctx, providerConfig.OAuthToken.RefreshToken)
	case hyperp.Name:
		newToken, refreshErr = hyper.ExchangeToken(ctx, providerConfig.OAuthToken.RefreshToken)
	default:
		return fmt.Errorf("OAuth refresh not supported for provider %s", providerID)
	}
	if refreshErr != nil {
		return fmt.Errorf("failed to refresh OAuth token for provider %s: %w", providerID, refreshErr)
	}

	slog.Info("Successfully refreshed OAuth token", "provider", providerID)
	providerConfig.OAuthToken = newToken

	switch providerID {
	case string(catwalk.InferenceProviderAnthropic):
		providerConfig.APIKey = fmt.Sprintf("Bearer %s", newToken.AccessToken)
		providerConfig.SetupClaudeCode()
	case string(catwalk.InferenceProviderCopilot):
		providerConfig.APIKey = newToken.AccessToken
		providerConfig.SetupGitHubCopilot()
	}

	c.Providers.Set(providerID, providerConfig)

	if err := cmp.Or(
		c.SetConfigField(fmt.Sprintf("providers.%s.api_key", providerID), newToken.AccessToken),
		c.SetConfigField(fmt.Sprintf("providers.%s.oauth", providerID), newToken),
	); err != nil {
		return fmt.Errorf("failed to persist refreshed token: %w", err)
	}

	return nil
}

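// SetProviderAPIKey stores an API key or OAuth token for the given provider, creating the provider config from the known providers list if needed.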
func (c *Config) SetProviderAPIKey(providerID string, apiKey any) error {
	var providerConfig ProviderConfig
	var exists bool
	var setKeyOrToken func()

	switch v := apiKey.(type) {
	case string:
		if err := c.SetConfigField(fmt.Sprintf("providers.%s.api_key", providerID), v); err != nil {
			return fmt.Errorf("failed to save api key to config file: %w", err)
		}
		setKeyOrToken = func() { providerConfig.APIKey = v }
	case *oauth.Token:
		if err := cmp.Or(
			c.SetConfigField(fmt.Sprintf("providers.%s.api_key", providerID), v.AccessToken),
			c.SetConfigField(fmt.Sprintf("providers.%s.oauth", providerID), v),
		); err != nil {
			return err
		}
		setKeyOrToken = func() {
			providerConfig.APIKey = v.AccessToken
			providerConfig.OAuthToken = v
			switch providerID {
			case string(catwalk.InferenceProviderAnthropic):
				providerConfig.APIKey = fmt.Sprintf("Bearer %s", v.AccessToken)
				providerConfig.SetupClaudeCode()
			case string(catwalk.InferenceProviderCopilot):
				providerConfig.SetupGitHubCopilot()
			}
		}
	default:
		// Guard against calling a nil setKeyOrToken below.
		return fmt.Errorf("unsupported API key type %T for provider %s", apiKey, providerID)
	}

	providerConfig, exists = c.Providers.Get(providerID)
	if exists {
		setKeyOrToken()
		c.Providers.Set(providerID, providerConfig)
		return nil
	}

	var foundProvider *catwalk.Provider
	for _, p := range c.knownProviders {
		if string(p.ID) == providerID {
			foundProvider = &p
			break
		}
	}

	if foundProvider == nil {
		return fmt.Errorf("provider with ID %s not found in known providers", providerID)
	}

	// Create a new provider config based on the known provider.
	providerConfig = ProviderConfig{
		ID: providerID,
		Name: foundProvider.Name,
		BaseURL: foundProvider.APIEndpoint,
		Type: foundProvider.Type,
		Disable: false,
		ExtraHeaders: make(map[string]string),
		ExtraParams: make(map[string]string),
		Models: foundProvider.Models,
	}
	setKeyOrToken()

	// Store the updated provider config.
	c.Providers.Set(providerID, providerConfig)
	return nil
}

const maxRecentModelsPerType = 5

func (c *Config) recordRecentModel(modelType SelectedModelType, model SelectedModel) error {
	if model.Provider == "" || model.Model == "" {
		return nil
	}

	if c.RecentModels == nil {
		c.RecentModels = make(map[SelectedModelType][]SelectedModel)
	}

	eq := func(a, b SelectedModel) bool {
		return a.Provider == b.Provider && a.Model == b.Model
	}

	entry := SelectedModel{
		Provider: model.Provider,
		Model: model.Model,
	}

	current := c.RecentModels[modelType]
	withoutCurrent := slices.DeleteFunc(slices.Clone(current), func(existing SelectedModel) bool {
		return eq(existing, entry)
	})

	updated := append([]SelectedModel{entry}, withoutCurrent...)
	if len(updated) > maxRecentModelsPerType {
		updated = updated[:maxRecentModelsPerType]
	}

	if slices.EqualFunc(current, updated, eq) {
		return nil
	}

	c.RecentModels[modelType] = updated

	if err := c.SetConfigField(fmt.Sprintf("recent_models.%s", modelType), updated); err != nil {
		return fmt.Errorf("failed to persist recent models: %w", err)
	}

	return nil
}

func allToolNames() []string {
	return []string{
		"agent",
		"bash",
		"job_output",
		"job_kill",
		"download",
		"edit",
		"multiedit",
		"lsp_diagnostics",
		"lsp_references",
		"fetch",
		"agentic_fetch",
		"glob",
		"grep",
		"ls",
		"sourcegraph",
		"todos",
		"view",
		"write",
	}
}

func resolveAllowedTools(allTools []string, disabledTools []string) []string {
	if disabledTools == nil {
		return allTools
	}
	// Filter out disabled tools (exclude mode).
	return filterSlice(allTools, disabledTools, false)
}

func resolveReadOnlyTools(tools []string) []string {
	readOnlyTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
	// Keep only the allowed tools that are in the read-only set (include mode).
	return filterSlice(tools, readOnlyTools, true)
}

func filterSlice(data []string, mask []string, include bool) []string {
	filtered := []string{}
	for _, s := range data {
		// If include is true, keep items that ARE in the mask;
		// if include is false, keep items that are NOT in the mask.
		if include == slices.Contains(mask, s) {
			filtered = append(filtered, s)
		}
	}
	return filtered
}

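// SetupAgents builds the default coder and task agents from the configured options.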
func (c *Config) SetupAgents() {
	allowedTools := resolveAllowedTools(allToolNames(), c.Options.DisabledTools)

	agents := map[string]Agent{
		AgentCoder: {
			ID: AgentCoder,
			Name: "Coder",
			Description: "An agent that helps with executing coding tasks.",
			Model: SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: allowedTools,
		},

		AgentTask: {
			ID: AgentTask,
			Name: "Task",
			Description: "An agent that helps with searching for context and finding implementation details.",
			Model: SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: resolveReadOnlyTools(allowedTools),
			// No MCPs or LSPs by default.
			AllowedMCP: map[string][]string{},
		},
	}
	c.Agents = agents
}

func (c *Config) Resolver() VariableResolver {
	return c.resolver
}

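// TestConnection performs a lightweight authenticated request against the provider's API to verify the credentials and endpoint.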
func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
	testURL := ""
	headers := make(map[string]string)
	apiKey, _ := resolver.ResolveValue(c.APIKey)
	switch c.Type {
	case catwalk.TypeOpenAI, catwalk.TypeOpenAICompat, catwalk.TypeOpenRouter:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.openai.com/v1"
		}
		if c.ID == string(catwalk.InferenceProviderOpenRouter) {
			testURL = baseURL + "/credits"
		} else {
			testURL = baseURL + "/models"
		}
		headers["Authorization"] = "Bearer " + apiKey
	case catwalk.TypeAnthropic:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.anthropic.com/v1"
		}
		testURL = baseURL + "/models"
		// TODO: replace with const when catwalk is released
		if c.ID == "kimi-coding" {
			testURL = baseURL + "/v1/models"
		}
		headers["x-api-key"] = apiKey
		headers["anthropic-version"] = "2023-06-01"
	case catwalk.TypeGoogle:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://generativelanguage.googleapis.com"
		}
		testURL = baseURL + "/v1beta/models?key=" + url.QueryEscape(apiKey)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	client := &http.Client{}
	req, err := http.NewRequestWithContext(ctx, "GET", testURL, nil)
	if err != nil {
		return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
	}
	for k, v := range headers {
		req.Header.Set(k, v)
	}
	for k, v := range c.ExtraHeaders {
		req.Header.Set(k, v)
	}
	resp, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("failed to connect to provider %s: %w", c.ID, err)
	}
	defer resp.Body.Close()
	if c.ID == string(catwalk.InferenceProviderZAI) {
		// For z.ai, only treat a 401 response as a failure.
		if resp.StatusCode == http.StatusUnauthorized {
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, resp.Status)
		}
	} else if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("failed to connect to provider %s: %s", c.ID, resp.Status)
	}
	return nil
}

func resolveEnvs(envs map[string]string) []string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range envs {
		var err error
		envs[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving environment variable", "error", err, "variable", e, "value", v)
			continue
		}
	}

	res := make([]string, 0, len(envs))
	for k, v := range envs {
		res = append(res, fmt.Sprintf("%s=%s", k, v))
	}
	return res
}

func ptrValOr[T any](t *T, el T) T {
	if t == nil {
		return el
	}
	return *t
}