config.go

package config

import (
	"cmp"
	"context"
	"fmt"
	"log/slog"
	"maps"
	"net/http"
	"net/url"
	"os"
	"slices"
	"strings"
	"time"

	"github.com/charmbracelet/catwalk/pkg/catwalk"
	hyperp "github.com/charmbracelet/crush/internal/agent/hyper"
	"github.com/charmbracelet/crush/internal/csync"
	"github.com/charmbracelet/crush/internal/env"
	"github.com/charmbracelet/crush/internal/oauth"
	"github.com/charmbracelet/crush/internal/oauth/claude"
	"github.com/charmbracelet/crush/internal/oauth/copilot"
	"github.com/charmbracelet/crush/internal/oauth/hyper"
	"github.com/invopop/jsonschema"
	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

const (
	appName              = "crush"
	defaultDataDirectory = ".crush"
	defaultInitializeAs  = "AGENTS.md"
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"GEMINI.md",
	"gemini.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
	"AGENTS.md",
	"agents.md",
	"Agents.md",
}

type SelectedModelType string

const (
	SelectedModelTypeLarge SelectedModelType = "large"
	SelectedModelTypeSmall SelectedModelType = "small"
)

const (
	AgentCoder string = "coder"
	AgentTask  string = "task"
)

type SelectedModel struct {
	// The model id as used by the provider API.
	// Required.
	Model string `json:"model" jsonschema:"required,description=The model ID as used by the provider API,example=gpt-4o"`
	// The model provider, same as the key/id used in the providers config.
	// Required.
	Provider string `json:"provider" jsonschema:"required,description=The model provider ID that matches a key in the providers config,example=openai"`

	// Only used by models that use the openai provider and need this set.
	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"description=Reasoning effort level for OpenAI models that support it,enum=low,enum=medium,enum=high"`

	// Used by anthropic models that can reason to indicate if the model should think.
	Think bool `json:"think,omitempty" jsonschema:"description=Enable thinking mode for Anthropic models that support reasoning"`

	// Overrides the default model configuration.
	MaxTokens        int64    `json:"max_tokens,omitempty" jsonschema:"description=Maximum number of tokens for model responses,maximum=200000,example=4096"`
	Temperature      *float64 `json:"temperature,omitempty" jsonschema:"description=Sampling temperature,minimum=0,maximum=1,example=0.7"`
	TopP             *float64 `json:"top_p,omitempty" jsonschema:"description=Top-p (nucleus) sampling parameter,minimum=0,maximum=1,example=0.9"`
	TopK             *int64   `json:"top_k,omitempty" jsonschema:"description=Top-k sampling parameter"`
	FrequencyPenalty *float64 `json:"frequency_penalty,omitempty" jsonschema:"description=Frequency penalty to reduce repetition"`
	PresencePenalty  *float64 `json:"presence_penalty,omitempty" jsonschema:"description=Presence penalty to increase topic diversity"`

	// Override provider specific options.
	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for the model"`
}

type ProviderConfig struct {
	// The provider's id.
	ID string `json:"id,omitempty" jsonschema:"description=Unique identifier for the provider,example=openai"`
	// The provider's name, used for display purposes.
	Name string `json:"name,omitempty" jsonschema:"description=Human-readable name for the provider,example=OpenAI"`
	// The provider's API endpoint.
	BaseURL string `json:"base_url,omitempty" jsonschema:"description=Base URL for the provider's API,format=uri,example=https://api.openai.com/v1"`
	// The provider type, e.g. "openai", "anthropic", etc. If empty, it defaults to openai.
	Type catwalk.Type `json:"type,omitempty" jsonschema:"description=Provider type that determines the API format,enum=openai,enum=openai-compat,enum=anthropic,enum=gemini,enum=azure,enum=vertexai,default=openai"`
	// The provider's API key.
	APIKey string `json:"api_key,omitempty" jsonschema:"description=API key for authentication with the provider,example=$OPENAI_API_KEY"`
	// The original API key template before resolution (for re-resolution on auth errors).
	APIKeyTemplate string `json:"-"`
	// OAuthToken for providers that use OAuth2 authentication.
	OAuthToken *oauth.Token `json:"oauth,omitempty" jsonschema:"description=OAuth2 token for authentication with the provider"`
	// Marks the provider as disabled.
	Disable bool `json:"disable,omitempty" jsonschema:"description=Whether this provider is disabled,default=false"`

	// Custom system prompt prefix.
	SystemPromptPrefix string `json:"system_prompt_prefix,omitempty" jsonschema:"description=Custom prefix to add to system prompts for this provider"`

	// Extra headers to send with each request to the provider.
	ExtraHeaders map[string]string `json:"extra_headers,omitempty" jsonschema:"description=Additional HTTP headers to send with requests"`
	// Extra fields to include in request bodies (only works with OpenAI-compatible providers).
	ExtraBody map[string]any `json:"extra_body,omitempty" jsonschema:"description=Additional fields to include in request bodies, only works with openai-compatible providers"`

	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for this provider"`

	// Used to pass extra parameters to the provider.
	ExtraParams map[string]string `json:"-"`

	// The provider's models.
	Models []catwalk.Model `json:"models,omitempty" jsonschema:"description=List of models available from this provider"`
}

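// SetupClaudeCode configures the provider for Claude Code OAuth access by
// setting the Claude Code system prompt prefix and the required Anthropic
// version and beta headers.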
func (pc *ProviderConfig) SetupClaudeCode() {
	if pc.ExtraHeaders == nil {
		pc.ExtraHeaders = make(map[string]string)
	}
	pc.SystemPromptPrefix = "You are Claude Code, Anthropic's official CLI for Claude."
	pc.ExtraHeaders["anthropic-version"] = "2023-06-01"

	value := pc.ExtraHeaders["anthropic-beta"]
	const want = "oauth-2025-04-20"
	if !strings.Contains(value, want) {
		if value != "" {
			value += ","
		}
		value += want
	}
	pc.ExtraHeaders["anthropic-beta"] = value
}

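// SetupGitHubCopilot adds the extra HTTP headers required by the GitHub
// Copilot API to the provider configuration.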
func (pc *ProviderConfig) SetupGitHubCopilot() {
	if pc.ExtraHeaders == nil {
		pc.ExtraHeaders = make(map[string]string)
	}
	maps.Copy(pc.ExtraHeaders, copilot.Headers())
}

type MCPType string

const (
	MCPStdio MCPType = "stdio"
	MCPSSE   MCPType = "sse"
	MCPHttp  MCPType = "http"
)

type MCPConfig struct {
	Command       string            `json:"command,omitempty" jsonschema:"description=Command to execute for stdio MCP servers,example=npx"`
	Env           map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the MCP server"`
	Args          []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the MCP server command"`
	Type          MCPType           `json:"type" jsonschema:"required,description=Type of MCP connection,enum=stdio,enum=sse,enum=http,default=stdio"`
	URL           string            `json:"url,omitempty" jsonschema:"description=URL for HTTP or SSE MCP servers,format=uri,example=http://localhost:3000/mcp"`
	Disabled      bool              `json:"disabled,omitempty" jsonschema:"description=Whether this MCP server is disabled,default=false"`
	DisabledTools []string          `json:"disabled_tools,omitempty" jsonschema:"description=List of tools from this MCP server to disable,example=get-library-doc"`
	Timeout       int               `json:"timeout,omitempty" jsonschema:"description=Timeout in seconds for MCP server connections,default=15,example=30,example=60,example=120"`

	// TODO: maybe make it possible to get the value from the env
	Headers map[string]string `json:"headers,omitempty" jsonschema:"description=HTTP headers for HTTP/SSE MCP servers"`
}

type LSPConfig struct {
	Disabled    bool              `json:"disabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
	Command     string            `json:"command,omitempty" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
	Args        []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
	Env         map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set to the LSP server command"`
	FileTypes   []string          `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
	RootMarkers []string          `json:"root_markers,omitempty" jsonschema:"description=Files or directories that indicate the project root,example=go.mod,example=package.json,example=Cargo.toml"`
	InitOptions map[string]any    `json:"init_options,omitempty" jsonschema:"description=Initialization options passed to the LSP server during initialize request"`
	Options     map[string]any    `json:"options,omitempty" jsonschema:"description=LSP server-specific settings passed during initialization"`
}

type TUIOptions struct {
	CompactMode bool   `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
	DiffMode    string `json:"diff_mode,omitempty" jsonschema:"description=Diff mode for the TUI interface,enum=unified,enum=split"`
	// Here we can add themes or any other TUI-related options later.

	Completions Completions `json:"completions,omitzero" jsonschema:"description=Completions UI options"`
}

// Completions defines options for the completions UI.
type Completions struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

func (c Completions) Limits() (depth, items int) {
	return ptrValOr(c.MaxDepth, 0), ptrValOr(c.MaxItems, 0)
}

type Permissions struct {
	AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"description=List of tools that don't require permission prompts,example=bash,example=view"` // Tools that don't require permission prompts
	SkipRequests bool     `json:"-"`                                                                                                                              // Automatically accept all permissions (YOLO mode)
}

type TrailerStyle string

const (
	TrailerStyleNone         TrailerStyle = "none"
	TrailerStyleCoAuthoredBy TrailerStyle = "co-authored-by"
	TrailerStyleAssistedBy   TrailerStyle = "assisted-by"
)

type Attribution struct {
	TrailerStyle  TrailerStyle `json:"trailer_style,omitempty" jsonschema:"description=Style of attribution trailer to add to commits,enum=none,enum=co-authored-by,enum=assisted-by,default=assisted-by"`
	CoAuthoredBy  *bool        `json:"co_authored_by,omitempty" jsonschema:"description=Deprecated: use trailer_style instead"`
	GeneratedWith bool         `json:"generated_with,omitempty" jsonschema:"description=Add Generated with Crush line to commit messages and issues and PRs,default=true"`
}

// JSONSchemaExtend marks the co_authored_by field as deprecated in the schema.
func (Attribution) JSONSchemaExtend(schema *jsonschema.Schema) {
	if schema.Properties != nil {
		if prop, ok := schema.Properties.Get("co_authored_by"); ok {
			prop.Deprecated = true
		}
	}
}

type Options struct {
	ContextPaths              []string     `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
	TUI                       *TUIOptions  `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
	Debug                     bool         `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
	DebugLSP                  bool         `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
	DisableAutoSummarize      bool         `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
	DataDirectory             string       `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
	DisabledTools             []string     `json:"disabled_tools,omitempty" jsonschema:"description=List of built-in tools to disable and hide from the agent,example=bash,example=sourcegraph"`
	DisableProviderAutoUpdate bool         `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
	Attribution               *Attribution `json:"attribution,omitempty" jsonschema:"description=Attribution settings for generated content"`
	DisableMetrics            bool         `json:"disable_metrics,omitempty" jsonschema:"description=Disable sending metrics,default=false"`
	InitializeAs              string       `json:"initialize_as,omitempty" jsonschema:"description=Name of the context file to create/update during project initialization,default=AGENTS.md,example=AGENTS.md,example=CRUSH.md,example=CLAUDE.md,example=docs/LLMs.md"`
}

type MCPs map[string]MCPConfig

type MCP struct {
	Name string    `json:"name"`
	MCP  MCPConfig `json:"mcp"`
}

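// Sorted returns the configured MCP servers as a list sorted by name.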
func (m MCPs) Sorted() []MCP {
	sorted := make([]MCP, 0, len(m))
	for k, v := range m {
		sorted = append(sorted, MCP{
			Name: k,
			MCP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b MCP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

type LSPs map[string]LSPConfig

type LSP struct {
	Name string    `json:"name"`
	LSP  LSPConfig `json:"lsp"`
}

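// Sorted returns the configured LSP servers as a list sorted by name.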
func (l LSPs) Sorted() []LSP {
	sorted := make([]LSP, 0, len(l))
	for k, v := range l {
		sorted = append(sorted, LSP{
			Name: k,
			LSP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b LSP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

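// ResolvedEnv resolves any variable references in the LSP server's
// environment and returns the result as a list of KEY=VALUE pairs.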
func (l LSPConfig) ResolvedEnv() []string {
	return resolveEnvs(l.Env)
}

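// ResolvedEnv resolves any variable references in the MCP server's
// environment and returns the result as a list of KEY=VALUE pairs.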
func (m MCPConfig) ResolvedEnv() []string {
	return resolveEnvs(m.Env)
}

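// ResolvedHeaders resolves any variable references in the MCP server's
// headers and returns the resulting header map.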
func (m MCPConfig) ResolvedHeaders() map[string]string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range m.Headers {
		var err error
		m.Headers[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving header variable", "error", err, "variable", e, "value", v)
			continue
		}
	}
	return m.Headers
}

type Agent struct {
	ID          string `json:"id,omitempty"`
	Name        string `json:"name,omitempty"`
	Description string `json:"description,omitempty"`
	Disabled    bool   `json:"disabled,omitempty"`

	Model SelectedModelType `json:"model" jsonschema:"required,description=The model type to use for this agent,enum=large,enum=small,default=large"`

	// The available tools for the agent.
	// If this is nil, all tools are available.
	AllowedTools []string `json:"allowed_tools,omitempty"`

	// This tells us which MCPs are available for this agent.
	// If this is empty, all MCPs are available.
	// The string array is the list of tools from the AllowedMCP the agent has available.
	// If the string array is nil, all tools from the AllowedMCP are available.
	AllowedMCP map[string][]string `json:"allowed_mcp,omitempty"`

	// Overrides the context paths for this agent.
	ContextPaths []string `json:"context_paths,omitempty"`
}

type Tools struct {
	Ls ToolLs `json:"ls,omitzero"`
}

type ToolLs struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

func (t ToolLs) Limits() (depth, items int) {
	return ptrValOr(t.MaxDepth, 0), ptrValOr(t.MaxItems, 0)
}

// Config holds the configuration for crush.
type Config struct {
	Schema string `json:"$schema,omitempty"`

	// We currently only support large/small as values here.
	Models map[SelectedModelType]SelectedModel `json:"models,omitempty" jsonschema:"description=Model configurations for different model types,example={\"large\":{\"model\":\"gpt-4o\",\"provider\":\"openai\"}}"`
	// Recently used models stored in the data directory config.
	RecentModels map[SelectedModelType][]SelectedModel `json:"recent_models,omitempty" jsonschema:"description=Recently used models sorted by most recent first"`

	// The providers that are configured.
	Providers *csync.Map[string, ProviderConfig] `json:"providers,omitempty" jsonschema:"description=AI provider configurations"`

	MCP MCPs `json:"mcp,omitempty" jsonschema:"description=Model Context Protocol server configurations"`

	LSP LSPs `json:"lsp,omitempty" jsonschema:"description=Language Server Protocol configurations"`

	Options *Options `json:"options,omitempty" jsonschema:"description=General application options"`

	Permissions *Permissions `json:"permissions,omitempty" jsonschema:"description=Permission settings for tool usage"`

	Tools Tools `json:"tools,omitzero" jsonschema:"description=Tool configurations"`

	Agents map[string]Agent `json:"-"`

	// Internal
	workingDir string `json:"-"`
	// TODO: find a better way to do this; it should probably not be part of the config
	resolver       VariableResolver
	dataConfigDir  string             `json:"-"`
	knownProviders []catwalk.Provider `json:"-"`
}

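// WorkingDir returns the configured working directory.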
func (c *Config) WorkingDir() string {
	return c.workingDir
}

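// EnabledProviders returns all configured providers that are not disabled.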
func (c *Config) EnabledProviders() []ProviderConfig {
	var enabled []ProviderConfig
	for p := range c.Providers.Seq() {
		if !p.Disable {
			enabled = append(enabled, p)
		}
	}
	return enabled
}

// IsConfigured returns true if at least one provider is configured.
func (c *Config) IsConfigured() bool {
	return len(c.EnabledProviders()) > 0
}

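// GetModel returns the model with the given ID from the given provider, or
// nil if the provider or model is not configured.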
func (c *Config) GetModel(provider, model string) *catwalk.Model {
	if providerConfig, ok := c.Providers.Get(provider); ok {
		for _, m := range providerConfig.Models {
			if m.ID == model {
				return &m
			}
		}
	}
	return nil
}

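// GetProviderForModel returns the provider configuration for the model
// selected for the given model type, or nil if none is configured.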
func (c *Config) GetProviderForModel(modelType SelectedModelType) *ProviderConfig {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	if providerConfig, ok := c.Providers.Get(model.Provider); ok {
		return &providerConfig
	}
	return nil
}

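// GetModelByType returns the model selected for the given model type, or nil
// if none is configured.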
func (c *Config) GetModelByType(modelType SelectedModelType) *catwalk.Model {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) LargeModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeLarge]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) SmallModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeSmall]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

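// SetCompactMode enables or disables compact mode for the TUI and persists
// the setting to the config file.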
func (c *Config) SetCompactMode(enabled bool) error {
	if c.Options == nil {
		c.Options = &Options{}
	}
	if c.Options.TUI == nil {
		c.Options.TUI = &TUIOptions{}
	}
	c.Options.TUI.CompactMode = enabled
	return c.SetConfigField("options.tui.compact_mode", enabled)
}

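// Resolve resolves the given value using the configured variable resolver.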
func (c *Config) Resolve(key string) (string, error) {
	if c.resolver == nil {
		return "", fmt.Errorf("no variable resolver configured")
	}
	return c.resolver.ResolveValue(key)
}

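// UpdatePreferredModel sets the selected model for the given model type,
// persists it to the config file, and records it in the recent models list.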
func (c *Config) UpdatePreferredModel(modelType SelectedModelType, model SelectedModel) error {
	c.Models[modelType] = model
	if err := c.SetConfigField(fmt.Sprintf("models.%s", modelType), model); err != nil {
		return fmt.Errorf("failed to update preferred model: %w", err)
	}
	if err := c.recordRecentModel(modelType, model); err != nil {
		return err
	}
	return nil
}

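// HasConfigField reports whether the given key is present in the data
// directory config file.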
func (c *Config) HasConfigField(key string) bool {
	data, err := os.ReadFile(c.dataConfigDir)
	if err != nil {
		return false
	}
	return gjson.Get(string(data), key).Exists()
}

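// SetConfigField sets the given key (an sjson path such as
// "options.tui.compact_mode") to the given value in the data directory config
// file, creating the file if it does not exist.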
func (c *Config) SetConfigField(key string, value any) error {
	// read the data
	data, err := os.ReadFile(c.dataConfigDir)
	if err != nil {
		if os.IsNotExist(err) {
			data = []byte("{}")
		} else {
			return fmt.Errorf("failed to read config file: %w", err)
		}
	}

	newValue, err := sjson.Set(string(data), key, value)
	if err != nil {
		return fmt.Errorf("failed to set config field %s: %w", key, err)
	}
	if err := os.WriteFile(c.dataConfigDir, []byte(newValue), 0o600); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}
	return nil
}

// RefreshOAuthToken refreshes the OAuth token for the given provider.
func (c *Config) RefreshOAuthToken(ctx context.Context, providerID string) error {
	providerConfig, exists := c.Providers.Get(providerID)
	if !exists {
		return fmt.Errorf("provider %s not found", providerID)
	}

	if providerConfig.OAuthToken == nil {
		return fmt.Errorf("provider %s does not have an OAuth token", providerID)
	}

	var newToken *oauth.Token
	var refreshErr error
	switch providerID {
	case string(catwalk.InferenceProviderAnthropic):
		newToken, refreshErr = claude.RefreshToken(ctx, providerConfig.OAuthToken.RefreshToken)
	case string(catwalk.InferenceProviderCopilot):
		newToken, refreshErr = copilot.RefreshToken(ctx, providerConfig.OAuthToken.RefreshToken)
	case hyperp.Name:
		newToken, refreshErr = hyper.ExchangeToken(ctx, providerConfig.OAuthToken.RefreshToken)
	default:
		return fmt.Errorf("OAuth refresh not supported for provider %s", providerID)
	}
	if refreshErr != nil {
		return fmt.Errorf("failed to refresh OAuth token for provider %s: %w", providerID, refreshErr)
	}

	slog.Info("Successfully refreshed OAuth token", "provider", providerID)
	providerConfig.OAuthToken = newToken

	switch providerID {
	case string(catwalk.InferenceProviderAnthropic):
		providerConfig.APIKey = fmt.Sprintf("Bearer %s", newToken.AccessToken)
		providerConfig.SetupClaudeCode()
	case string(catwalk.InferenceProviderCopilot):
		providerConfig.APIKey = newToken.AccessToken
		providerConfig.SetupGitHubCopilot()
	}

	c.Providers.Set(providerID, providerConfig)

	if err := cmp.Or(
		c.SetConfigField(fmt.Sprintf("providers.%s.api_key", providerID), newToken.AccessToken),
		c.SetConfigField(fmt.Sprintf("providers.%s.oauth", providerID), newToken),
	); err != nil {
		return fmt.Errorf("failed to persist refreshed token: %w", err)
	}

	return nil
}

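// SetProviderAPIKey stores the given API key (string) or OAuth token for the
// provider and persists it to the config file. If the provider is not yet
// configured, a new provider config is created from the known providers list.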
func (c *Config) SetProviderAPIKey(providerID string, apiKey any) error {
	var providerConfig ProviderConfig
	var exists bool
	var setKeyOrToken func()

	switch v := apiKey.(type) {
	case string:
		if err := c.SetConfigField(fmt.Sprintf("providers.%s.api_key", providerID), v); err != nil {
			return fmt.Errorf("failed to save api key to config file: %w", err)
		}
		setKeyOrToken = func() { providerConfig.APIKey = v }
	case *oauth.Token:
		if err := cmp.Or(
			c.SetConfigField(fmt.Sprintf("providers.%s.api_key", providerID), v.AccessToken),
			c.SetConfigField(fmt.Sprintf("providers.%s.oauth", providerID), v),
		); err != nil {
			return err
		}
		setKeyOrToken = func() {
			providerConfig.APIKey = v.AccessToken
			providerConfig.OAuthToken = v
			switch providerID {
			case string(catwalk.InferenceProviderAnthropic):
				providerConfig.APIKey = fmt.Sprintf("Bearer %s", v.AccessToken)
				providerConfig.SetupClaudeCode()
			case string(catwalk.InferenceProviderCopilot):
				providerConfig.SetupGitHubCopilot()
			}
		}
	default:
		return fmt.Errorf("unsupported API key type %T for provider %s", apiKey, providerID)
	}

	providerConfig, exists = c.Providers.Get(providerID)
	if exists {
		setKeyOrToken()
		c.Providers.Set(providerID, providerConfig)
		return nil
	}

	var foundProvider *catwalk.Provider
	for _, p := range c.knownProviders {
		if string(p.ID) == providerID {
			foundProvider = &p
			break
		}
	}

	if foundProvider != nil {
		// Create new provider config based on known provider
		providerConfig = ProviderConfig{
			ID:           providerID,
			Name:         foundProvider.Name,
			BaseURL:      foundProvider.APIEndpoint,
			Type:         foundProvider.Type,
			Disable:      false,
			ExtraHeaders: make(map[string]string),
			ExtraParams:  make(map[string]string),
			Models:       foundProvider.Models,
		}
		setKeyOrToken()
	} else {
		return fmt.Errorf("provider with ID %s not found in known providers", providerID)
	}
	// Store the updated provider config
	c.Providers.Set(providerID, providerConfig)
	return nil
}

const maxRecentModelsPerType = 5

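// recordRecentModel moves the given model to the front of the recent models
// list for the model type, deduplicates the list, caps it at
// maxRecentModelsPerType entries, and persists the result.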
func (c *Config) recordRecentModel(modelType SelectedModelType, model SelectedModel) error {
	if model.Provider == "" || model.Model == "" {
		return nil
	}

	if c.RecentModels == nil {
		c.RecentModels = make(map[SelectedModelType][]SelectedModel)
	}

	eq := func(a, b SelectedModel) bool {
		return a.Provider == b.Provider && a.Model == b.Model
	}

	entry := SelectedModel{
		Provider: model.Provider,
		Model:    model.Model,
	}

	current := c.RecentModels[modelType]
	withoutCurrent := slices.DeleteFunc(slices.Clone(current), func(existing SelectedModel) bool {
		return eq(existing, entry)
	})

	updated := append([]SelectedModel{entry}, withoutCurrent...)
	if len(updated) > maxRecentModelsPerType {
		updated = updated[:maxRecentModelsPerType]
	}

	if slices.EqualFunc(current, updated, eq) {
		return nil
	}

	c.RecentModels[modelType] = updated

	if err := c.SetConfigField(fmt.Sprintf("recent_models.%s", modelType), updated); err != nil {
		return fmt.Errorf("failed to persist recent models: %w", err)
	}

	return nil
}

func allToolNames() []string {
	return []string{
		"agent",
		"bash",
		"job_output",
		"job_kill",
		"download",
		"edit",
		"multiedit",
		"lsp_diagnostics",
		"lsp_references",
		"fetch",
		"agentic_fetch",
		"glob",
		"grep",
		"ls",
		"sourcegraph",
		"todos",
		"view",
		"write",
	}
}

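// resolveAllowedTools returns allTools with any disabled tools removed.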
func resolveAllowedTools(allTools []string, disabledTools []string) []string {
	if disabledTools == nil {
		return allTools
	}
	// filter out disabled tools (exclude mode)
	return filterSlice(allTools, disabledTools, false)
}

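// resolveReadOnlyTools returns only the tools from the given list that are
// read-only (glob, grep, ls, sourcegraph, view).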
func resolveReadOnlyTools(tools []string) []string {
	readOnlyTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
	// keep only the tools that are also in readOnlyTools (include mode)
	return filterSlice(tools, readOnlyTools, true)
}

func filterSlice(data []string, mask []string, include bool) []string {
	filtered := []string{}
	for _, s := range data {
		// if include is true, we include items that ARE in the mask
		// if include is false, we include items that are NOT in the mask
		if include == slices.Contains(mask, s) {
			filtered = append(filtered, s)
		}
	}
	return filtered
}

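// SetupAgents initializes the built-in coder and task agents using the
// configured context paths and the enabled tool set.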
func (c *Config) SetupAgents() {
	allowedTools := resolveAllowedTools(allToolNames(), c.Options.DisabledTools)

	agents := map[string]Agent{
		AgentCoder: {
			ID:           AgentCoder,
			Name:         "Coder",
			Description:  "An agent that helps with executing coding tasks.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: allowedTools,
		},

		AgentTask: {
			ID:           AgentTask,
			Name:         "Task",
			Description:  "An agent that helps with searching for context and finding implementation details.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: resolveReadOnlyTools(allowedTools),
			// NO MCPs or LSPs by default
			AllowedMCP: map[string][]string{},
		},
	}
	c.Agents = agents
}

func (c *Config) Resolver() VariableResolver {
	return c.resolver
}

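// TestConnection performs a lightweight request against the provider's API to
// verify that the base URL and credentials are usable.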
func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
	testURL := ""
	headers := make(map[string]string)
	apiKey, _ := resolver.ResolveValue(c.APIKey)
	switch c.Type {
	case catwalk.TypeOpenAI, catwalk.TypeOpenAICompat, catwalk.TypeOpenRouter:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.openai.com/v1"
		}
		if c.ID == string(catwalk.InferenceProviderOpenRouter) {
			testURL = baseURL + "/credits"
		} else {
			testURL = baseURL + "/models"
		}
		headers["Authorization"] = "Bearer " + apiKey
	case catwalk.TypeAnthropic:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.anthropic.com/v1"
		}
		testURL = baseURL + "/models"
		// TODO: replace with const when catwalk is released
		if c.ID == "kimi-coding" {
			testURL = baseURL + "/v1/models"
		}
		headers["x-api-key"] = apiKey
		headers["anthropic-version"] = "2023-06-01"
	case catwalk.TypeGoogle:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://generativelanguage.googleapis.com"
		}
		testURL = baseURL + "/v1beta/models?key=" + url.QueryEscape(apiKey)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	client := &http.Client{}
	req, err := http.NewRequestWithContext(ctx, "GET", testURL, nil)
	if err != nil {
		return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
	}
	for k, v := range headers {
		req.Header.Set(k, v)
	}
	for k, v := range c.ExtraHeaders {
		req.Header.Set(k, v)
	}
	resp, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("failed to connect to provider %s: %w", c.ID, err)
	}
	defer resp.Body.Close()
	if c.ID == string(catwalk.InferenceProviderZAI) {
		if resp.StatusCode == http.StatusUnauthorized {
			// for z.ai just check that the response is not a 401
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, resp.Status)
		}
	} else {
		if resp.StatusCode != http.StatusOK {
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, resp.Status)
		}
	}
	return nil
}

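// resolveEnvs resolves any variable references in the given environment map
// and returns the result as a list of KEY=VALUE pairs.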
func resolveEnvs(envs map[string]string) []string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range envs {
		var err error
		envs[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving environment variable", "error", err, "variable", e, "value", v)
			continue
		}
	}

	res := make([]string, 0, len(envs))
	for k, v := range envs {
		res = append(res, fmt.Sprintf("%s=%s", k, v))
	}
	return res
}

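// ptrValOr returns the value pointed to by t, or el if t is nil.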
func ptrValOr[T any](t *T, el T) T {
	if t == nil {
		return el
	}
	return *t
}