config.go

package config

import (
	"cmp"
	"context"
	"fmt"
	"log/slog"
	"net/http"
	"net/url"
	"os"
	"slices"
	"strings"
	"time"

	"github.com/charmbracelet/catwalk/pkg/catwalk"
	"github.com/charmbracelet/crush/internal/csync"
	"github.com/charmbracelet/crush/internal/env"
	"github.com/charmbracelet/crush/internal/oauth"
	"github.com/charmbracelet/crush/internal/oauth/claude"
	"github.com/invopop/jsonschema"
	"github.com/tidwall/sjson"
)

const (
	appName              = "crush"
	defaultDataDirectory = ".crush"
	defaultInitializeAs  = "AGENTS.md"
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"GEMINI.md",
	"gemini.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
	"AGENTS.md",
	"agents.md",
	"Agents.md",
}

type SelectedModelType string

const (
	SelectedModelTypeLarge SelectedModelType = "large"
	SelectedModelTypeSmall SelectedModelType = "small"
)

const (
	AgentCoder string = "coder"
	AgentTask  string = "task"
)

type SelectedModel struct {
	// The model id as used by the provider API.
	// Required.
	Model string `json:"model" jsonschema:"required,description=The model ID as used by the provider API,example=gpt-4o"`
	// The model provider, same as the key/id used in the providers config.
	// Required.
	Provider string `json:"provider" jsonschema:"required,description=The model provider ID that matches a key in the providers config,example=openai"`

	// Only used by models that use the openai provider and need this set.
	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"description=Reasoning effort level for OpenAI models that support it,enum=low,enum=medium,enum=high"`

	// Used by anthropic models that can reason to indicate if the model should think.
	Think bool `json:"think,omitempty" jsonschema:"description=Enable thinking mode for Anthropic models that support reasoning"`

	// Overrides the default model configuration.
	MaxTokens        int64    `json:"max_tokens,omitempty" jsonschema:"description=Maximum number of tokens for model responses,maximum=200000,example=4096"`
	Temperature      *float64 `json:"temperature,omitempty" jsonschema:"description=Sampling temperature,minimum=0,maximum=1,example=0.7"`
	TopP             *float64 `json:"top_p,omitempty" jsonschema:"description=Top-p (nucleus) sampling parameter,minimum=0,maximum=1,example=0.9"`
	TopK             *int64   `json:"top_k,omitempty" jsonschema:"description=Top-k sampling parameter"`
	FrequencyPenalty *float64 `json:"frequency_penalty,omitempty" jsonschema:"description=Frequency penalty to reduce repetition"`
	PresencePenalty  *float64 `json:"presence_penalty,omitempty" jsonschema:"description=Presence penalty to increase topic diversity"`

	// Override provider specific options.
	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for the model"`
}

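// An illustrative sketch of how SelectedModel appears in a config file, based
// on the json tags above (the provider and model values are examples only):
//
//	"models": {
//	  "large": {"provider": "openai", "model": "gpt-4o", "reasoning_effort": "medium"},
//	  "small": {"provider": "openai", "model": "gpt-4o-mini", "max_tokens": 4096}
//	}
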
type ProviderConfig struct {
	// The provider's id.
	ID string `json:"id,omitempty" jsonschema:"description=Unique identifier for the provider,example=openai"`
	// The provider's name, used for display purposes.
	Name string `json:"name,omitempty" jsonschema:"description=Human-readable name for the provider,example=OpenAI"`
	// The provider's API endpoint.
	BaseURL string `json:"base_url,omitempty" jsonschema:"description=Base URL for the provider's API,format=uri,example=https://api.openai.com/v1"`
	// The provider type, e.g. "openai", "anthropic", etc. If empty, it defaults to openai.
	Type catwalk.Type `json:"type,omitempty" jsonschema:"description=Provider type that determines the API format,enum=openai,enum=openai-compat,enum=anthropic,enum=gemini,enum=azure,enum=vertexai,default=openai"`
	// The provider's API key.
	APIKey string `json:"api_key,omitempty" jsonschema:"description=API key for authentication with the provider,example=$OPENAI_API_KEY"`
	// OAuthToken for providers that use OAuth2 authentication.
	OAuthToken *oauth.Token `json:"oauth,omitempty" jsonschema:"description=OAuth2 token for authentication with the provider"`
	// Marks the provider as disabled.
	Disable bool `json:"disable,omitempty" jsonschema:"description=Whether this provider is disabled,default=false"`

	// Custom system prompt prefix.
	SystemPromptPrefix string `json:"system_prompt_prefix,omitempty" jsonschema:"description=Custom prefix to add to system prompts for this provider"`

	// Extra headers to send with each request to the provider.
	ExtraHeaders map[string]string `json:"extra_headers,omitempty" jsonschema:"description=Additional HTTP headers to send with requests"`
	// Extra fields to include in request bodies; only works with OpenAI-compatible providers.
	ExtraBody map[string]any `json:"extra_body,omitempty" jsonschema:"description=Additional fields to include in request bodies, only works with openai-compatible providers"`

	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for this provider"`

	// Used to pass extra parameters to the provider.
	ExtraParams map[string]string `json:"-"`

	// The provider models.
	Models []catwalk.Model `json:"models,omitempty" jsonschema:"description=List of models available from this provider"`
}

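// An illustrative sketch of a custom entry in the providers map, based on the
// json tags above (the name, URL, env variable, and model are placeholders):
//
//	"providers": {
//	  "my-openai-compat": {
//	    "name": "My Provider",
//	    "type": "openai-compat",
//	    "base_url": "https://example.com/v1",
//	    "api_key": "$MY_PROVIDER_API_KEY",
//	    "models": [{"id": "my-model", "name": "My Model"}]
//	  }
//	}
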
func (pc *ProviderConfig) SetupClaudeCode() {
	pc.APIKey = fmt.Sprintf("Bearer %s", pc.OAuthToken.AccessToken)
	pc.SystemPromptPrefix = "You are Claude Code, Anthropic's official CLI for Claude."
	if pc.ExtraHeaders == nil {
		// Guard against provider configs loaded without extra_headers.
		pc.ExtraHeaders = make(map[string]string)
	}
	pc.ExtraHeaders["anthropic-version"] = "2023-06-01"

	value := pc.ExtraHeaders["anthropic-beta"]
	const want = "oauth-2025-04-20"
	if !strings.Contains(value, want) {
		if value != "" {
			value += ","
		}
		value += want
	}
	pc.ExtraHeaders["anthropic-beta"] = value
}

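// After SetupClaudeCode runs, per the assignments above the provider sends:
//
//	APIKey:            "Bearer <access token>"
//	anthropic-version: "2023-06-01"
//	anthropic-beta:    a list that contains "oauth-2025-04-20"
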
type MCPType string

const (
	MCPStdio MCPType = "stdio"
	MCPSSE   MCPType = "sse"
	MCPHttp  MCPType = "http"
)

type MCPConfig struct {
	Command  string            `json:"command,omitempty" jsonschema:"description=Command to execute for stdio MCP servers,example=npx"`
	Env      map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the MCP server"`
	Args     []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the MCP server command"`
	Type     MCPType           `json:"type" jsonschema:"required,description=Type of MCP connection,enum=stdio,enum=sse,enum=http,default=stdio"`
	URL      string            `json:"url,omitempty" jsonschema:"description=URL for HTTP or SSE MCP servers,format=uri,example=http://localhost:3000/mcp"`
	Disabled bool              `json:"disabled,omitempty" jsonschema:"description=Whether this MCP server is disabled,default=false"`
	Timeout  int               `json:"timeout,omitempty" jsonschema:"description=Timeout in seconds for MCP server connections,default=15,example=30,example=60,example=120"`

	// TODO: maybe make it possible to get the value from the env
	Headers map[string]string `json:"headers,omitempty" jsonschema:"description=HTTP headers for HTTP/SSE MCP servers"`
}

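// An illustrative sketch of stdio and http entries in the mcp map, based on
// the json tags above (server names, commands, and URLs are placeholders):
//
//	"mcp": {
//	  "filesystem": {"type": "stdio", "command": "npx", "args": ["some-mcp-server"]},
//	  "remote":     {"type": "http", "url": "http://localhost:3000/mcp", "timeout": 30}
//	}
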
type LSPConfig struct {
	Disabled    bool              `json:"disabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
	Command     string            `json:"command,omitempty" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
	Args        []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
	Env         map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set to the LSP server command"`
	FileTypes   []string          `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
	RootMarkers []string          `json:"root_markers,omitempty" jsonschema:"description=Files or directories that indicate the project root,example=go.mod,example=package.json,example=Cargo.toml"`
	InitOptions map[string]any    `json:"init_options,omitempty" jsonschema:"description=Initialization options passed to the LSP server during initialize request"`
	Options     map[string]any    `json:"options,omitempty" jsonschema:"description=LSP server-specific settings passed during initialization"`
}

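// An illustrative sketch of an lsp entry, based on the json tags above (the
// gopls values mirror the schema examples; adjust per server):
//
//	"lsp": {
//	  "go": {
//	    "command": "gopls",
//	    "filetypes": ["go", "mod"],
//	    "root_markers": ["go.mod"]
//	  }
//	}
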
type TUIOptions struct {
	CompactMode bool   `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
	DiffMode    string `json:"diff_mode,omitempty" jsonschema:"description=Diff mode for the TUI interface,enum=unified,enum=split"`
	// Here we can add themes later or any TUI related options.

	Completions Completions `json:"completions,omitzero" jsonschema:"description=Completions UI options"`
}

// Completions defines options for the completions UI.
type Completions struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

func (c Completions) Limits() (depth, items int) {
	return ptrValOr(c.MaxDepth, 0), ptrValOr(c.MaxItems, 0)
}

type Permissions struct {
	AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"description=List of tools that don't require permission prompts,example=bash,example=view"` // Tools that don't require permission prompts
	SkipRequests bool     `json:"-"`                                                                                                                              // Automatically accept all permissions (YOLO mode)
}

type TrailerStyle string

const (
	TrailerStyleNone         TrailerStyle = "none"
	TrailerStyleCoAuthoredBy TrailerStyle = "co-authored-by"
	TrailerStyleAssistedBy   TrailerStyle = "assisted-by"
)

type Attribution struct {
	TrailerStyle  TrailerStyle `json:"trailer_style,omitempty" jsonschema:"description=Style of attribution trailer to add to commits,enum=none,enum=co-authored-by,enum=assisted-by,default=assisted-by"`
	CoAuthoredBy  *bool        `json:"co_authored_by,omitempty" jsonschema:"description=Deprecated: use trailer_style instead"`
	GeneratedWith bool         `json:"generated_with,omitempty" jsonschema:"description=Add Generated with Crush line to commit messages and issues and PRs,default=true"`
}

// JSONSchemaExtend marks the co_authored_by field as deprecated in the schema.
func (Attribution) JSONSchemaExtend(schema *jsonschema.Schema) {
	if schema.Properties != nil {
		if prop, ok := schema.Properties.Get("co_authored_by"); ok {
			prop.Deprecated = true
		}
	}
}

type Options struct {
	ContextPaths              []string     `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
	TUI                       *TUIOptions  `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
	Debug                     bool         `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
	DebugLSP                  bool         `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
	DisableAutoSummarize      bool         `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
	DataDirectory             string       `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
	DisabledTools             []string     `json:"disabled_tools" jsonschema:"description=Tools to disable"`
	DisableProviderAutoUpdate bool         `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
	DisableAutoUpdate         bool         `json:"disable_auto_update,omitempty" jsonschema:"description=Disable automatic update checks,default=false"`
	Attribution               *Attribution `json:"attribution,omitempty" jsonschema:"description=Attribution settings for generated content"`
	DisableMetrics            bool         `json:"disable_metrics,omitempty" jsonschema:"description=Disable sending metrics,default=false"`
	InitializeAs              string       `json:"initialize_as,omitempty" jsonschema:"description=Name of the context file to create/update during project initialization,default=AGENTS.md,example=AGENTS.md,example=CRUSH.md,example=CLAUDE.md,example=docs/LLMs.md"`
}

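// An illustrative sketch of the options section, based on the json tags above
// (all values shown are examples):
//
//	"options": {
//	  "context_paths": ["CRUSH.md"],
//	  "debug": true,
//	  "disabled_tools": ["sourcegraph"],
//	  "initialize_as": "AGENTS.md",
//	  "tui": {"compact_mode": true, "diff_mode": "unified"}
//	}
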
type MCPs map[string]MCPConfig

type MCP struct {
	Name string    `json:"name"`
	MCP  MCPConfig `json:"mcp"`
}

func (m MCPs) Sorted() []MCP {
	sorted := make([]MCP, 0, len(m))
	for k, v := range m {
		sorted = append(sorted, MCP{
			Name: k,
			MCP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b MCP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

type LSPs map[string]LSPConfig

type LSP struct {
	Name string    `json:"name"`
	LSP  LSPConfig `json:"lsp"`
}

func (l LSPs) Sorted() []LSP {
	sorted := make([]LSP, 0, len(l))
	for k, v := range l {
		sorted = append(sorted, LSP{
			Name: k,
			LSP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b LSP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

func (l LSPConfig) ResolvedEnv() []string {
	return resolveEnvs(l.Env)
}

func (m MCPConfig) ResolvedEnv() []string {
	return resolveEnvs(m.Env)
}

func (m MCPConfig) ResolvedHeaders() map[string]string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range m.Headers {
		var err error
		m.Headers[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving header variable", "error", err, "variable", e, "value", v)
			continue
		}
	}
	return m.Headers
}

type Agent struct {
	ID          string `json:"id,omitempty"`
	Name        string `json:"name,omitempty"`
	Description string `json:"description,omitempty"`
	// Marks the agent as disabled.
	Disabled bool `json:"disabled,omitempty"`

	Model SelectedModelType `json:"model" jsonschema:"required,description=The model type to use for this agent,enum=large,enum=small,default=large"`

	// The tools available to the agent.
	// If nil, all tools are available.
	AllowedTools []string `json:"allowed_tools,omitempty"`

	// Which MCPs are available to this agent.
	// If empty, all MCPs are available.
	// The string slice lists the tools from that MCP the agent may use;
	// if the slice is nil, all tools from that MCP are available.
	AllowedMCP map[string][]string `json:"allowed_mcp,omitempty"`

	// Overrides the context paths for this agent.
	ContextPaths []string `json:"context_paths,omitempty"`
}

type Tools struct {
	Ls ToolLs `json:"ls,omitzero"`
}

type ToolLs struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

func (t ToolLs) Limits() (depth, items int) {
	return ptrValOr(t.MaxDepth, 0), ptrValOr(t.MaxItems, 0)
}

// Config holds the configuration for crush.
type Config struct {
	Schema string `json:"$schema,omitempty"`

	// We currently only support large/small as values here.
	Models map[SelectedModelType]SelectedModel `json:"models,omitempty" jsonschema:"description=Model configurations for different model types,example={\"large\":{\"model\":\"gpt-4o\",\"provider\":\"openai\"}}"`
	// Recently used models stored in the data directory config.
	RecentModels map[SelectedModelType][]SelectedModel `json:"recent_models,omitempty" jsonschema:"description=Recently used models sorted by most recent first"`

	// The providers that are configured.
	Providers *csync.Map[string, ProviderConfig] `json:"providers,omitempty" jsonschema:"description=AI provider configurations"`

	MCP MCPs `json:"mcp,omitempty" jsonschema:"description=Model Context Protocol server configurations"`

	LSP LSPs `json:"lsp,omitempty" jsonschema:"description=Language Server Protocol configurations"`

	Options *Options `json:"options,omitempty" jsonschema:"description=General application options"`

	Permissions *Permissions `json:"permissions,omitempty" jsonschema:"description=Permission settings for tool usage"`

	Tools Tools `json:"tools,omitzero" jsonschema:"description=Tool configurations"`

	Agents map[string]Agent `json:"-"`

	// Internal
	workingDir string `json:"-"`
	// TODO: find a better way to do this; it should probably not be part of the config
	resolver       VariableResolver
	dataConfigDir  string             `json:"-"`
	knownProviders []catwalk.Provider `json:"-"`
}

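// An illustrative sketch of a minimal config document exercising the fields
// above (provider and model IDs are placeholders):
//
//	{
//	  "models": {"large": {"provider": "anthropic", "model": "some-model"}},
//	  "options": {"debug": false},
//	  "permissions": {"allowed_tools": ["view", "grep"]}
//	}
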
func (c *Config) WorkingDir() string {
	return c.workingDir
}

func (c *Config) EnabledProviders() []ProviderConfig {
	var enabled []ProviderConfig
	for p := range c.Providers.Seq() {
		if !p.Disable {
			enabled = append(enabled, p)
		}
	}
	return enabled
}

// IsConfigured returns true if at least one provider is configured.
func (c *Config) IsConfigured() bool {
	return len(c.EnabledProviders()) > 0
}

func (c *Config) GetModel(provider, model string) *catwalk.Model {
	if providerConfig, ok := c.Providers.Get(provider); ok {
		for _, m := range providerConfig.Models {
			if m.ID == model {
				return &m
			}
		}
	}
	return nil
}

func (c *Config) GetProviderForModel(modelType SelectedModelType) *ProviderConfig {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	if providerConfig, ok := c.Providers.Get(model.Provider); ok {
		return &providerConfig
	}
	return nil
}

func (c *Config) GetModelByType(modelType SelectedModelType) *catwalk.Model {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) LargeModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeLarge]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) SmallModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeSmall]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) SetCompactMode(enabled bool) error {
	if c.Options == nil {
		c.Options = &Options{}
	}
	if c.Options.TUI == nil {
		// Guard against configs that never set any TUI options.
		c.Options.TUI = &TUIOptions{}
	}
	c.Options.TUI.CompactMode = enabled
	return c.SetConfigField("options.tui.compact_mode", enabled)
}

func (c *Config) Resolve(key string) (string, error) {
	if c.resolver == nil {
		return "", fmt.Errorf("no variable resolver configured")
	}
	return c.resolver.ResolveValue(key)
}

func (c *Config) UpdatePreferredModel(modelType SelectedModelType, model SelectedModel) error {
	c.Models[modelType] = model
	if err := c.SetConfigField(fmt.Sprintf("models.%s", modelType), model); err != nil {
		return fmt.Errorf("failed to update preferred model: %w", err)
	}
	if err := c.recordRecentModel(modelType, model); err != nil {
		return err
	}
	return nil
}

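// A minimal usage sketch for switching the preferred large model at runtime
// (the provider and model names are placeholders):
//
//	err := cfg.UpdatePreferredModel(SelectedModelTypeLarge, SelectedModel{
//		Provider: "openai",
//		Model:    "gpt-4o",
//	})
//
// This updates the in-memory selection, persists it via SetConfigField, and
// pushes the entry onto the recent-models list.
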
func (c *Config) SetConfigField(key string, value any) error {
	// read the data
	data, err := os.ReadFile(c.dataConfigDir)
	if err != nil {
		if os.IsNotExist(err) {
			data = []byte("{}")
		} else {
			return fmt.Errorf("failed to read config file: %w", err)
		}
	}

	newValue, err := sjson.Set(string(data), key, value)
	if err != nil {
		return fmt.Errorf("failed to set config field %s: %w", key, err)
	}
	if err := os.WriteFile(c.dataConfigDir, []byte(newValue), 0o600); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}
	return nil
}

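// SetConfigField relies on sjson's dotted-path syntax: setting a nested key on
// an empty document creates the intermediate objects. A small sketch of the
// behavior this code depends on:
//
//	out, _ := sjson.Set("{}", "options.tui.compact_mode", true)
//	// out == `{"options":{"tui":{"compact_mode":true}}}`
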
func (c *Config) RefreshOAuthToken(ctx context.Context, providerID string) error {
	providerConfig, exists := c.Providers.Get(providerID)
	if !exists {
		return fmt.Errorf("provider %s not found", providerID)
	}

	if providerConfig.OAuthToken == nil {
		return fmt.Errorf("provider %s does not have an OAuth token", providerID)
	}

	// Only Anthropic provider uses OAuth for now
	if providerID != string(catwalk.InferenceProviderAnthropic) {
		return fmt.Errorf("OAuth refresh not supported for provider %s", providerID)
	}

	newToken, err := claude.RefreshToken(ctx, providerConfig.OAuthToken.RefreshToken)
	if err != nil {
		return fmt.Errorf("failed to refresh OAuth token for provider %s: %w", providerID, err)
	}

	slog.Info("Successfully refreshed OAuth token in background", "provider", providerID)
	providerConfig.OAuthToken = newToken
	providerConfig.APIKey = fmt.Sprintf("Bearer %s", newToken.AccessToken)
	providerConfig.SetupClaudeCode()

	c.Providers.Set(providerID, providerConfig)

	if err := cmp.Or(
		c.SetConfigField(fmt.Sprintf("providers.%s.api_key", providerID), newToken.AccessToken),
		c.SetConfigField(fmt.Sprintf("providers.%s.oauth", providerID), newToken),
	); err != nil {
		return fmt.Errorf("failed to persist refreshed token: %w", err)
	}

	return nil
}

func (c *Config) SetProviderAPIKey(providerID string, apiKey any) error {
	var providerConfig ProviderConfig
	var exists bool
	var setKeyOrToken func()

	switch v := apiKey.(type) {
	case string:
		if err := c.SetConfigField(fmt.Sprintf("providers.%s.api_key", providerID), v); err != nil {
			return fmt.Errorf("failed to save api key to config file: %w", err)
		}
		setKeyOrToken = func() { providerConfig.APIKey = v }
	case *oauth.Token:
		if err := cmp.Or(
			c.SetConfigField(fmt.Sprintf("providers.%s.api_key", providerID), v.AccessToken),
			c.SetConfigField(fmt.Sprintf("providers.%s.oauth", providerID), v),
		); err != nil {
			return err
		}
		setKeyOrToken = func() {
			providerConfig.APIKey = v.AccessToken
			providerConfig.OAuthToken = v
			providerConfig.SetupClaudeCode()
		}
	default:
		// Avoid calling a nil setKeyOrToken below for unsupported key types.
		return fmt.Errorf("unsupported API key type %T for provider %s", apiKey, providerID)
	}

	providerConfig, exists = c.Providers.Get(providerID)
	if exists {
		setKeyOrToken()
		c.Providers.Set(providerID, providerConfig)
		return nil
	}

	var foundProvider *catwalk.Provider
	for _, p := range c.knownProviders {
		if string(p.ID) == providerID {
			foundProvider = &p
			break
		}
	}

	if foundProvider != nil {
		// Create new provider config based on known provider
		providerConfig = ProviderConfig{
			ID:           providerID,
			Name:         foundProvider.Name,
			BaseURL:      foundProvider.APIEndpoint,
			Type:         foundProvider.Type,
			Disable:      false,
			ExtraHeaders: make(map[string]string),
			ExtraParams:  make(map[string]string),
			Models:       foundProvider.Models,
		}
		setKeyOrToken()
	} else {
		return fmt.Errorf("provider with ID %s not found in known providers", providerID)
	}
	// Store the updated provider config
	c.Providers.Set(providerID, providerConfig)
	return nil
}

const maxRecentModelsPerType = 5

func (c *Config) recordRecentModel(modelType SelectedModelType, model SelectedModel) error {
	if model.Provider == "" || model.Model == "" {
		return nil
	}

	if c.RecentModels == nil {
		c.RecentModels = make(map[SelectedModelType][]SelectedModel)
	}

	eq := func(a, b SelectedModel) bool {
		return a.Provider == b.Provider && a.Model == b.Model
	}

	entry := SelectedModel{
		Provider: model.Provider,
		Model:    model.Model,
	}

	current := c.RecentModels[modelType]
	withoutCurrent := slices.DeleteFunc(slices.Clone(current), func(existing SelectedModel) bool {
		return eq(existing, entry)
	})

	updated := append([]SelectedModel{entry}, withoutCurrent...)
	if len(updated) > maxRecentModelsPerType {
		updated = updated[:maxRecentModelsPerType]
	}

	if slices.EqualFunc(current, updated, eq) {
		return nil
	}

	c.RecentModels[modelType] = updated

	if err := c.SetConfigField(fmt.Sprintf("recent_models.%s", modelType), updated); err != nil {
		return fmt.Errorf("failed to persist recent models: %w", err)
	}

	return nil
}

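// recordRecentModel keeps a small most-recently-used list per model type. A
// sketch of the effect (A, B, and C stand for provider/model pairs):
//
//	recent: [B, A]           // A and B used earlier
//	record(A) -> [A, B]      // A moves to the front, duplicates removed
//	record(C) -> [C, A, B]   // new entries are prepended, capped at 5
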
func allToolNames() []string {
	return []string{
		"agent",
		"bash",
		"job_output",
		"job_kill",
		"download",
		"edit",
		"multiedit",
		"lsp_diagnostics",
		"lsp_references",
		"fetch",
		"agentic_fetch",
		"glob",
		"grep",
		"ls",
		"sourcegraph",
		"view",
		"write",
	}
}

func resolveAllowedTools(allTools []string, disabledTools []string) []string {
	if disabledTools == nil {
		return allTools
	}
	// filter out disabled tools (exclude mode)
	return filterSlice(allTools, disabledTools, false)
}

func resolveReadOnlyTools(tools []string) []string {
	readOnlyTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
	// keep only the allowed tools that are also read-only (include mode)
	return filterSlice(tools, readOnlyTools, true)
}

func filterSlice(data []string, mask []string, include bool) []string {
	filtered := []string{}
	for _, s := range data {
		// if include is true, we include items that ARE in the mask
		// if include is false, we include items that are NOT in the mask
		if include == slices.Contains(mask, s) {
			filtered = append(filtered, s)
		}
	}
	return filtered
}

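// A small sketch of filterSlice's two modes (values are illustrative):
//
//	filterSlice([]string{"bash", "view", "grep"}, []string{"view", "grep"}, true)
//	// -> ["view", "grep"]   (include mode: keep items in the mask)
//
//	filterSlice([]string{"bash", "view", "grep"}, []string{"view", "grep"}, false)
//	// -> ["bash"]           (exclude mode: keep items not in the mask)
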
func (c *Config) SetupAgents() {
	allowedTools := resolveAllowedTools(allToolNames(), c.Options.DisabledTools)

	agents := map[string]Agent{
		AgentCoder: {
			ID:           AgentCoder,
			Name:         "Coder",
			Description:  "An agent that helps with executing coding tasks.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: allowedTools,
		},

		AgentTask: {
			ID:           AgentTask,
			Name:         "Task",
			Description:  "An agent that helps with searching for context and finding implementation details.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: resolveReadOnlyTools(allowedTools),
			// NO MCPs or LSPs by default
			AllowedMCP: map[string][]string{},
		},
	}
	c.Agents = agents
}

func (c *Config) Resolver() VariableResolver {
	return c.resolver
}

func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
	testURL := ""
	headers := make(map[string]string)
	apiKey, _ := resolver.ResolveValue(c.APIKey)
	switch c.Type {
	case catwalk.TypeOpenAI, catwalk.TypeOpenAICompat, catwalk.TypeOpenRouter:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.openai.com/v1"
		}
		if c.ID == string(catwalk.InferenceProviderOpenRouter) {
			testURL = baseURL + "/credits"
		} else {
			testURL = baseURL + "/models"
		}
		headers["Authorization"] = "Bearer " + apiKey
	case catwalk.TypeAnthropic:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.anthropic.com/v1"
		}
		testURL = baseURL + "/models"
		// TODO: replace with const when catwalk is released
		if c.ID == "kimi-coding" {
			testURL = baseURL + "/v1/models"
		}
		headers["x-api-key"] = apiKey
		headers["anthropic-version"] = "2023-06-01"
	case catwalk.TypeGoogle:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://generativelanguage.googleapis.com"
		}
		testURL = baseURL + "/v1beta/models?key=" + url.QueryEscape(apiKey)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	client := &http.Client{}
	req, err := http.NewRequestWithContext(ctx, "GET", testURL, nil)
	if err != nil {
		return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
	}
	for k, v := range headers {
		req.Header.Set(k, v)
	}
	for k, v := range c.ExtraHeaders {
		req.Header.Set(k, v)
	}
	b, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("failed to send request to provider %s: %w", c.ID, err)
	}
	defer func() { _ = b.Body.Close() }()
	if c.ID == string(catwalk.InferenceProviderZAI) {
		if b.StatusCode == http.StatusUnauthorized {
			// for z.ai just check if the http response is not 401
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
		}
	} else {
		if b.StatusCode != http.StatusOK {
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
		}
	}
	return nil
}

func resolveEnvs(envs map[string]string) []string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range envs {
		var err error
		envs[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving environment variable", "error", err, "variable", e, "value", v)
			continue
		}
	}

	res := make([]string, 0, len(envs))
	for k, v := range envs {
		res = append(res, fmt.Sprintf("%s=%s", k, v))
	}
	return res
}

func ptrValOr[T any](t *T, el T) T {
	if t == nil {
		return el
	}
	return *t
}