config.go

package config

import (
	"cmp"
	"context"
	"fmt"
	"log/slog"
	"net/http"
	"net/url"
	"os"
	"slices"
	"strings"
	"time"

	"github.com/charmbracelet/catwalk/pkg/catwalk"
	"github.com/charmbracelet/crush/internal/csync"
	"github.com/charmbracelet/crush/internal/env"
	"github.com/charmbracelet/crush/internal/oauth"
	"github.com/charmbracelet/crush/internal/oauth/claude"
	"github.com/invopop/jsonschema"
	"github.com/tidwall/sjson"
)

const (
	appName              = "crush"
	defaultDataDirectory = ".crush"
	defaultInitializeAs  = "AGENTS.md"
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"GEMINI.md",
	"gemini.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
	"AGENTS.md",
	"agents.md",
	"Agents.md",
}

type SelectedModelType string

const (
	SelectedModelTypeLarge SelectedModelType = "large"
	SelectedModelTypeSmall SelectedModelType = "small"
)

const (
	AgentCoder string = "coder"
	AgentTask  string = "task"
)

type SelectedModel struct {
	// The model id as used by the provider API.
	// Required.
	Model string `json:"model" jsonschema:"required,description=The model ID as used by the provider API,example=gpt-4o"`
	// The model provider, same as the key/id used in the providers config.
	// Required.
	Provider string `json:"provider" jsonschema:"required,description=The model provider ID that matches a key in the providers config,example=openai"`

	// Only used by models that use the openai provider and need this set.
	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"description=Reasoning effort level for OpenAI models that support it,enum=low,enum=medium,enum=high"`

	// Used by anthropic models that can reason to indicate if the model should think.
	Think bool `json:"think,omitempty" jsonschema:"description=Enable thinking mode for Anthropic models that support reasoning"`

	// Overrides the default model configuration.
	MaxTokens        int64    `json:"max_tokens,omitempty" jsonschema:"description=Maximum number of tokens for model responses,maximum=200000,example=4096"`
	Temperature      *float64 `json:"temperature,omitempty" jsonschema:"description=Sampling temperature,minimum=0,maximum=1,example=0.7"`
	TopP             *float64 `json:"top_p,omitempty" jsonschema:"description=Top-p (nucleus) sampling parameter,minimum=0,maximum=1,example=0.9"`
	TopK             *int64   `json:"top_k,omitempty" jsonschema:"description=Top-k sampling parameter"`
	FrequencyPenalty *float64 `json:"frequency_penalty,omitempty" jsonschema:"description=Frequency penalty to reduce repetition"`
	PresencePenalty  *float64 `json:"presence_penalty,omitempty" jsonschema:"description=Presence penalty to increase topic diversity"`

	// Override provider specific options.
	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for the model"`
}

type ProviderConfig struct {
	// The provider's id.
	ID string `json:"id,omitempty" jsonschema:"description=Unique identifier for the provider,example=openai"`
	// The provider's name, used for display purposes.
	Name string `json:"name,omitempty" jsonschema:"description=Human-readable name for the provider,example=OpenAI"`
	// The provider's API endpoint.
	BaseURL string `json:"base_url,omitempty" jsonschema:"description=Base URL for the provider's API,format=uri,example=https://api.openai.com/v1"`
	// The provider type, e.g. "openai", "anthropic", etc.; if empty, it defaults to "openai".
	Type catwalk.Type `json:"type,omitempty" jsonschema:"description=Provider type that determines the API format,enum=openai,enum=openai-compat,enum=anthropic,enum=gemini,enum=azure,enum=vertexai,default=openai"`
	// The provider's API key.
	APIKey string `json:"api_key,omitempty" jsonschema:"description=API key for authentication with the provider,example=$OPENAI_API_KEY"`
	// The original API key template before resolution (for re-resolution on auth errors).
	APIKeyTemplate string `json:"-"`
	// OAuthToken for providers that use OAuth2 authentication.
	OAuthToken *oauth.Token `json:"oauth,omitempty" jsonschema:"description=OAuth2 token for authentication with the provider"`
	// Marks the provider as disabled.
	Disable bool `json:"disable,omitempty" jsonschema:"description=Whether this provider is disabled,default=false"`

	// Custom system prompt prefix.
	SystemPromptPrefix string `json:"system_prompt_prefix,omitempty" jsonschema:"description=Custom prefix to add to system prompts for this provider"`

	// Extra headers to send with each request to the provider.
	ExtraHeaders map[string]string `json:"extra_headers,omitempty" jsonschema:"description=Additional HTTP headers to send with requests"`
	// Extra fields to include in request bodies; only applies to OpenAI-compatible providers.
	ExtraBody map[string]any `json:"extra_body,omitempty" jsonschema:"description=Additional fields to include in request bodies, only works with openai-compatible providers"`

	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for this provider"`

	// Used to pass extra parameters to the provider.
	ExtraParams map[string]string `json:"-"`

	// The provider models.
	Models []catwalk.Model `json:"models,omitempty" jsonschema:"description=List of models available from this provider"`
}

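// SetupClaudeCode configures the provider for Claude Code OAuth access: it
// sets the bearer API key, the Claude Code system prompt prefix, and the
// Anthropic version and OAuth beta headers.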
func (pc *ProviderConfig) SetupClaudeCode() {
	pc.APIKey = fmt.Sprintf("Bearer %s", pc.OAuthToken.AccessToken)
	pc.SystemPromptPrefix = "You are Claude Code, Anthropic's official CLI for Claude."
	if pc.ExtraHeaders == nil {
		pc.ExtraHeaders = make(map[string]string)
	}
	pc.ExtraHeaders["anthropic-version"] = "2023-06-01"

	value := pc.ExtraHeaders["anthropic-beta"]
	const want = "oauth-2025-04-20"
	if !strings.Contains(value, want) {
		if value != "" {
			value += ","
		}
		value += want
	}
	pc.ExtraHeaders["anthropic-beta"] = value
}

type MCPType string

const (
	MCPStdio MCPType = "stdio"
	MCPSSE   MCPType = "sse"
	MCPHttp  MCPType = "http"
)

type MCPConfig struct {
	Command  string            `json:"command,omitempty" jsonschema:"description=Command to execute for stdio MCP servers,example=npx"`
	Env      map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the MCP server"`
	Args     []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the MCP server command"`
	Type     MCPType           `json:"type" jsonschema:"required,description=Type of MCP connection,enum=stdio,enum=sse,enum=http,default=stdio"`
	URL      string            `json:"url,omitempty" jsonschema:"description=URL for HTTP or SSE MCP servers,format=uri,example=http://localhost:3000/mcp"`
	Disabled bool              `json:"disabled,omitempty" jsonschema:"description=Whether this MCP server is disabled,default=false"`
	Timeout  int               `json:"timeout,omitempty" jsonschema:"description=Timeout in seconds for MCP server connections,default=15,example=30,example=60,example=120"`

	// TODO: maybe make it possible to get the value from the env
	Headers map[string]string `json:"headers,omitempty" jsonschema:"description=HTTP headers for HTTP/SSE MCP servers"`
}

type LSPConfig struct {
	Disabled    bool              `json:"disabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
	Command     string            `json:"command,omitempty" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
	Args        []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
	Env         map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set to the LSP server command"`
	FileTypes   []string          `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
	RootMarkers []string          `json:"root_markers,omitempty" jsonschema:"description=Files or directories that indicate the project root,example=go.mod,example=package.json,example=Cargo.toml"`
	InitOptions map[string]any    `json:"init_options,omitempty" jsonschema:"description=Initialization options passed to the LSP server during initialize request"`
	Options     map[string]any    `json:"options,omitempty" jsonschema:"description=LSP server-specific settings passed during initialization"`
}

type TUIOptions struct {
	CompactMode bool   `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
	DiffMode    string `json:"diff_mode,omitempty" jsonschema:"description=Diff mode for the TUI interface,enum=unified,enum=split"`
	// Here we can add themes later or any TUI related options

	Completions Completions `json:"completions,omitzero" jsonschema:"description=Completions UI options"`
}

// Completions defines options for the completions UI.
type Completions struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

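// Limits returns the configured maximum depth and maximum number of items,
// or 0 for each value that is unset.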
func (c Completions) Limits() (depth, items int) {
	return ptrValOr(c.MaxDepth, 0), ptrValOr(c.MaxItems, 0)
}

type Permissions struct {
	AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"description=List of tools that don't require permission prompts,example=bash,example=view"` // Tools that don't require permission prompts
	SkipRequests bool     `json:"-"`                                                                                                                              // Automatically accept all permissions (YOLO mode)
}

type TrailerStyle string

const (
	TrailerStyleNone         TrailerStyle = "none"
	TrailerStyleCoAuthoredBy TrailerStyle = "co-authored-by"
	TrailerStyleAssistedBy   TrailerStyle = "assisted-by"
)

type Attribution struct {
	TrailerStyle  TrailerStyle `json:"trailer_style,omitempty" jsonschema:"description=Style of attribution trailer to add to commits,enum=none,enum=co-authored-by,enum=assisted-by,default=assisted-by"`
	CoAuthoredBy  *bool        `json:"co_authored_by,omitempty" jsonschema:"description=Deprecated: use trailer_style instead"`
	GeneratedWith bool         `json:"generated_with,omitempty" jsonschema:"description=Add Generated with Crush line to commit messages and issues and PRs,default=true"`
}

// JSONSchemaExtend marks the co_authored_by field as deprecated in the schema.
func (Attribution) JSONSchemaExtend(schema *jsonschema.Schema) {
	if schema.Properties != nil {
		if prop, ok := schema.Properties.Get("co_authored_by"); ok {
			prop.Deprecated = true
		}
	}
}

type Options struct {
	ContextPaths              []string     `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
	TUI                       *TUIOptions  `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
	Debug                     bool         `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
	DebugLSP                  bool         `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
	DisableAutoSummarize      bool         `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
	DataDirectory             string       `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
	DisabledTools             []string     `json:"disabled_tools" jsonschema:"description=Tools to disable"`
	DisableProviderAutoUpdate bool         `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
	Attribution               *Attribution `json:"attribution,omitempty" jsonschema:"description=Attribution settings for generated content"`
	DisableMetrics            bool         `json:"disable_metrics,omitempty" jsonschema:"description=Disable sending metrics,default=false"`
	InitializeAs              string       `json:"initialize_as,omitempty" jsonschema:"description=Name of the context file to create/update during project initialization,default=AGENTS.md,example=AGENTS.md,example=CRUSH.md,example=CLAUDE.md,example=docs/LLMs.md"`
}

type MCPs map[string]MCPConfig

type MCP struct {
	Name string    `json:"name"`
	MCP  MCPConfig `json:"mcp"`
}

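// Sorted returns the configured MCP servers as a slice sorted by name.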
func (m MCPs) Sorted() []MCP {
	sorted := make([]MCP, 0, len(m))
	for k, v := range m {
		sorted = append(sorted, MCP{
			Name: k,
			MCP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b MCP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

type LSPs map[string]LSPConfig

type LSP struct {
	Name string    `json:"name"`
	LSP  LSPConfig `json:"lsp"`
}

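// Sorted returns the configured LSP servers as a slice sorted by name.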
func (l LSPs) Sorted() []LSP {
	sorted := make([]LSP, 0, len(l))
	for k, v := range l {
		sorted = append(sorted, LSP{
			Name: k,
			LSP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b LSP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

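// ResolvedEnv resolves any variable references in the LSP server's
// environment map and returns the entries as KEY=VALUE pairs.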
func (l LSPConfig) ResolvedEnv() []string {
	return resolveEnvs(l.Env)
}

func (m MCPConfig) ResolvedEnv() []string {
	return resolveEnvs(m.Env)
}

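// ResolvedHeaders resolves any variable references in the MCP server's
// header map and returns the headers.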
func (m MCPConfig) ResolvedHeaders() map[string]string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range m.Headers {
		var err error
		m.Headers[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving header variable", "error", err, "variable", e, "value", v)
			continue
		}
	}
	return m.Headers
}

type Agent struct {
	ID          string `json:"id,omitempty"`
	Name        string `json:"name,omitempty"`
	Description string `json:"description,omitempty"`
	// Marks the agent as disabled.
	Disabled bool `json:"disabled,omitempty"`

	Model SelectedModelType `json:"model" jsonschema:"required,description=The model type to use for this agent,enum=large,enum=small,default=large"`

	// The available tools for the agent.
	// If nil, all tools are available.
	AllowedTools []string `json:"allowed_tools,omitempty"`

	// Which MCPs are available for this agent.
	// If empty, all MCPs are available.
	// The string slice lists the tools from the allowed MCP that the agent can use;
	// if it is nil, all tools from that MCP are available.
	AllowedMCP map[string][]string `json:"allowed_mcp,omitempty"`

	// Overrides the context paths for this agent.
	ContextPaths []string `json:"context_paths,omitempty"`
}

type Tools struct {
	Ls ToolLs `json:"ls,omitzero"`
}

type ToolLs struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

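// Limits returns the configured maximum depth and maximum number of items
// for the ls tool, or 0 for each value that is unset.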
func (t ToolLs) Limits() (depth, items int) {
	return ptrValOr(t.MaxDepth, 0), ptrValOr(t.MaxItems, 0)
}

// Config holds the configuration for crush.
type Config struct {
	Schema string `json:"$schema,omitempty"`

	// We currently only support large/small as values here.
	Models map[SelectedModelType]SelectedModel `json:"models,omitempty" jsonschema:"description=Model configurations for different model types,example={\"large\":{\"model\":\"gpt-4o\",\"provider\":\"openai\"}}"`
	// Recently used models stored in the data directory config.
	RecentModels map[SelectedModelType][]SelectedModel `json:"recent_models,omitempty" jsonschema:"description=Recently used models sorted by most recent first"`

	// The providers that are configured.
	Providers *csync.Map[string, ProviderConfig] `json:"providers,omitempty" jsonschema:"description=AI provider configurations"`

	MCP MCPs `json:"mcp,omitempty" jsonschema:"description=Model Context Protocol server configurations"`

	LSP LSPs `json:"lsp,omitempty" jsonschema:"description=Language Server Protocol configurations"`

	Options *Options `json:"options,omitempty" jsonschema:"description=General application options"`

	Permissions *Permissions `json:"permissions,omitempty" jsonschema:"description=Permission settings for tool usage"`

	Tools Tools `json:"tools,omitzero" jsonschema:"description=Tool configurations"`

	Agents map[string]Agent `json:"-"`

	// Internal
	workingDir string `json:"-"`
	// TODO: find a better way to do this; it should probably not be part of the config.
	resolver       VariableResolver
	dataConfigDir  string             `json:"-"`
	knownProviders []catwalk.Provider `json:"-"`
}

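// WorkingDir returns the working directory this configuration was loaded for.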
func (c *Config) WorkingDir() string {
	return c.workingDir
}

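// EnabledProviders returns all configured providers that are not disabled.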
func (c *Config) EnabledProviders() []ProviderConfig {
	var enabled []ProviderConfig
	for p := range c.Providers.Seq() {
		if !p.Disable {
			enabled = append(enabled, p)
		}
	}
	return enabled
}

// IsConfigured returns true if at least one provider is configured and enabled.
func (c *Config) IsConfigured() bool {
	return len(c.EnabledProviders()) > 0
}

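// GetModel returns the model with the given ID from the given provider, or
// nil if the provider or model is not configured.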
func (c *Config) GetModel(provider, model string) *catwalk.Model {
	if providerConfig, ok := c.Providers.Get(provider); ok {
		for _, m := range providerConfig.Models {
			if m.ID == model {
				return &m
			}
		}
	}
	return nil
}

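// GetProviderForModel returns the provider configuration for the currently
// selected model of the given type, or nil if none is configured.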
func (c *Config) GetProviderForModel(modelType SelectedModelType) *ProviderConfig {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	if providerConfig, ok := c.Providers.Get(model.Provider); ok {
		return &providerConfig
	}
	return nil
}

func (c *Config) GetModelByType(modelType SelectedModelType) *catwalk.Model {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) LargeModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeLarge]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) SmallModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeSmall]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

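// SetCompactMode toggles compact mode in the TUI options and persists the
// change to the config file in the data directory.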
func (c *Config) SetCompactMode(enabled bool) error {
	if c.Options == nil {
		c.Options = &Options{}
	}
	if c.Options.TUI == nil {
		c.Options.TUI = &TUIOptions{}
	}
	c.Options.TUI.CompactMode = enabled
	return c.SetConfigField("options.tui.compact_mode", enabled)
}

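// Resolve resolves the given value using the configured variable resolver.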
func (c *Config) Resolve(key string) (string, error) {
	if c.resolver == nil {
		return "", fmt.Errorf("no variable resolver configured")
	}
	return c.resolver.ResolveValue(key)
}

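// UpdatePreferredModel sets the selected model for the given model type,
// persists it to the config file, and records it in the recent models list.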
func (c *Config) UpdatePreferredModel(modelType SelectedModelType, model SelectedModel) error {
	c.Models[modelType] = model
	if err := c.SetConfigField(fmt.Sprintf("models.%s", modelType), model); err != nil {
		return fmt.Errorf("failed to update preferred model: %w", err)
	}
	if err := c.recordRecentModel(modelType, model); err != nil {
		return err
	}
	return nil
}

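// SetConfigField writes a single field to the config file in the data
// directory using a dotted sjson path, creating the file if it does not
// exist. For example:
//
//	_ = c.SetConfigField("options.debug", true)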
func (c *Config) SetConfigField(key string, value any) error {
	// read the data
	data, err := os.ReadFile(c.dataConfigDir)
	if err != nil {
		if os.IsNotExist(err) {
			data = []byte("{}")
		} else {
			return fmt.Errorf("failed to read config file: %w", err)
		}
	}

	newValue, err := sjson.Set(string(data), key, value)
	if err != nil {
		return fmt.Errorf("failed to set config field %s: %w", key, err)
	}
	if err := os.WriteFile(c.dataConfigDir, []byte(newValue), 0o600); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}
	return nil
}

// RefreshOAuthToken refreshes the OAuth token for the given provider.
func (c *Config) RefreshOAuthToken(ctx context.Context, providerID string) error {
	providerConfig, exists := c.Providers.Get(providerID)
	if !exists {
		return fmt.Errorf("provider %s not found", providerID)
	}

	if providerConfig.OAuthToken == nil {
		return fmt.Errorf("provider %s does not have an OAuth token", providerID)
	}

	// Only Anthropic provider uses OAuth for now.
	if providerID != string(catwalk.InferenceProviderAnthropic) {
		return fmt.Errorf("OAuth refresh not supported for provider %s", providerID)
	}

	newToken, err := claude.RefreshToken(ctx, providerConfig.OAuthToken.RefreshToken)
	if err != nil {
		return fmt.Errorf("failed to refresh OAuth token for provider %s: %w", providerID, err)
	}

	slog.Info("Successfully refreshed OAuth token", "provider", providerID)
	providerConfig.OAuthToken = newToken
	providerConfig.APIKey = fmt.Sprintf("Bearer %s", newToken.AccessToken)
	providerConfig.SetupClaudeCode()

	c.Providers.Set(providerID, providerConfig)

	if err := cmp.Or(
		c.SetConfigField(fmt.Sprintf("providers.%s.api_key", providerID), newToken.AccessToken),
		c.SetConfigField(fmt.Sprintf("providers.%s.oauth", providerID), newToken),
	); err != nil {
		return fmt.Errorf("failed to persist refreshed token: %w", err)
	}

	return nil
}

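// SetProviderAPIKey stores an API key or OAuth token for the given provider,
// persists it to the config file, and creates the provider config from the
// known providers list if it does not exist yet.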
func (c *Config) SetProviderAPIKey(providerID string, apiKey any) error {
	var providerConfig ProviderConfig
	var exists bool
	var setKeyOrToken func()

	switch v := apiKey.(type) {
	case string:
		if err := c.SetConfigField(fmt.Sprintf("providers.%s.api_key", providerID), v); err != nil {
			return fmt.Errorf("failed to save api key to config file: %w", err)
		}
		setKeyOrToken = func() { providerConfig.APIKey = v }
	case *oauth.Token:
		if err := cmp.Or(
			c.SetConfigField(fmt.Sprintf("providers.%s.api_key", providerID), v.AccessToken),
			c.SetConfigField(fmt.Sprintf("providers.%s.oauth", providerID), v),
		); err != nil {
			return err
		}
		setKeyOrToken = func() {
			providerConfig.APIKey = v.AccessToken
			providerConfig.OAuthToken = v
			providerConfig.SetupClaudeCode()
		}
	default:
		return fmt.Errorf("unsupported API key type %T for provider %s", apiKey, providerID)
	}

	providerConfig, exists = c.Providers.Get(providerID)
	if exists {
		setKeyOrToken()
		c.Providers.Set(providerID, providerConfig)
		return nil
	}

	var foundProvider *catwalk.Provider
	for _, p := range c.knownProviders {
		if string(p.ID) == providerID {
			foundProvider = &p
			break
		}
	}

	if foundProvider != nil {
		// Create new provider config based on known provider
		providerConfig = ProviderConfig{
			ID:           providerID,
			Name:         foundProvider.Name,
			BaseURL:      foundProvider.APIEndpoint,
			Type:         foundProvider.Type,
			Disable:      false,
			ExtraHeaders: make(map[string]string),
			ExtraParams:  make(map[string]string),
			Models:       foundProvider.Models,
		}
		setKeyOrToken()
	} else {
		return fmt.Errorf("provider with ID %s not found in known providers", providerID)
	}
	// Store the updated provider config
	c.Providers.Set(providerID, providerConfig)
	return nil
}

const maxRecentModelsPerType = 5

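// recordRecentModel moves the given model to the front of the recent models
// list for the given type, truncates the list to maxRecentModelsPerType, and
// persists it to the config file when it changed.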
func (c *Config) recordRecentModel(modelType SelectedModelType, model SelectedModel) error {
	if model.Provider == "" || model.Model == "" {
		return nil
	}

	if c.RecentModels == nil {
		c.RecentModels = make(map[SelectedModelType][]SelectedModel)
	}

	eq := func(a, b SelectedModel) bool {
		return a.Provider == b.Provider && a.Model == b.Model
	}

	entry := SelectedModel{
		Provider: model.Provider,
		Model:    model.Model,
	}

	current := c.RecentModels[modelType]
	withoutCurrent := slices.DeleteFunc(slices.Clone(current), func(existing SelectedModel) bool {
		return eq(existing, entry)
	})

	updated := append([]SelectedModel{entry}, withoutCurrent...)
	if len(updated) > maxRecentModelsPerType {
		updated = updated[:maxRecentModelsPerType]
	}

	if slices.EqualFunc(current, updated, eq) {
		return nil
	}

	c.RecentModels[modelType] = updated

	if err := c.SetConfigField(fmt.Sprintf("recent_models.%s", modelType), updated); err != nil {
		return fmt.Errorf("failed to persist recent models: %w", err)
	}

	return nil
}

func allToolNames() []string {
	return []string{
		"agent",
		"bash",
		"job_output",
		"job_kill",
		"download",
		"edit",
		"multiedit",
		"lsp_diagnostics",
		"lsp_references",
		"fetch",
		"agentic_fetch",
		"glob",
		"grep",
		"ls",
		"sourcegraph",
		"view",
		"write",
	}
}

func resolveAllowedTools(allTools []string, disabledTools []string) []string {
	if disabledTools == nil {
		return allTools
	}
	// filter out disabled tools (exclude mode)
	return filterSlice(allTools, disabledTools, false)
}

func resolveReadOnlyTools(tools []string) []string {
	readOnlyTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
	// keep only the allowed tools that are read-only (include mode)
	return filterSlice(tools, readOnlyTools, true)
}

func filterSlice(data []string, mask []string, include bool) []string {
	filtered := []string{}
	for _, s := range data {
		// if include is true, we include items that ARE in the mask
		// if include is false, we include items that are NOT in the mask
		if include == slices.Contains(mask, s) {
			filtered = append(filtered, s)
		}
	}
	return filtered
}

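// SetupAgents builds the default coder and task agents from the configured
// context paths and the set of tools that are not disabled.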
func (c *Config) SetupAgents() {
	allowedTools := resolveAllowedTools(allToolNames(), c.Options.DisabledTools)

	agents := map[string]Agent{
		AgentCoder: {
			ID:           AgentCoder,
			Name:         "Coder",
			Description:  "An agent that helps with executing coding tasks.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: allowedTools,
		},

		AgentTask: {
			ID:           AgentTask,
			Name:         "Task",
			Description:  "An agent that helps with searching for context and finding implementation details.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: resolveReadOnlyTools(allowedTools),
			// NO MCPs or LSPs by default
			AllowedMCP: map[string][]string{},
		},
	}
	c.Agents = agents
}

func (c *Config) Resolver() VariableResolver {
	return c.resolver
}

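// TestConnection performs a lightweight authenticated request against the
// provider's API (for example its models endpoint) to verify that the
// configured credentials work.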
func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
	testURL := ""
	headers := make(map[string]string)
	apiKey, _ := resolver.ResolveValue(c.APIKey)
	switch c.Type {
	case catwalk.TypeOpenAI, catwalk.TypeOpenAICompat, catwalk.TypeOpenRouter:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.openai.com/v1"
		}
		if c.ID == string(catwalk.InferenceProviderOpenRouter) {
			testURL = baseURL + "/credits"
		} else {
			testURL = baseURL + "/models"
		}
		headers["Authorization"] = "Bearer " + apiKey
	case catwalk.TypeAnthropic:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.anthropic.com/v1"
		}
		testURL = baseURL + "/models"
		// TODO: replace with const when catwalk is released
		if c.ID == "kimi-coding" {
			testURL = baseURL + "/v1/models"
		}
		headers["x-api-key"] = apiKey
		headers["anthropic-version"] = "2023-06-01"
	case catwalk.TypeGoogle:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://generativelanguage.googleapis.com"
		}
		testURL = baseURL + "/v1beta/models?key=" + url.QueryEscape(apiKey)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	client := &http.Client{}
	req, err := http.NewRequestWithContext(ctx, "GET", testURL, nil)
	if err != nil {
		return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
	}
	for k, v := range headers {
		req.Header.Set(k, v)
	}
	for k, v := range c.ExtraHeaders {
		req.Header.Set(k, v)
	}
	b, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("failed to connect to provider %s: %w", c.ID, err)
	}
	defer func() { _ = b.Body.Close() }()
	if c.ID == string(catwalk.InferenceProviderZAI) {
		if b.StatusCode == http.StatusUnauthorized {
			// for z.ai just check that the response is not 401
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
		}
	} else {
		if b.StatusCode != http.StatusOK {
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
		}
	}
	return nil
}

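// resolveEnvs resolves variable references in the given environment map and
// returns the entries as KEY=VALUE pairs.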
func resolveEnvs(envs map[string]string) []string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range envs {
		var err error
		envs[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving environment variable", "error", err, "variable", e, "value", v)
			continue
		}
	}

	res := make([]string, 0, len(envs))
	for k, v := range envs {
		res = append(res, fmt.Sprintf("%s=%s", k, v))
	}
	return res
}

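// ptrValOr returns the value pointed to by t, or el if t is nil.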
func ptrValOr[T any](t *T, el T) T {
	if t == nil {
		return el
	}
	return *t
}