config.go

package config

import (
	"context"
	"fmt"
	"log/slog"
	"net/http"
	"net/url"
	"os"
	"slices"
	"strings"
	"time"

	"github.com/charmbracelet/catwalk/pkg/catwalk"
	"github.com/charmbracelet/crush/internal/csync"
	"github.com/charmbracelet/crush/internal/env"
	"github.com/invopop/jsonschema"
	"github.com/tidwall/sjson"
)

const (
	appName              = "crush"
	defaultDataDirectory = ".crush"
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"GEMINI.md",
	"gemini.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
	"AGENTS.md",
	"agents.md",
	"Agents.md",
}

type SelectedModelType string

const (
	SelectedModelTypeLarge SelectedModelType = "large"
	SelectedModelTypeSmall SelectedModelType = "small"
)

const (
	AgentCoder string = "coder"
	AgentTask  string = "task"
)

type SelectedModel struct {
	// The model id as used by the provider API.
	// Required.
	Model string `json:"model" jsonschema:"required,description=The model ID as used by the provider API,example=gpt-4o"`
	// The model provider, same as the key/id used in the providers config.
	// Required.
	Provider string `json:"provider" jsonschema:"required,description=The model provider ID that matches a key in the providers config,example=openai"`

	// Only used by models that use the openai provider and need this set.
	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"description=Reasoning effort level for OpenAI models that support it,enum=low,enum=medium,enum=high"`

	// Used by anthropic models that can reason to indicate if the model should think.
	Think bool `json:"think,omitempty" jsonschema:"description=Enable thinking mode for Anthropic models that support reasoning"`

	// Overrides the default model configuration.
	MaxTokens        int64    `json:"max_tokens,omitempty" jsonschema:"description=Maximum number of tokens for model responses,minimum=1,maximum=200000,example=4096"`
	Temperature      *float64 `json:"temperature,omitempty" jsonschema:"description=Sampling temperature,minimum=0,maximum=1,example=0.7"`
	TopP             *float64 `json:"top_p,omitempty" jsonschema:"description=Top-p (nucleus) sampling parameter,minimum=0,maximum=1,example=0.9"`
	TopK             *int64   `json:"top_k,omitempty" jsonschema:"description=Top-k sampling parameter"`
	FrequencyPenalty *float64 `json:"frequency_penalty,omitempty" jsonschema:"description=Frequency penalty to reduce repetition"`
	PresencePenalty  *float64 `json:"presence_penalty,omitempty" jsonschema:"description=Presence penalty to increase topic diversity"`

	// Override provider specific options.
	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for the model"`
}

type ProviderConfig struct {
	// The provider's id.
	ID string `json:"id,omitempty" jsonschema:"description=Unique identifier for the provider,example=openai"`
	// The provider's name, used for display purposes.
	Name string `json:"name,omitempty" jsonschema:"description=Human-readable name for the provider,example=OpenAI"`
	// The provider's API endpoint.
	BaseURL string `json:"base_url,omitempty" jsonschema:"description=Base URL for the provider's API,format=uri,example=https://api.openai.com/v1"`
	// The provider type, e.g. "openai", "anthropic", etc. If empty, it defaults to openai.
	Type catwalk.Type `json:"type,omitempty" jsonschema:"description=Provider type that determines the API format,enum=openai,enum=anthropic,enum=gemini,enum=azure,enum=vertexai,default=openai"`
	// The provider's API key.
	APIKey string `json:"api_key,omitempty" jsonschema:"description=API key for authentication with the provider,example=$OPENAI_API_KEY"`
	// Marks the provider as disabled.
	Disable bool `json:"disable,omitempty" jsonschema:"description=Whether this provider is disabled,default=false"`

	// Custom system prompt prefix.
	SystemPromptPrefix string `json:"system_prompt_prefix,omitempty" jsonschema:"description=Custom prefix to add to system prompts for this provider"`

	// Extra headers to send with each request to the provider.
	ExtraHeaders map[string]string `json:"extra_headers,omitempty" jsonschema:"description=Additional HTTP headers to send with requests"`
	// Extra fields to include in request bodies; only works with openai-compatible providers.
	ExtraBody map[string]any `json:"extra_body,omitempty" jsonschema:"description=Additional fields to include in request bodies, only works with openai-compatible providers"`

	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for this provider"`

	// Used to pass extra parameters to the provider.
	ExtraParams map[string]string `json:"-"`

	// The provider models.
	Models []catwalk.Model `json:"models,omitempty" jsonschema:"description=List of models available from this provider"`
}

type MCPType string

const (
	MCPStdio MCPType = "stdio"
	MCPSSE   MCPType = "sse"
	MCPHttp  MCPType = "http"
)

type MCPConfig struct {
	Command  string            `json:"command,omitempty" jsonschema:"description=Command to execute for stdio MCP servers,example=npx"`
	Env      map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the MCP server"`
	Args     []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the MCP server command"`
	Type     MCPType           `json:"type" jsonschema:"required,description=Type of MCP connection,enum=stdio,enum=sse,enum=http,default=stdio"`
	URL      string            `json:"url,omitempty" jsonschema:"description=URL for HTTP or SSE MCP servers,format=uri,example=http://localhost:3000/mcp"`
	Disabled bool              `json:"disabled,omitempty" jsonschema:"description=Whether this MCP server is disabled,default=false"`
	Timeout  int               `json:"timeout,omitempty" jsonschema:"description=Timeout in seconds for MCP server connections,default=15,example=30,example=60,example=120"`

	// TODO: maybe make it possible to get the value from the env
	Headers map[string]string `json:"headers,omitempty" jsonschema:"description=HTTP headers for HTTP/SSE MCP servers"`
}

type LSPConfig struct {
	Disabled    bool              `json:"disabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
	Command     string            `json:"command,omitempty" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
	Args        []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
	Env         map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the LSP server command"`
	FileTypes   []string          `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
	RootMarkers []string          `json:"root_markers,omitempty" jsonschema:"description=Files or directories that indicate the project root,example=go.mod,example=package.json,example=Cargo.toml"`
	InitOptions map[string]any    `json:"init_options,omitempty" jsonschema:"description=Initialization options passed to the LSP server during initialize request"`
	Options     map[string]any    `json:"options,omitempty" jsonschema:"description=LSP server-specific settings passed during initialization"`
}

type TUIOptions struct {
	CompactMode bool   `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
	DiffMode    string `json:"diff_mode,omitempty" jsonschema:"description=Diff mode for the TUI interface,enum=unified,enum=split"`
	// Here we can add themes later or any other TUI-related options.

	Completions Completions `json:"completions,omitzero" jsonschema:"description=Completions UI options"`
}

// Completions defines options for the completions UI.
type Completions struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

func (c Completions) Limits() (depth, items int) {
	return ptrValOr(c.MaxDepth, 0), ptrValOr(c.MaxItems, 0)
}

type Permissions struct {
	AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"description=List of tools that don't require permission prompts,example=bash,example=view"` // Tools that don't require permission prompts
	SkipRequests bool     `json:"-"`                                                                                                                              // Automatically accept all permissions (YOLO mode)
}

type TrailerStyle string

const (
	TrailerStyleNone         TrailerStyle = "none"
	TrailerStyleCoAuthoredBy TrailerStyle = "co-authored-by"
	TrailerStyleAssistedBy   TrailerStyle = "assisted-by"
)

type Attribution struct {
	TrailerStyle  TrailerStyle `json:"trailer_style,omitempty" jsonschema:"description=Style of attribution trailer to add to commits,enum=none,enum=co-authored-by,enum=assisted-by,default=co-authored-by"`
	CoAuthoredBy  *bool        `json:"co_authored_by,omitempty" jsonschema:"description=Deprecated: use trailer_style instead"`
	GeneratedWith bool         `json:"generated_with,omitempty" jsonschema:"description=Add a Generated with Crush line to commit messages, issues, and PRs,default=true"`
}

// JSONSchemaExtend marks the co_authored_by field as deprecated in the schema.
func (Attribution) JSONSchemaExtend(schema *jsonschema.Schema) {
	if schema.Properties != nil {
		if prop, ok := schema.Properties.Get("co_authored_by"); ok {
			prop.Deprecated = true
		}
	}
}

type Options struct {
	ContextPaths              []string     `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
	TUI                       *TUIOptions  `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
	Debug                     bool         `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
	DebugLSP                  bool         `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
	DisableAutoSummarize      bool         `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
	DataDirectory             string       `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
	DisabledTools             []string     `json:"disabled_tools" jsonschema:"description=Tools to disable"`
	DisableProviderAutoUpdate bool         `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
	Attribution               *Attribution `json:"attribution,omitempty" jsonschema:"description=Attribution settings for generated content"`
	DisableMetrics            bool         `json:"disable_metrics,omitempty" jsonschema:"description=Disable sending metrics,default=false"`
}

type MCPs map[string]MCPConfig

type MCP struct {
	Name string    `json:"name"`
	MCP  MCPConfig `json:"mcp"`
}

func (m MCPs) Sorted() []MCP {
	sorted := make([]MCP, 0, len(m))
	for k, v := range m {
		sorted = append(sorted, MCP{
			Name: k,
			MCP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b MCP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

type LSPs map[string]LSPConfig

type LSP struct {
	Name string    `json:"name"`
	LSP  LSPConfig `json:"lsp"`
}

func (l LSPs) Sorted() []LSP {
	sorted := make([]LSP, 0, len(l))
	for k, v := range l {
		sorted = append(sorted, LSP{
			Name: k,
			LSP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b LSP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

func (l LSPConfig) ResolvedEnv() []string {
	return resolveEnvs(l.Env)
}

func (m MCPConfig) ResolvedEnv() []string {
	return resolveEnvs(m.Env)
}

func (m MCPConfig) ResolvedHeaders() map[string]string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range m.Headers {
		var err error
		m.Headers[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving header variable", "error", err, "variable", e, "value", v)
			continue
		}
	}
	return m.Headers
}
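
// Illustrative example (not from the original source): header values may
// reference environment variables, which ResolvedHeaders expands in place
// using the shell variable resolver, e.g. a configured header such as
//
//	"headers": {"Authorization": "Bearer $MY_API_TOKEN"}
//
// would have $MY_API_TOKEN (a hypothetical variable name) resolved from the
// environment before use. Note that the resolved values overwrite the entries
// in m.Headers.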

type Agent struct {
	ID          string `json:"id,omitempty"`
	Name        string `json:"name,omitempty"`
	Description string `json:"description,omitempty"`
	// Marks the agent as disabled.
	Disabled bool `json:"disabled,omitempty"`

	Model SelectedModelType `json:"model" jsonschema:"required,description=The model type to use for this agent,enum=large,enum=small,default=large"`

	// The available tools for the agent.
	// If this is nil, all tools are available.
	AllowedTools []string `json:"allowed_tools,omitempty"`

	// This tells us which MCPs are available for this agent.
	// If this is empty, all MCPs are available.
	// The string array is the list of tools from the AllowedMCP the agent has available;
	// if the string array is nil, all tools from the AllowedMCP are available.
	AllowedMCP map[string][]string `json:"allowed_mcp,omitempty"`

	// Overrides the context paths for this agent.
	ContextPaths []string `json:"context_paths,omitempty"`
}

type Tools struct {
	Ls ToolLs `json:"ls,omitzero"`
}

type ToolLs struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

func (t ToolLs) Limits() (depth, items int) {
	return ptrValOr(t.MaxDepth, 0), ptrValOr(t.MaxItems, 0)
}

// Config holds the configuration for crush.
type Config struct {
	Schema string `json:"$schema,omitempty"`

	// We currently only support large/small as values here.
	Models map[SelectedModelType]SelectedModel `json:"models,omitempty" jsonschema:"description=Model configurations for different model types,example={\"large\":{\"model\":\"gpt-4o\",\"provider\":\"openai\"}}"`
	// Recently used models stored in the data directory config.
	RecentModels map[SelectedModelType][]SelectedModel `json:"recent_models,omitempty" jsonschema:"description=Recently used models sorted by most recent first"`

	// The providers that are configured.
	Providers *csync.Map[string, ProviderConfig] `json:"providers,omitempty" jsonschema:"description=AI provider configurations"`

	MCP MCPs `json:"mcp,omitempty" jsonschema:"description=Model Context Protocol server configurations"`

	LSP LSPs `json:"lsp,omitempty" jsonschema:"description=Language Server Protocol configurations"`

	Options *Options `json:"options,omitempty" jsonschema:"description=General application options"`

	Permissions *Permissions `json:"permissions,omitempty" jsonschema:"description=Permission settings for tool usage"`

	Tools Tools `json:"tools,omitzero" jsonschema:"description=Tool configurations"`

	Agents map[string]Agent `json:"-"`

	// Internal
	workingDir string `json:"-"`
	// TODO: find a better way to do this; it should probably not be part of the config.
	resolver       VariableResolver
	dataConfigDir  string             `json:"-"`
	knownProviders []catwalk.Provider `json:"-"`
}

func (c *Config) WorkingDir() string {
	return c.workingDir
}

func (c *Config) EnabledProviders() []ProviderConfig {
	var enabled []ProviderConfig
	for p := range c.Providers.Seq() {
		if !p.Disable {
			enabled = append(enabled, p)
		}
	}
	return enabled
}

// IsConfigured returns true if at least one provider is configured.
func (c *Config) IsConfigured() bool {
	return len(c.EnabledProviders()) > 0
}

func (c *Config) GetModel(provider, model string) *catwalk.Model {
	if providerConfig, ok := c.Providers.Get(provider); ok {
		for _, m := range providerConfig.Models {
			if m.ID == model {
				return &m
			}
		}
	}
	return nil
}

func (c *Config) GetProviderForModel(modelType SelectedModelType) *ProviderConfig {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	if providerConfig, ok := c.Providers.Get(model.Provider); ok {
		return &providerConfig
	}
	return nil
}

func (c *Config) GetModelByType(modelType SelectedModelType) *catwalk.Model {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) LargeModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeLarge]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) SmallModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeSmall]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) SetCompactMode(enabled bool) error {
	if c.Options == nil {
		c.Options = &Options{}
	}
	if c.Options.TUI == nil {
		c.Options.TUI = &TUIOptions{}
	}
	c.Options.TUI.CompactMode = enabled
	return c.SetConfigField("options.tui.compact_mode", enabled)
}

func (c *Config) Resolve(key string) (string, error) {
	if c.resolver == nil {
		return "", fmt.Errorf("no variable resolver configured")
	}
	return c.resolver.ResolveValue(key)
}

func (c *Config) UpdatePreferredModel(modelType SelectedModelType, model SelectedModel) error {
	c.Models[modelType] = model
	if err := c.SetConfigField(fmt.Sprintf("models.%s", modelType), model); err != nil {
		return fmt.Errorf("failed to update preferred model: %w", err)
	}
	if err := c.recordRecentModel(modelType, model); err != nil {
		return err
	}
	return nil
}

func (c *Config) SetConfigField(key string, value any) error {
	// read the data
	data, err := os.ReadFile(c.dataConfigDir)
	if err != nil {
		if os.IsNotExist(err) {
			data = []byte("{}")
		} else {
			return fmt.Errorf("failed to read config file: %w", err)
		}
	}

	newValue, err := sjson.Set(string(data), key, value)
	if err != nil {
		return fmt.Errorf("failed to set config field %s: %w", key, err)
	}
	if err := os.WriteFile(c.dataConfigDir, []byte(newValue), 0o600); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}
	return nil
}
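
// Illustrative example (not from the original source): SetConfigField uses
// sjson dot-notation paths, so a call such as
//
//	c.SetConfigField("providers.openai.api_key", "sk-...")
//
// rewrites the JSON file at c.dataConfigDir so that it contains
// {"providers":{"openai":{"api_key":"sk-..."}}}, creating intermediate objects
// as needed and preserving any other existing fields.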

func (c *Config) SetProviderAPIKey(providerID, apiKey string) error {
	// First save to the config file
	err := c.SetConfigField("providers."+providerID+".api_key", apiKey)
	if err != nil {
		return fmt.Errorf("failed to save API key to config file: %w", err)
	}

	providerConfig, exists := c.Providers.Get(providerID)
	if exists {
		providerConfig.APIKey = apiKey
		c.Providers.Set(providerID, providerConfig)
		return nil
	}

	var foundProvider *catwalk.Provider
	for _, p := range c.knownProviders {
		if string(p.ID) == providerID {
			foundProvider = &p
			break
		}
	}

	if foundProvider != nil {
		// Create new provider config based on known provider
		providerConfig = ProviderConfig{
			ID:           providerID,
			Name:         foundProvider.Name,
			BaseURL:      foundProvider.APIEndpoint,
			Type:         foundProvider.Type,
			APIKey:       apiKey,
			Disable:      false,
			ExtraHeaders: make(map[string]string),
			ExtraParams:  make(map[string]string),
			Models:       foundProvider.Models,
		}
	} else {
		return fmt.Errorf("provider with ID %s not found in known providers", providerID)
	}
	// Store the updated provider config
	c.Providers.Set(providerID, providerConfig)
	return nil
}

const maxRecentModelsPerType = 5

func (c *Config) recordRecentModel(modelType SelectedModelType, model SelectedModel) error {
	if model.Provider == "" || model.Model == "" {
		return nil
	}

	if c.RecentModels == nil {
		c.RecentModels = make(map[SelectedModelType][]SelectedModel)
	}

	eq := func(a, b SelectedModel) bool {
		return a.Provider == b.Provider && a.Model == b.Model
	}

	entry := SelectedModel{
		Provider: model.Provider,
		Model:    model.Model,
	}

	current := c.RecentModels[modelType]
	withoutCurrent := slices.DeleteFunc(slices.Clone(current), func(existing SelectedModel) bool {
		return eq(existing, entry)
	})

	updated := append([]SelectedModel{entry}, withoutCurrent...)
	if len(updated) > maxRecentModelsPerType {
		updated = updated[:maxRecentModelsPerType]
	}

	if slices.EqualFunc(current, updated, eq) {
		return nil
	}

	c.RecentModels[modelType] = updated

	if err := c.SetConfigField(fmt.Sprintf("recent_models.%s", modelType), updated); err != nil {
		return fmt.Errorf("failed to persist recent models: %w", err)
	}

	return nil
}
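
// Illustrative example (not from the original source): recordRecentModel keeps
// a most-recently-used list per model type, capped at maxRecentModelsPerType.
// If the "large" list is [B, C] and the user selects A, the list becomes
// [A, B, C]; selecting C again would move it to the front, giving [C, A, B].
// Nothing is persisted when the list is already up to date.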

func allToolNames() []string {
	return []string{
		"agent",
		"bash",
		"job_output",
		"job_kill",
		"download",
		"edit",
		"multiedit",
		"lsp_diagnostics",
		"lsp_references",
		"fetch",
		"agentic_fetch",
		"glob",
		"grep",
		"ls",
		"sourcegraph",
		"view",
		"write",
	}
}

func resolveAllowedTools(allTools []string, disabledTools []string) []string {
	if disabledTools == nil {
		return allTools
	}
	// filter out disabled tools (exclude mode)
	return filterSlice(allTools, disabledTools, false)
}

func resolveReadOnlyTools(tools []string) []string {
	readOnlyTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
	// keep only the tools that are also in the read-only set (include mode)
	return filterSlice(tools, readOnlyTools, true)
}

func filterSlice(data []string, mask []string, include bool) []string {
	filtered := []string{}
	for _, s := range data {
		// if include is true, we include items that ARE in the mask
		// if include is false, we include items that are NOT in the mask
		if include == slices.Contains(mask, s) {
			filtered = append(filtered, s)
		}
	}
	return filtered
}
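
// Illustrative example (not from the original source): with
// data = []string{"bash", "ls", "view"} and mask = []string{"ls", "view"},
// filterSlice(data, mask, true) returns ["ls", "view"] (include mode), while
// filterSlice(data, mask, false) returns ["bash"] (exclude mode). This is how
// disabled tools are removed and read-only tools are selected above.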

func (c *Config) SetupAgents() {
	allowedTools := resolveAllowedTools(allToolNames(), c.Options.DisabledTools)

	agents := map[string]Agent{
		AgentCoder: {
			ID:           AgentCoder,
			Name:         "Coder",
			Description:  "An agent that helps with executing coding tasks.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: allowedTools,
		},

		AgentTask: {
			ID:           AgentTask,
			Name:         "Task",
			Description:  "An agent that helps with searching for context and finding implementation details.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: resolveReadOnlyTools(allowedTools),
			// NO MCPs or LSPs by default
			AllowedMCP: map[string][]string{},
		},
	}
	c.Agents = agents
}

func (c *Config) Resolver() VariableResolver {
	return c.resolver
}

func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
	testURL := ""
	headers := make(map[string]string)
	apiKey, _ := resolver.ResolveValue(c.APIKey)
	switch c.Type {
	case catwalk.TypeOpenAI, catwalk.TypeOpenAICompat, catwalk.TypeOpenRouter:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.openai.com/v1"
		}
		if c.ID == string(catwalk.InferenceProviderOpenRouter) {
			testURL = baseURL + "/credits"
		} else {
			testURL = baseURL + "/models"
		}
		headers["Authorization"] = "Bearer " + apiKey
	case catwalk.TypeAnthropic:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.anthropic.com/v1"
		}
		testURL = baseURL + "/models"
		headers["x-api-key"] = apiKey
		headers["anthropic-version"] = "2023-06-01"
	case catwalk.TypeGoogle:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://generativelanguage.googleapis.com"
		}
		testURL = baseURL + "/v1beta/models?key=" + url.QueryEscape(apiKey)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	client := &http.Client{}
	req, err := http.NewRequestWithContext(ctx, "GET", testURL, nil)
	if err != nil {
		return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
	}
	for k, v := range headers {
		req.Header.Set(k, v)
	}
	for k, v := range c.ExtraHeaders {
		req.Header.Set(k, v)
	}
	resp, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("failed to connect to provider %s: %w", c.ID, err)
	}
	defer resp.Body.Close()
	if c.ID == string(catwalk.InferenceProviderZAI) {
		if resp.StatusCode == http.StatusUnauthorized {
			// for z.ai just check that the response is not 401
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, resp.Status)
		}
	} else {
		if resp.StatusCode != http.StatusOK {
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, resp.Status)
		}
	}
	return nil
}

func resolveEnvs(envs map[string]string) []string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range envs {
		var err error
		envs[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving environment variable", "error", err, "variable", e, "value", v)
			continue
		}
	}

	res := make([]string, 0, len(envs))
	for k, v := range envs {
		res = append(res, fmt.Sprintf("%s=%s", k, v))
	}
	return res
}
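
// Illustrative example (not from the original source): given
// envs = map[string]string{"TOKEN": "$MY_TOKEN", "MODE": "prod"}, resolveEnvs
// expands $MY_TOKEN (a hypothetical variable) via the shell variable resolver
// and returns entries in KEY=value form, e.g. []string{"TOKEN=abc123",
// "MODE=prod"}; order is not guaranteed because the map is iterated directly,
// and the resolved values are written back into the provided map.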

func ptrValOr[T any](t *T, el T) T {
	if t == nil {
		return el
	}
	return *t
}