config.go

package config

import (
	"context"
	"fmt"
	"log/slog"
	"net/http"
	"net/url"
	"os"
	"slices"
	"strings"
	"time"

	"git.secluded.site/crush/internal/csync"
	"git.secluded.site/crush/internal/env"
	"github.com/charmbracelet/catwalk/pkg/catwalk"
	"github.com/invopop/jsonschema"
	"github.com/tidwall/sjson"
)

const (
	appName              = "crush"
	defaultDataDirectory = ".crush"
	defaultInitializeAs  = "AGENTS.md"
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"GEMINI.md",
	"gemini.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
	"AGENTS.md",
	"agents.md",
	"Agents.md",
}

type SelectedModelType string

const (
	SelectedModelTypeLarge SelectedModelType = "large"
	SelectedModelTypeSmall SelectedModelType = "small"
)

const (
	AgentCoder string = "coder"
	AgentTask  string = "task"
)

type SelectedModel struct {
	// The model ID as used by the provider API.
	// Required.
	Model string `json:"model" jsonschema:"required,description=The model ID as used by the provider API,example=gpt-4o"`
	// The model provider, same as the key/id used in the providers config.
	// Required.
	Provider string `json:"provider" jsonschema:"required,description=The model provider ID that matches a key in the providers config,example=openai"`

	// Only used by models that use the openai provider and need this set.
	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"description=Reasoning effort level for OpenAI models that support it,enum=low,enum=medium,enum=high"`

	// Used by anthropic models that can reason to indicate if the model should think.
	Think bool `json:"think,omitempty" jsonschema:"description=Enable thinking mode for Anthropic models that support reasoning"`

	// Overrides the default model configuration.
	MaxTokens        int64    `json:"max_tokens,omitempty" jsonschema:"description=Maximum number of tokens for model responses,minimum=1,maximum=200000,example=4096"`
	Temperature      *float64 `json:"temperature,omitempty" jsonschema:"description=Sampling temperature,minimum=0,maximum=1,example=0.7"`
	TopP             *float64 `json:"top_p,omitempty" jsonschema:"description=Top-p (nucleus) sampling parameter,minimum=0,maximum=1,example=0.9"`
	TopK             *int64   `json:"top_k,omitempty" jsonschema:"description=Top-k sampling parameter"`
	FrequencyPenalty *float64 `json:"frequency_penalty,omitempty" jsonschema:"description=Frequency penalty to reduce repetition"`
	PresencePenalty  *float64 `json:"presence_penalty,omitempty" jsonschema:"description=Presence penalty to increase topic diversity"`

	// Override provider specific options.
	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for the model"`
}
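
// Illustrative only: a selected-model entry in the JSON config might look
// like the following (all values are examples taken from the field tags
// above, not defaults):
//
//	"large": {
//	  "provider": "openai",
//	  "model": "gpt-4o",
//	  "reasoning_effort": "medium",
//	  "max_tokens": 4096
//	}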

type ProviderConfig struct {
	// The provider's id.
	ID string `json:"id,omitempty" jsonschema:"description=Unique identifier for the provider,example=openai"`
	// The provider's name, used for display purposes.
	Name string `json:"name,omitempty" jsonschema:"description=Human-readable name for the provider,example=OpenAI"`
	// The provider's API endpoint.
	BaseURL string `json:"base_url,omitempty" jsonschema:"description=Base URL for the provider's API,format=uri,example=https://api.openai.com/v1"`
	// The provider type, e.g. "openai", "anthropic", etc. If empty it defaults to openai.
	Type catwalk.Type `json:"type,omitempty" jsonschema:"description=Provider type that determines the API format,enum=openai,enum=openai-compat,enum=anthropic,enum=gemini,enum=azure,enum=vertexai,default=openai"`
	// The provider's API key.
	APIKey string `json:"api_key,omitempty" jsonschema:"description=API key for authentication with the provider,example=$OPENAI_API_KEY"`
	// Marks the provider as disabled.
	Disable bool `json:"disable,omitempty" jsonschema:"description=Whether this provider is disabled,default=false"`

	// Custom system prompt prefix.
	SystemPromptPrefix string `json:"system_prompt_prefix,omitempty" jsonschema:"description=Custom prefix to add to system prompts for this provider"`

	// Extra headers to send with each request to the provider.
	ExtraHeaders map[string]string `json:"extra_headers,omitempty" jsonschema:"description=Additional HTTP headers to send with requests"`
	// Extra fields to include in request bodies; only used by OpenAI-compatible providers.
	ExtraBody map[string]any `json:"extra_body,omitempty" jsonschema:"description=Additional fields to include in request bodies, only works with openai-compatible providers"`

	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for this provider"`

	// Used to pass extra parameters to the provider.
	ExtraParams map[string]string `json:"-"`

	// The provider models.
	Models []catwalk.Model `json:"models,omitempty" jsonschema:"description=List of models available from this provider"`
}
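
// Illustrative only: a custom OpenAI-compatible provider could be configured
// roughly like this (the endpoint and key names are placeholders, not real
// values):
//
//	"providers": {
//	  "my-proxy": {
//	    "type": "openai-compat",
//	    "base_url": "https://llm.example.com/v1",
//	    "api_key": "$MY_PROXY_API_KEY"
//	  }
//	}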

type MCPType string

const (
	MCPStdio MCPType = "stdio"
	MCPSSE   MCPType = "sse"
	MCPHttp  MCPType = "http"
)

type MCPConfig struct {
	Command  string            `json:"command,omitempty" jsonschema:"description=Command to execute for stdio MCP servers,example=npx"`
	Env      map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the MCP server"`
	Args     []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the MCP server command"`
	Type     MCPType           `json:"type" jsonschema:"required,description=Type of MCP connection,enum=stdio,enum=sse,enum=http,default=stdio"`
	URL      string            `json:"url,omitempty" jsonschema:"description=URL for HTTP or SSE MCP servers,format=uri,example=http://localhost:3000/mcp"`
	Disabled bool              `json:"disabled,omitempty" jsonschema:"description=Whether this MCP server is disabled,default=false"`
	Timeout  int               `json:"timeout,omitempty" jsonschema:"description=Timeout in seconds for MCP server connections,default=15,example=30,example=60,example=120"`

	// TODO: maybe make it possible to get the value from the env
	Headers map[string]string `json:"headers,omitempty" jsonschema:"description=HTTP headers for HTTP/SSE MCP servers"`
}
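
// Illustrative only: a stdio MCP server and an HTTP MCP server might be
// declared like this (the command, package, and URL are placeholders):
//
//	"mcp": {
//	  "filesystem": {"type": "stdio", "command": "npx", "args": ["-y", "@example/mcp-fs"]},
//	  "search": {"type": "http", "url": "http://localhost:3000/mcp"}
//	}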

type LSPConfig struct {
	Disabled    bool              `json:"disabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
	Command     string            `json:"command,omitempty" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
	Args        []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
	Env         map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set to the LSP server command"`
	FileTypes   []string          `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
	RootMarkers []string          `json:"root_markers,omitempty" jsonschema:"description=Files or directories that indicate the project root,example=go.mod,example=package.json,example=Cargo.toml"`
	InitOptions map[string]any    `json:"init_options,omitempty" jsonschema:"description=Initialization options passed to the LSP server during initialize request"`
	Options     map[string]any    `json:"options,omitempty" jsonschema:"description=LSP server-specific settings passed during initialization"`
}
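
// Illustrative only: a typical Go language server entry (values are examples
// based on the field tags above):
//
//	"lsp": {
//	  "gopls": {
//	    "command": "gopls",
//	    "filetypes": ["go", "mod"],
//	    "root_markers": ["go.mod"]
//	  }
//	}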

type TUIOptions struct {
	CompactMode bool   `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
	DiffMode    string `json:"diff_mode,omitempty" jsonschema:"description=Diff mode for the TUI interface,enum=unified,enum=split"`
	// Here we can add themes later or any TUI related options.

	Completions Completions `json:"completions,omitzero" jsonschema:"description=Completions UI options"`
}

// Completions defines options for the completions UI.
type Completions struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

func (c Completions) Limits() (depth, items int) {
	return ptrValOr(c.MaxDepth, 0), ptrValOr(c.MaxItems, 0)
}

type Permissions struct {
	AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"description=List of tools that don't require permission prompts,example=bash,example=view"` // Tools that don't require permission prompts
	SkipRequests bool     `json:"-"`                                                                                                                              // Automatically accept all permissions (YOLO mode)
}

type TrailerStyle string

const (
	TrailerStyleNone         TrailerStyle = "none"
	TrailerStyleCoAuthoredBy TrailerStyle = "co-authored-by"
	TrailerStyleAssistedBy   TrailerStyle = "assisted-by"
)

type Attribution struct {
	TrailerStyle  TrailerStyle `json:"trailer_style,omitempty" jsonschema:"description=Style of attribution trailer to add to commits,enum=none,enum=co-authored-by,enum=assisted-by,default=assisted-by"`
	CoAuthoredBy  *bool        `json:"co_authored_by,omitempty" jsonschema:"description=Deprecated: use trailer_style instead"`
	GeneratedWith bool         `json:"generated_with,omitempty" jsonschema:"description=Add Generated with Crush line to commit messages and issues and PRs,default=true"`
}

// JSONSchemaExtend marks the co_authored_by field as deprecated in the schema.
func (Attribution) JSONSchemaExtend(schema *jsonschema.Schema) {
	if schema.Properties != nil {
		if prop, ok := schema.Properties.Get("co_authored_by"); ok {
			prop.Deprecated = true
		}
	}
}

type Options struct {
	ContextPaths              []string     `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
	MemoryPaths               []string     `json:"memory_paths,omitempty" jsonschema:"description=Paths to files containing memory information for the AI,default=~/.config/crush/CRUSH.md,default=~/.config/AGENTS.md"`
	TUI                       *TUIOptions  `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
	Debug                     bool         `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
	DebugLSP                  bool         `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
	DisableAutoSummarize      bool         `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
	DataDirectory             string       `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
	DisabledTools             []string     `json:"disabled_tools" jsonschema:"description=Tools to disable"`
	DisableProviderAutoUpdate bool         `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
	Attribution               *Attribution `json:"attribution,omitempty" jsonschema:"description=Attribution settings for generated content"`
	DisableMetrics            bool         `json:"disable_metrics,omitempty" jsonschema:"description=Disable sending metrics,default=false"`
	InitializeAs              string       `json:"initialize_as,omitempty" jsonschema:"description=Name of the context file to create/update during project initialization,default=AGENTS.md,example=AGENTS.md,example=CRUSH.md,example=CLAUDE.md,example=docs/LLMs.md"`
	DisableNotifications      bool         `json:"disable_notifications,omitempty" jsonschema:"description=Disable desktop notifications,default=false"`
}

type MCPs map[string]MCPConfig

type MCP struct {
	Name string    `json:"name"`
	MCP  MCPConfig `json:"mcp"`
}

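// Sorted returns the configured MCP servers as a slice sorted by name,
// giving a deterministic order when iterating over the underlying map.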
func (m MCPs) Sorted() []MCP {
	sorted := make([]MCP, 0, len(m))
	for k, v := range m {
		sorted = append(sorted, MCP{
			Name: k,
			MCP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b MCP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

type LSPs map[string]LSPConfig

type LSP struct {
	Name string    `json:"name"`
	LSP  LSPConfig `json:"lsp"`
}

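// Sorted returns the configured LSP servers as a slice sorted by name,
// giving a deterministic order when iterating over the underlying map.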
func (l LSPs) Sorted() []LSP {
	sorted := make([]LSP, 0, len(l))
	for k, v := range l {
		sorted = append(sorted, LSP{
			Name: k,
			LSP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b LSP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

func (l LSPConfig) ResolvedEnv() []string {
	return resolveEnvs(l.Env)
}

func (m MCPConfig) ResolvedEnv() []string {
	return resolveEnvs(m.Env)
}

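// ResolvedHeaders resolves shell-style variable references in the configured
// headers in place and returns the resulting map; resolution errors are
// logged.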
func (m MCPConfig) ResolvedHeaders() map[string]string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range m.Headers {
		var err error
		m.Headers[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving header variable", "error", err, "variable", e, "value", v)
			continue
		}
	}
	return m.Headers
}

type Agent struct {
	// This is the id of the system prompt used by the agent.
	ID          string `json:"id,omitempty"`
	Name        string `json:"name,omitempty"`
	Description string `json:"description,omitempty"`
	Disabled    bool   `json:"disabled,omitempty"`

	Model SelectedModelType `json:"model" jsonschema:"required,description=The model type to use for this agent,enum=large,enum=small,default=large"`

	// The available tools for the agent.
	// If this is nil, all tools are available.
	AllowedTools []string `json:"allowed_tools,omitempty"`

	// AllowedMCP lists which MCP servers are available to this agent.
	// If the map is empty, all MCP servers are available.
	// Each value lists the tools the agent may use from that MCP server;
	// a nil value means all of that server's tools are available.
	AllowedMCP map[string][]string `json:"allowed_mcp,omitempty"`

	// Overrides the context paths for this agent.
	ContextPaths []string `json:"context_paths,omitempty"`
}

type Tools struct {
	Ls ToolLs `json:"ls,omitzero"`
}

type ToolLs struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

func (t ToolLs) Limits() (depth, items int) {
	return ptrValOr(t.MaxDepth, 0), ptrValOr(t.MaxItems, 0)
}

// Config holds the configuration for crush.
type Config struct {
	Schema string `json:"$schema,omitempty"`

	// We currently only support large/small as values here.
	Models map[SelectedModelType]SelectedModel `json:"models,omitempty" jsonschema:"description=Model configurations for different model types,example={\"large\":{\"model\":\"gpt-4o\",\"provider\":\"openai\"}}"`
	// Recently used models stored in the data directory config.
	RecentModels map[SelectedModelType][]SelectedModel `json:"recent_models,omitempty" jsonschema:"description=Recently used models sorted by most recent first"`

	// The providers that are configured.
	Providers *csync.Map[string, ProviderConfig] `json:"providers,omitempty" jsonschema:"description=AI provider configurations"`

	MCP MCPs `json:"mcp,omitempty" jsonschema:"description=Model Context Protocol server configurations"`

	LSP LSPs `json:"lsp,omitempty" jsonschema:"description=Language Server Protocol configurations"`

	Options *Options `json:"options,omitempty" jsonschema:"description=General application options"`

	Permissions *Permissions `json:"permissions,omitempty" jsonschema:"description=Permission settings for tool usage"`

	Tools Tools `json:"tools,omitzero" jsonschema:"description=Tool configurations"`

	Agents map[string]Agent `json:"-"`

	// Internal
	workingDir string `json:"-"`
	// TODO: find a better way to do this; it should probably not be part of the config
	resolver       VariableResolver
	dataConfigDir  string             `json:"-"`
	knownProviders []catwalk.Provider `json:"-"`
}
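
// Illustrative only: a minimal crush configuration file might look like this
// (provider, model, and key names are examples, not defaults):
//
//	{
//	  "models": {
//	    "large": {"provider": "openai", "model": "gpt-4o"},
//	    "small": {"provider": "openai", "model": "gpt-4o-mini"}
//	  },
//	  "providers": {
//	    "openai": {"api_key": "$OPENAI_API_KEY"}
//	  },
//	  "options": {"debug": false}
//	}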

func (c *Config) WorkingDir() string {
	return c.workingDir
}

func (c *Config) EnabledProviders() []ProviderConfig {
	var enabled []ProviderConfig
	for p := range c.Providers.Seq() {
		if !p.Disable {
			enabled = append(enabled, p)
		}
	}
	return enabled
}

// IsConfigured reports whether at least one provider is configured and enabled.
func (c *Config) IsConfigured() bool {
	return len(c.EnabledProviders()) > 0
}

func (c *Config) GetModel(provider, model string) *catwalk.Model {
	if providerConfig, ok := c.Providers.Get(provider); ok {
		for _, m := range providerConfig.Models {
			if m.ID == model {
				return &m
			}
		}
	}
	return nil
}

func (c *Config) GetProviderForModel(modelType SelectedModelType) *ProviderConfig {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	if providerConfig, ok := c.Providers.Get(model.Provider); ok {
		return &providerConfig
	}
	return nil
}

func (c *Config) GetModelByType(modelType SelectedModelType) *catwalk.Model {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) LargeModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeLarge]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) SmallModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeSmall]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) SetCompactMode(enabled bool) error {
	if c.Options == nil {
		c.Options = &Options{}
	}
	if c.Options.TUI == nil {
		c.Options.TUI = &TUIOptions{}
	}
	c.Options.TUI.CompactMode = enabled
	return c.SetConfigField("options.tui.compact_mode", enabled)
}

func (c *Config) Resolve(key string) (string, error) {
	if c.resolver == nil {
		return "", fmt.Errorf("no variable resolver configured")
	}
	return c.resolver.ResolveValue(key)
}

func (c *Config) UpdatePreferredModel(modelType SelectedModelType, model SelectedModel) error {
	c.Models[modelType] = model
	if err := c.SetConfigField(fmt.Sprintf("models.%s", modelType), model); err != nil {
		return fmt.Errorf("failed to update preferred model: %w", err)
	}
	if err := c.recordRecentModel(modelType, model); err != nil {
		return err
	}
	return nil
}

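// SetConfigField persists a single value to the config file in the data
// directory using an sjson dot-separated path. For example (illustrative),
// key "options.tui.compact_mode" with value true produces
// {"options":{"tui":{"compact_mode":true}}} when the file starts out empty.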
func (c *Config) SetConfigField(key string, value any) error {
	// Read the existing config data, falling back to an empty document.
	data, err := os.ReadFile(c.dataConfigDir)
	if err != nil {
		if os.IsNotExist(err) {
			data = []byte("{}")
		} else {
			return fmt.Errorf("failed to read config file: %w", err)
		}
	}

	newValue, err := sjson.Set(string(data), key, value)
	if err != nil {
		return fmt.Errorf("failed to set config field %s: %w", key, err)
	}
	if err := os.WriteFile(c.dataConfigDir, []byte(newValue), 0o600); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}
	return nil
}

func (c *Config) SetProviderAPIKey(providerID, apiKey string) error {
	// First save to the config file
	err := c.SetConfigField("providers."+providerID+".api_key", apiKey)
	if err != nil {
		return fmt.Errorf("failed to save API key to config file: %w", err)
	}

	providerConfig, exists := c.Providers.Get(providerID)
	if exists {
		providerConfig.APIKey = apiKey
		c.Providers.Set(providerID, providerConfig)
		return nil
	}

	var foundProvider *catwalk.Provider
	for _, p := range c.knownProviders {
		if string(p.ID) == providerID {
			foundProvider = &p
			break
		}
	}

	if foundProvider == nil {
		return fmt.Errorf("provider with ID %s not found in known providers", providerID)
	}

	// Create a new provider config based on the known provider.
	providerConfig = ProviderConfig{
		ID:           providerID,
		Name:         foundProvider.Name,
		BaseURL:      foundProvider.APIEndpoint,
		Type:         foundProvider.Type,
		APIKey:       apiKey,
		Disable:      false,
		ExtraHeaders: make(map[string]string),
		ExtraParams:  make(map[string]string),
		Models:       foundProvider.Models,
	}

	// Store the updated provider config.
	c.Providers.Set(providerID, providerConfig)
	return nil
}

const maxRecentModelsPerType = 5

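// recordRecentModel moves the given provider/model pair to the front of the
// per-type recent-models list, truncates the list to maxRecentModelsPerType
// entries, and persists it under recent_models.<type> when it changed.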
func (c *Config) recordRecentModel(modelType SelectedModelType, model SelectedModel) error {
	if model.Provider == "" || model.Model == "" {
		return nil
	}

	if c.RecentModels == nil {
		c.RecentModels = make(map[SelectedModelType][]SelectedModel)
	}

	eq := func(a, b SelectedModel) bool {
		return a.Provider == b.Provider && a.Model == b.Model
	}

	entry := SelectedModel{
		Provider: model.Provider,
		Model:    model.Model,
	}

	current := c.RecentModels[modelType]
	withoutCurrent := slices.DeleteFunc(slices.Clone(current), func(existing SelectedModel) bool {
		return eq(existing, entry)
	})

	updated := append([]SelectedModel{entry}, withoutCurrent...)
	if len(updated) > maxRecentModelsPerType {
		updated = updated[:maxRecentModelsPerType]
	}

	if slices.EqualFunc(current, updated, eq) {
		return nil
	}

	c.RecentModels[modelType] = updated

	if err := c.SetConfigField(fmt.Sprintf("recent_models.%s", modelType), updated); err != nil {
		return fmt.Errorf("failed to persist recent models: %w", err)
	}

	return nil
}

func allToolNames() []string {
	return []string{
		"agent",
		"bash",
		"job_output",
		"job_kill",
		"download",
		"edit",
		"multiedit",
		"lsp_diagnostics",
		"lsp_references",
		"fetch",
		"agentic_fetch",
		"glob",
		"grep",
		"ls",
		"sourcegraph",
		"view",
		"write",
	}
}

func resolveAllowedTools(allTools []string, disabledTools []string) []string {
	if disabledTools == nil {
		return allTools
	}
	// Filter out the disabled tools (exclude mode).
	return filterSlice(allTools, disabledTools, false)
}

func resolveReadOnlyTools(tools []string) []string {
	readOnlyTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
	// Keep only the allowed tools that are read-only (include mode).
	return filterSlice(tools, readOnlyTools, true)
}

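// filterSlice returns the elements of data whose membership in mask matches
// include. A quick illustration (hypothetical values):
//
//	filterSlice([]string{"bash", "view", "edit"}, []string{"edit"}, false) // => ["bash", "view"]
//	filterSlice([]string{"bash", "view", "edit"}, []string{"view"}, true)  // => ["view"]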
func filterSlice(data []string, mask []string, include bool) []string {
	filtered := []string{}
	for _, s := range data {
		// if include is true, we include items that ARE in the mask
		// if include is false, we include items that are NOT in the mask
		if include == slices.Contains(mask, s) {
			filtered = append(filtered, s)
		}
	}
	return filtered
}

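// SetupAgents builds the default agents: the coder agent gets every tool that
// is not disabled, while the task agent is limited to read-only tools and, by
// default, no MCP servers.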
func (c *Config) SetupAgents() {
	allowedTools := resolveAllowedTools(allToolNames(), c.Options.DisabledTools)

	agents := map[string]Agent{
		AgentCoder: {
			ID:           AgentCoder,
			Name:         "Coder",
			Description:  "An agent that helps with executing coding tasks.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: allowedTools,
		},

		AgentTask: {
			ID:           AgentTask,
			Name:         "Task",
			Description:  "An agent that helps with searching for context and finding implementation details.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: resolveReadOnlyTools(allowedTools),
			// No MCPs or LSPs by default.
			AllowedMCP: map[string][]string{},
		},
	}
	c.Agents = agents
}

func (c *Config) Resolver() VariableResolver {
	return c.resolver
}

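// TestConnection performs a lightweight GET against a provider-specific
// endpoint (such as /models) using the resolved base URL and API key, and
// reports an error when the provider does not respond as expected.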
func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
	testURL := ""
	headers := make(map[string]string)
	apiKey, _ := resolver.ResolveValue(c.APIKey)
	switch c.Type {
	case catwalk.TypeOpenAI, catwalk.TypeOpenAICompat, catwalk.TypeOpenRouter:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.openai.com/v1"
		}
		if c.ID == string(catwalk.InferenceProviderOpenRouter) {
			testURL = baseURL + "/credits"
		} else {
			testURL = baseURL + "/models"
		}
		headers["Authorization"] = "Bearer " + apiKey
	case catwalk.TypeAnthropic:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.anthropic.com/v1"
		}
		testURL = baseURL + "/models"
		// TODO: replace with const when catwalk is released
		if c.ID == "kimi-coding" {
			testURL = baseURL + "/v1/models"
		}
		headers["x-api-key"] = apiKey
		headers["anthropic-version"] = "2023-06-01"
	case catwalk.TypeGoogle:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://generativelanguage.googleapis.com"
		}
		testURL = baseURL + "/v1beta/models?key=" + url.QueryEscape(apiKey)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	client := &http.Client{}
	req, err := http.NewRequestWithContext(ctx, "GET", testURL, nil)
	if err != nil {
		return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
	}
	for k, v := range headers {
		req.Header.Set(k, v)
	}
	for k, v := range c.ExtraHeaders {
		req.Header.Set(k, v)
	}
	b, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("failed to connect to provider %s: %w", c.ID, err)
	}
	defer func() { _ = b.Body.Close() }()
	if c.ID == string(catwalk.InferenceProviderZAI) {
		// For z.ai, just check that the response is not 401.
		if b.StatusCode == http.StatusUnauthorized {
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
		}
	} else {
		if b.StatusCode != http.StatusOK {
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
		}
	}
	return nil
}

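// resolveEnvs resolves shell-style variable references in the given map in
// place and returns the entries formatted as KEY=VALUE strings, suitable for
// passing to a child process environment.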
func resolveEnvs(envs map[string]string) []string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range envs {
		var err error
		envs[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving environment variable", "error", err, "variable", e, "value", v)
			continue
		}
	}

	res := make([]string, 0, len(envs))
	for k, v := range envs {
		res = append(res, fmt.Sprintf("%s=%s", k, v))
	}
	return res
}

func ptrValOr[T any](t *T, el T) T {
	if t == nil {
		return el
	}
	return *t
}