config.go

package config

import (
	"context"
	"fmt"
	"log/slog"
	"net/http"
	"net/url"
	"os"
	"slices"
	"strings"
	"time"

	"github.com/charmbracelet/catwalk/pkg/catwalk"
	"github.com/charmbracelet/crush/internal/csync"
	"github.com/charmbracelet/crush/internal/env"
	"github.com/tidwall/sjson"
)

const (
	appName              = "crush"
	defaultDataDirectory = ".crush"
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"GEMINI.md",
	"gemini.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
	"AGENTS.md",
	"agents.md",
	"Agents.md",
}

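// SelectedModelType identifies which of the configured models an agent should
// use; currently only "large" and "small" are supported.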
type SelectedModelType string

const (
	SelectedModelTypeLarge SelectedModelType = "large"
	SelectedModelTypeSmall SelectedModelType = "small"
)

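// SelectedModel describes the concrete model chosen for one of the model
// types, along with per-model overrides.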
type SelectedModel struct {
	// The model id as used by the provider API.
	// Required.
	Model string `json:"model" jsonschema:"required,description=The model ID as used by the provider API,example=gpt-4o"`
	// The model provider, same as the key/id used in the providers config.
	// Required.
	Provider string `json:"provider" jsonschema:"required,description=The model provider ID that matches a key in the providers config,example=openai"`

	// Only used by models that use the openai provider and need this set.
	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"description=Reasoning effort level for OpenAI models that support it,enum=low,enum=medium,enum=high"`

	// Overrides the default model configuration.
	MaxTokens int64 `json:"max_tokens,omitempty" jsonschema:"description=Maximum number of tokens for model responses,minimum=1,maximum=200000,example=4096"`

	// Used by Anthropic models that support reasoning to indicate whether the model should think.
	Think bool `json:"think,omitempty" jsonschema:"description=Enable thinking mode for Anthropic models that support reasoning"`
}

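// ProviderConfig describes a single AI provider: its endpoint, credentials,
// and the models it exposes.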
type ProviderConfig struct {
	// The provider's id.
	ID string `json:"id,omitempty" jsonschema:"description=Unique identifier for the provider,example=openai"`
	// The provider's name, used for display purposes.
	Name string `json:"name,omitempty" jsonschema:"description=Human-readable name for the provider,example=OpenAI"`
	// The provider's API endpoint.
	BaseURL string `json:"base_url,omitempty" jsonschema:"description=Base URL for the provider's API,format=uri,example=https://api.openai.com/v1"`
	// The provider type, e.g. "openai", "anthropic", etc. If empty it defaults to openai.
	Type catwalk.Type `json:"type,omitempty" jsonschema:"description=Provider type that determines the API format,enum=openai,enum=anthropic,enum=gemini,enum=azure,enum=vertexai,default=openai"`
	// The provider's API key.
	APIKey string `json:"api_key,omitempty" jsonschema:"description=API key for authentication with the provider,example=$OPENAI_API_KEY"`
	// Marks the provider as disabled.
	Disable bool `json:"disable,omitempty" jsonschema:"description=Whether this provider is disabled,default=false"`

	// Custom system prompt prefix.
	SystemPromptPrefix string `json:"system_prompt_prefix,omitempty" jsonschema:"description=Custom prefix to add to system prompts for this provider"`

	// Extra headers to send with each request to the provider.
	ExtraHeaders map[string]string `json:"extra_headers,omitempty" jsonschema:"description=Additional HTTP headers to send with requests"`
	// Extra fields to include in each request body.
	ExtraBody map[string]any `json:"extra_body,omitempty" jsonschema:"description=Additional fields to include in request bodies"`

	// Used to pass extra parameters to the provider.
	ExtraParams map[string]string `json:"-"`

	// The provider's models.
	Models []catwalk.Model `json:"models,omitempty" jsonschema:"description=List of models available from this provider"`
}

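// MCPType is the transport used to communicate with an MCP server.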
type MCPType string

const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
	MCPHttp  MCPType = "http"
)

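// MCPConfig describes how to start or connect to a single MCP server.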
type MCPConfig struct {
	Command  string            `json:"command,omitempty" jsonschema:"description=Command to execute for stdio MCP servers,example=npx"`
	Env      map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the MCP server"`
	Args     []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the MCP server command"`
	Type     MCPType           `json:"type" jsonschema:"required,description=Type of MCP connection,enum=stdio,enum=sse,enum=http,default=stdio"`
	URL      string            `json:"url,omitempty" jsonschema:"description=URL for HTTP or SSE MCP servers,format=uri,example=http://localhost:3000/mcp"`
	Disabled bool              `json:"disabled,omitempty" jsonschema:"description=Whether this MCP server is disabled,default=false"`
	Timeout  int               `json:"timeout,omitempty" jsonschema:"description=Timeout in seconds for MCP server connections,default=15,example=30,example=60,example=120"`

	// TODO: maybe make it possible to get the value from the env
	Headers map[string]string `json:"headers,omitempty" jsonschema:"description=HTTP headers for HTTP/SSE MCP servers"`
}

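// LSPConfig describes a single Language Server Protocol server.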
type LSPConfig struct {
	Disabled    bool              `json:"disabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
	Command     string            `json:"command,omitempty" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
	Args        []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
	Env         map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the LSP server command"`
	FileTypes   []string          `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
	RootMarkers []string          `json:"root_markers,omitempty" jsonschema:"description=Files or directories that indicate the project root,example=go.mod,example=package.json,example=Cargo.toml"`
	InitOptions map[string]any    `json:"init_options,omitempty" jsonschema:"description=Initialization options passed to the LSP server during initialize request"`
	Options     map[string]any    `json:"options,omitempty" jsonschema:"description=LSP server-specific settings passed during initialization"`
}

type TUIOptions struct {
	CompactMode bool   `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
	DiffMode    string `json:"diff_mode,omitempty" jsonschema:"description=Diff mode for the TUI interface,enum=unified,enum=split"`
	// Here we can add themes later or any TUI related options
}

type Permissions struct {
	AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"description=List of tools that don't require permission prompts,example=bash,example=view"` // Tools that don't require permission prompts
	SkipRequests bool     `json:"-"`                                                                                                                              // Automatically accept all permissions (YOLO mode)
}

type Attribution struct {
	CoAuthoredBy  bool `json:"co_authored_by,omitempty" jsonschema:"description=Add Co-Authored-By trailer to commit messages,default=true"`
	GeneratedWith bool `json:"generated_with,omitempty" jsonschema:"description=Add Generated with Crush line to commit messages and issues and PRs,default=true"`
}

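// Options holds general application options.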
type Options struct {
	ContextPaths              []string     `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
	TUI                       *TUIOptions  `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
	Debug                     bool         `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
	DebugLSP                  bool         `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
	DisableAutoSummarize      bool         `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
	DataDirectory             string       `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
	DisabledTools             []string     `json:"disabled_tools" jsonschema:"description=Tools to disable"`
	DisableProviderAutoUpdate bool         `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
	Attribution               *Attribution `json:"attribution,omitempty" jsonschema:"description=Attribution settings for generated content"`
	DisableMetrics            bool         `json:"disable_metrics,omitempty" jsonschema:"description=Disable sending metrics,default=false"`
}

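// MCPs maps MCP server names to their configurations.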
type MCPs map[string]MCPConfig

type MCP struct {
	Name string    `json:"name"`
	MCP  MCPConfig `json:"mcp"`
}

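// Sorted returns the configured MCP servers as a name-sorted slice, which
// keeps listings deterministic. A minimal usage sketch (assuming cfg is a
// loaded *Config):
//
//	for _, entry := range cfg.MCP.Sorted() {
//		fmt.Println(entry.Name, entry.MCP.Type)
//	}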
func (m MCPs) Sorted() []MCP {
	sorted := make([]MCP, 0, len(m))
	for k, v := range m {
		sorted = append(sorted, MCP{
			Name: k,
			MCP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b MCP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

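// LSPs maps LSP server names to their configurations.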
type LSPs map[string]LSPConfig

type LSP struct {
	Name string    `json:"name"`
	LSP  LSPConfig `json:"lsp"`
}

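// Sorted returns the configured LSP servers as a name-sorted slice.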
func (l LSPs) Sorted() []LSP {
	sorted := make([]LSP, 0, len(l))
	for k, v := range l {
		sorted = append(sorted, LSP{
			Name: k,
			LSP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b LSP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

func (l LSPConfig) ResolvedEnv() []string {
	return resolveEnvs(l.Env)
}

func (m MCPConfig) ResolvedEnv() []string {
	return resolveEnvs(m.Env)
}

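// ResolvedHeaders resolves any variable references in the MCP server's headers
// and returns them; note that the header map is updated in place.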
func (m MCPConfig) ResolvedHeaders() map[string]string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range m.Headers {
		var err error
		m.Headers[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving header variable", "error", err, "variable", e, "value", v)
			continue
		}
	}
	return m.Headers
}

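// Agent describes a single agent: which model type it uses and which tools,
// MCP servers, and LSP servers it may access.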
type Agent struct {
	ID          string `json:"id,omitempty"`
	Name        string `json:"name,omitempty"`
	Description string `json:"description,omitempty"`
	// This is the id of the system prompt used by the agent
	Disabled bool `json:"disabled,omitempty"`

	Model SelectedModelType `json:"model" jsonschema:"required,description=The model type to use for this agent,enum=large,enum=small,default=large"`

	// The available tools for the agent
	//  if this is nil, all tools are available
	AllowedTools []string `json:"allowed_tools,omitempty"`

	// this tells us which MCPs are available for this agent
	//  if this is empty all mcps are available
	//  the string array is the list of tools from the AllowedMCP the agent has available
	//  if the string array is nil, all tools from the AllowedMCP are available
	AllowedMCP map[string][]string `json:"allowed_mcp,omitempty"`

	// The list of LSPs that this agent can use
	//  if this is nil, all LSPs are available
	AllowedLSP []string `json:"allowed_lsp,omitempty"`

	// Overrides the context paths for this agent
	ContextPaths []string `json:"context_paths,omitempty"`
}

// Config holds the configuration for crush.
type Config struct {
	Schema string `json:"$schema,omitempty"`

	// We currently only support large/small as values here.
	Models map[SelectedModelType]SelectedModel `json:"models,omitempty" jsonschema:"description=Model configurations for different model types,example={\"large\":{\"model\":\"gpt-4o\",\"provider\":\"openai\"}}"`

	// The providers that are configured
	Providers *csync.Map[string, ProviderConfig] `json:"providers,omitempty" jsonschema:"description=AI provider configurations"`

	MCP MCPs `json:"mcp,omitempty" jsonschema:"description=Model Context Protocol server configurations"`

	LSP LSPs `json:"lsp,omitempty" jsonschema:"description=Language Server Protocol configurations"`

	Options *Options `json:"options,omitempty" jsonschema:"description=General application options"`

	Permissions *Permissions `json:"permissions,omitempty" jsonschema:"description=Permission settings for tool usage"`

	// Internal
	workingDir string `json:"-"`
	// TODO: most likely remove this concept when I come back to it
	Agents map[string]Agent `json:"-"`
	// TODO: find a better way to do this; it should probably not be part of the config
	resolver       VariableResolver
	dataConfigDir  string             `json:"-"`
	knownProviders []catwalk.Provider `json:"-"`
}

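// WorkingDir returns the working directory the configuration was loaded for.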
func (c *Config) WorkingDir() string {
	return c.workingDir
}

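// EnabledProviders returns all configured providers that are not disabled.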
func (c *Config) EnabledProviders() []ProviderConfig {
	var enabled []ProviderConfig
	for p := range c.Providers.Seq() {
		if !p.Disable {
			enabled = append(enabled, p)
		}
	}
	return enabled
}

// IsConfigured returns true if at least one provider is configured.
func (c *Config) IsConfigured() bool {
	return len(c.EnabledProviders()) > 0
}

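// GetModel looks up a model by provider ID and model ID; it returns nil if
// either is unknown.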
func (c *Config) GetModel(provider, model string) *catwalk.Model {
	if providerConfig, ok := c.Providers.Get(provider); ok {
		for _, m := range providerConfig.Models {
			if m.ID == model {
				return &m
			}
		}
	}
	return nil
}

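// GetProviderForModel returns the provider configuration for the given model
// type, or nil if the model type or its provider is not configured.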
func (c *Config) GetProviderForModel(modelType SelectedModelType) *ProviderConfig {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	if providerConfig, ok := c.Providers.Get(model.Provider); ok {
		return &providerConfig
	}
	return nil
}

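// GetModelByType returns the catwalk model selected for the given model type,
// or nil if it is not configured.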
func (c *Config) GetModelByType(modelType SelectedModelType) *catwalk.Model {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) LargeModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeLarge]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) SmallModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeSmall]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

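// SetCompactMode toggles TUI compact mode and persists the change to the
// config file.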
func (c *Config) SetCompactMode(enabled bool) error {
	if c.Options == nil {
		c.Options = &Options{}
	}
	if c.Options.TUI == nil {
		c.Options.TUI = &TUIOptions{}
	}
	c.Options.TUI.CompactMode = enabled
	return c.SetConfigField("options.tui.compact_mode", enabled)
}

func (c *Config) Resolve(key string) (string, error) {
	if c.resolver == nil {
		return "", fmt.Errorf("no variable resolver configured")
	}
	return c.resolver.ResolveValue(key)
}

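// UpdatePreferredModel changes the selected model for the given model type in
// memory and persists the selection to the config file.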
func (c *Config) UpdatePreferredModel(modelType SelectedModelType, model SelectedModel) error {
	c.Models[modelType] = model
	if err := c.SetConfigField(fmt.Sprintf("models.%s", modelType), model); err != nil {
		return fmt.Errorf("failed to update preferred model: %w", err)
	}
	return nil
}

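// SetConfigField writes a single value into the on-disk config file using a
// dot-separated sjson path, creating the file if it does not exist yet. A
// hypothetical call to disable metrics could look like:
//
//	_ = cfg.SetConfigField("options.disable_metrics", true)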
func (c *Config) SetConfigField(key string, value any) error {
	// read the data
	data, err := os.ReadFile(c.dataConfigDir)
	if err != nil {
		if os.IsNotExist(err) {
			data = []byte("{}")
		} else {
			return fmt.Errorf("failed to read config file: %w", err)
		}
	}

	newValue, err := sjson.Set(string(data), key, value)
	if err != nil {
		return fmt.Errorf("failed to set config field %s: %w", key, err)
	}
	if err := os.WriteFile(c.dataConfigDir, []byte(newValue), 0o600); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}
	return nil
}

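// SetProviderAPIKey persists an API key for a provider and updates the
// in-memory provider config, creating one from the known-provider catalog when
// the provider is not configured yet.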
func (c *Config) SetProviderAPIKey(providerID, apiKey string) error {
	// First save to the config file
	err := c.SetConfigField("providers."+providerID+".api_key", apiKey)
	if err != nil {
		return fmt.Errorf("failed to save API key to config file: %w", err)
	}

	providerConfig, exists := c.Providers.Get(providerID)
	if exists {
		providerConfig.APIKey = apiKey
		c.Providers.Set(providerID, providerConfig)
		return nil
	}

	var foundProvider *catwalk.Provider
	for _, p := range c.knownProviders {
		if string(p.ID) == providerID {
			foundProvider = &p
			break
		}
	}

	if foundProvider == nil {
		return fmt.Errorf("provider with ID %s not found in known providers", providerID)
	}

	// Create new provider config based on known provider
	providerConfig = ProviderConfig{
		ID:           providerID,
		Name:         foundProvider.Name,
		BaseURL:      foundProvider.APIEndpoint,
		Type:         foundProvider.Type,
		APIKey:       apiKey,
		Disable:      false,
		ExtraHeaders: make(map[string]string),
		ExtraParams:  make(map[string]string),
		Models:       foundProvider.Models,
	}
	// Store the updated provider config
	c.Providers.Set(providerID, providerConfig)
	return nil
}

func allToolNames() []string {
	return []string{
		"agent",
		"bash",
		"download",
		"edit",
		"multiedit",
		"fetch",
		"glob",
		"grep",
		"ls",
		"sourcegraph",
		"view",
		"write",
	}
}

func resolveAllowedTools(allTools []string, disabledTools []string) []string {
	if disabledTools == nil {
		return allTools
	}
	// filter out disabled tools (exclude mode)
	return filterSlice(allTools, disabledTools, false)
}

func resolveReadOnlyTools(tools []string) []string {
	readOnlyTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
	// keep only the allowed tools that are also read-only (include mode)
	return filterSlice(tools, readOnlyTools, true)
}

func filterSlice(data []string, mask []string, include bool) []string {
	filtered := []string{}
	for _, s := range data {
		// if include is true, we include items that ARE in the mask
		// if include is false, we include items that are NOT in the mask
		if include == slices.Contains(mask, s) {
			filtered = append(filtered, s)
		}
	}
	return filtered
}

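// SetupAgents builds the default "coder" and "task" agents from the current
// options; the task agent only gets the read-only tools and no MCPs or LSPs.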
func (c *Config) SetupAgents() {
	allowedTools := resolveAllowedTools(allToolNames(), c.Options.DisabledTools)

	agents := map[string]Agent{
		"coder": {
			ID:           "coder",
			Name:         "Coder",
			Description:  "An agent that helps with executing coding tasks.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: allowedTools,
		},
		"task": {
			ID:           "task",
			Name:         "Task",
			Description:  "An agent that helps with searching for context and finding implementation details.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: resolveReadOnlyTools(allowedTools),
			// NO MCPs or LSPs by default
			AllowedMCP: map[string][]string{},
			AllowedLSP: []string{},
		},
	}
	c.Agents = agents
}

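// Resolver returns the variable resolver used to expand config values.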
func (c *Config) Resolver() VariableResolver {
	return c.resolver
}

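// TestConnection performs a lightweight authenticated request against the
// provider to verify that the configured endpoint and API key work. A minimal
// usage sketch (assuming cfg is a loaded *Config):
//
//	if pc, ok := cfg.Providers.Get("openai"); ok {
//		if err := pc.TestConnection(cfg.Resolver()); err != nil {
//			slog.Error("provider check failed", "error", err)
//		}
//	}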
func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
	testURL := ""
	headers := make(map[string]string)
	apiKey, _ := resolver.ResolveValue(c.APIKey)
	switch c.Type {
	case catwalk.TypeOpenAI:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.openai.com/v1"
		}
		if c.ID == string(catwalk.InferenceProviderOpenRouter) {
			testURL = baseURL + "/credits"
		} else {
			testURL = baseURL + "/models"
		}
		headers["Authorization"] = "Bearer " + apiKey
	case catwalk.TypeAnthropic:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.anthropic.com/v1"
		}
		testURL = baseURL + "/models"
		headers["x-api-key"] = apiKey
		headers["anthropic-version"] = "2023-06-01"
	case catwalk.TypeGemini:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://generativelanguage.googleapis.com"
		}
		testURL = baseURL + "/v1beta/models?key=" + url.QueryEscape(apiKey)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	client := &http.Client{}
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, testURL, nil)
	if err != nil {
		return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
	}
	for k, v := range headers {
		req.Header.Set(k, v)
	}
	for k, v := range c.ExtraHeaders {
		req.Header.Set(k, v)
	}
	resp, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("failed to connect to provider %s: %w", c.ID, err)
	}
	defer resp.Body.Close()
	if c.ID == string(catwalk.InferenceProviderZAI) {
		// for z.ai just check that the response is not 401
		if resp.StatusCode == http.StatusUnauthorized {
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, resp.Status)
		}
	} else if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("failed to connect to provider %s: %s", c.ID, resp.Status)
	}
	return nil
}

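// resolveEnvs resolves variable references in the given map (updating it in
// place) and returns the entries as KEY=VALUE strings.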
func resolveEnvs(envs map[string]string) []string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range envs {
		var err error
		envs[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving environment variable", "error", err, "variable", e, "value", v)
			continue
		}
	}

	res := make([]string, 0, len(envs))
	for k, v := range envs {
		res = append(res, fmt.Sprintf("%s=%s", k, v))
	}
	return res
}