package config

import (
	"context"
	"fmt"
	"log/slog"
	"net/http"
	"net/url"
	"os"
	"slices"
	"strings"
	"time"

	"github.com/charmbracelet/catwalk/pkg/catwalk"
	"github.com/charmbracelet/crush/internal/csync"
	"github.com/tidwall/sjson"
)

const (
	defaultDataDirectory = ".crush"
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"GEMINI.md",
	"gemini.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
	"AGENTS.md",
	"agents.md",
	"Agents.md",
}

type SelectedModelType string

const (
	SelectedModelTypeLarge SelectedModelType = "large"
	SelectedModelTypeSmall SelectedModelType = "small"
)

type SelectedModel struct {
	// The model id as used by the provider API.
	// Required.
	Model string `json:"model" jsonschema:"required,description=The model ID as used by the provider API,example=gpt-4o"`
	// The model provider, same as the key/id used in the providers config.
	// Required.
	Provider string `json:"provider" jsonschema:"required,description=The model provider ID that matches a key in the providers config,example=openai"`

	// Only used by models that use the openai provider and need this set.
	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"description=Reasoning effort level for OpenAI models that support it,enum=low,enum=medium,enum=high"`

	// Overrides the default model configuration.
	MaxTokens int64 `json:"max_tokens,omitempty" jsonschema:"description=Maximum number of tokens for model responses,minimum=1,maximum=200000,example=4096"`

	// Used by anthropic models that can reason to indicate if the model should think.
	Think bool `json:"think,omitempty" jsonschema:"description=Enable thinking mode for Anthropic models that support reasoning"`
}
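
// A minimal, illustrative "models" section of the JSON config that maps onto
// SelectedModel. The provider and model IDs below are examples only, not
// defaults shipped with this package:
//
//	"models": {
//	  "large": {"provider": "openai", "model": "gpt-4o", "max_tokens": 8192},
//	  "small": {"provider": "openai", "model": "gpt-4o-mini"}
//	}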

type ProviderConfig struct {
	// The provider's id.
	ID string `json:"id,omitempty" jsonschema:"description=Unique identifier for the provider,example=openai"`
	// The provider's name, used for display purposes.
	Name string `json:"name,omitempty" jsonschema:"description=Human-readable name for the provider,example=OpenAI"`
	// The provider's API endpoint.
	BaseURL string `json:"base_url,omitempty" jsonschema:"description=Base URL for the provider's API,format=uri,example=https://api.openai.com/v1"`
	// The provider type, e.g. "openai", "anthropic", etc. If empty it defaults to openai.
	Type catwalk.Type `json:"type,omitempty" jsonschema:"description=Provider type that determines the API format,enum=openai,enum=anthropic,enum=gemini,enum=azure,enum=vertexai,default=openai"`
	// The provider's API key.
	APIKey string `json:"api_key,omitempty" jsonschema:"description=API key for authentication with the provider,example=$OPENAI_API_KEY"`
	// Marks the provider as disabled.
	Disable bool `json:"disable,omitempty" jsonschema:"description=Whether this provider is disabled,default=false"`

	// Custom system prompt prefix.
	SystemPromptPrefix string `json:"system_prompt_prefix,omitempty" jsonschema:"description=Custom prefix to add to system prompts for this provider"`

	// Extra headers to send with each request to the provider.
	ExtraHeaders map[string]string `json:"extra_headers,omitempty" jsonschema:"description=Additional HTTP headers to send with requests"`
	// Extra fields to include in request bodies sent to the provider.
	ExtraBody map[string]any `json:"extra_body,omitempty" jsonschema:"description=Additional fields to include in request bodies"`

	// Used to pass extra parameters to the provider.
	ExtraParams map[string]string `json:"-"`

	// The provider models
	Models []catwalk.Model `json:"models,omitempty" jsonschema:"description=List of models available from this provider"`
}
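
// For illustration only, a custom provider entry in the JSON config could look
// like the following; the provider ID, URL, environment variable, and model ID
// are assumptions, not values required by this package:
//
//	"providers": {
//	  "my-openai-compatible": {
//	    "type": "openai",
//	    "base_url": "https://example.com/v1",
//	    "api_key": "$MY_API_KEY",
//	    "models": [{"id": "my-model", "name": "My Model"}]
//	  }
//	}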

type MCPType string

const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
	MCPHttp  MCPType = "http"
)

type MCPConfig struct {
	Command  string            `json:"command,omitempty" jsonschema:"description=Command to execute for stdio MCP servers,example=npx"`
	Env      map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the MCP server"`
	Args     []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the MCP server command"`
	Type     MCPType           `json:"type" jsonschema:"required,description=Type of MCP connection,enum=stdio,enum=sse,enum=http,default=stdio"`
	URL      string            `json:"url,omitempty" jsonschema:"description=URL for HTTP or SSE MCP servers,format=uri,example=http://localhost:3000/mcp"`
	Disabled bool              `json:"disabled,omitempty" jsonschema:"description=Whether this MCP server is disabled,default=false"`
	Timeout  int               `json:"timeout,omitempty" jsonschema:"description=Timeout in seconds for MCP server connections,default=15,example=30,example=60,example=120"`

	// TODO: maybe make it possible to get the value from the env
	Headers map[string]string `json:"headers,omitempty" jsonschema:"description=HTTP headers for HTTP/SSE MCP servers"`
}
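
// A sketch of an "mcp" section in the JSON config, assuming one hypothetical
// stdio server launched via npx and one HTTP server; the names, package, and
// URL are illustrative, not defaults:
//
//	"mcp": {
//	  "filesystem": {"type": "stdio", "command": "npx", "args": ["-y", "some-mcp-server"]},
//	  "remote":     {"type": "http", "url": "http://localhost:3000/mcp", "timeout": 30}
//	}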

type LSPConfig struct {
	Disabled    bool              `json:"disabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
	Command     string            `json:"command,omitempty" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
	Args        []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
	Env         map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set to the LSP server command"`
	FileTypes   []string          `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
	RootMarkers []string          `json:"root_markers,omitempty" jsonschema:"description=Files or directories that indicate the project root,example=go.mod,example=package.json,example=Cargo.toml"`
	InitOptions map[string]any    `json:"init_options,omitempty" jsonschema:"description=Initialization options passed to the LSP server during initialize request"`
	Options     map[string]any    `json:"options,omitempty" jsonschema:"description=LSP server-specific settings passed during initialization"`
}

type TUIOptions struct {
	CompactMode bool   `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
	DiffMode    string `json:"diff_mode,omitempty" jsonschema:"description=Diff mode for the TUI interface,enum=unified,enum=split"`
	// Here we can add themes later or any TUI related options
}

type Permissions struct {
	AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"description=List of tools that don't require permission prompts,example=bash,example=view"` // Tools that don't require permission prompts
	SkipRequests bool     `json:"-"` // Automatically accept all permissions (YOLO mode)
}

type Attribution struct {
	CoAuthoredBy  bool `json:"co_authored_by,omitempty" jsonschema:"description=Add Co-Authored-By trailer to commit messages,default=true"`
	GeneratedWith bool `json:"generated_with,omitempty" jsonschema:"description=Add Generated with Crush line to commit messages and issues and PRs,default=true"`
}

type Options struct {
	ContextPaths              []string     `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
	TUI                       *TUIOptions  `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
	Debug                     bool         `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
	DebugLSP                  bool         `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
	DisableAutoSummarize      bool         `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
	DataDirectory             string       `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
	DisabledTools             []string     `json:"disabled_tools" jsonschema:"description=Tools to disable"`
	DisableProviderAutoUpdate bool         `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
	Attribution               *Attribution `json:"attribution,omitempty" jsonschema:"description=Attribution settings for generated content"`
	DisableMetrics            bool         `json:"disable_metrics,omitempty" jsonschema:"description=Disable sending metrics,default=false"`
}

type MCPs map[string]MCPConfig

type MCP struct {
	Name string    `json:"name"`
	MCP  MCPConfig `json:"mcp"`
}

func (m MCPs) Sorted() []MCP {
	sorted := make([]MCP, 0, len(m))
	for k, v := range m {
		sorted = append(sorted, MCP{
			Name: k,
			MCP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b MCP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

type LSPs map[string]LSPConfig

type LSP struct {
	Name string    `json:"name"`
	LSP  LSPConfig `json:"lsp"`
}

func (l LSPs) Sorted() []LSP {
	sorted := make([]LSP, 0, len(l))
	for k, v := range l {
		sorted = append(sorted, LSP{
			Name: k,
			LSP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b LSP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

func (l LSPConfig) ResolvedEnv() []string {
	return resolveEnvs(l.Env)
}

func (m MCPConfig) ResolvedEnv() []string {
	return resolveEnvs(m.Env)
}

func (m MCPConfig) ResolvedHeaders() map[string]string {
	resolver := NewShellVariableResolver(os.Environ())
	for e, v := range m.Headers {
		var err error
		m.Headers[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving header variable", "error", err, "variable", e, "value", v)
			continue
		}
	}
	return m.Headers
}

type Agent struct {
	ID          string `json:"id,omitempty"`
	Name        string `json:"name,omitempty"`
	Description string `json:"description,omitempty"`
	// Marks the agent as disabled.
	Disabled bool `json:"disabled,omitempty"`

	Model SelectedModelType `json:"model" jsonschema:"required,description=The model type to use for this agent,enum=large,enum=small,default=large"`

	// The available tools for the agent.
	// If this is nil, all tools are available.
	AllowedTools []string `json:"allowed_tools,omitempty"`

	// This tells us which MCPs are available for this agent.
	// If this is empty, all MCPs are available.
	// The string slice is the list of tools from the AllowedMCP the agent has available;
	// if the slice is nil, all tools from that MCP are available.
	AllowedMCP map[string][]string `json:"allowed_mcp,omitempty"`

	// The list of LSPs that this agent can use.
	// If this is nil, all LSPs are available.
	AllowedLSP []string `json:"allowed_lsp,omitempty"`

	// Overrides the context paths for this agent.
	ContextPaths []string `json:"context_paths,omitempty"`
}
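
// As an illustration of the AllowedMCP semantics above (the MCP and tool names
// are hypothetical): an agent restricted to two tools from a "github" MCP and
// to every tool from a "filesystem" MCP would carry
//
//	AllowedMCP: map[string][]string{
//		"github":     {"get_pull_request", "list_issues"},
//		"filesystem": nil, // nil slice: all tools from this MCP
//	}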

// Config holds the configuration for crush.
type Config struct {
	Schema string `json:"$schema,omitempty"`

	// We currently only support large/small as values here.
	Models map[SelectedModelType]SelectedModel `json:"models,omitempty" jsonschema:"description=Model configurations for different model types,example={\"large\":{\"model\":\"gpt-4o\",\"provider\":\"openai\"}}"`

	// The providers that are configured
	Providers *csync.Map[string, ProviderConfig] `json:"providers,omitempty" jsonschema:"description=AI provider configurations"`

	MCP MCPs `json:"mcp,omitempty" jsonschema:"description=Model Context Protocol server configurations"`

	LSP LSPs `json:"lsp,omitempty" jsonschema:"description=Language Server Protocol configurations"`

	Options *Options `json:"options,omitempty" jsonschema:"description=General application options"`

	Permissions *Permissions `json:"permissions,omitempty" jsonschema:"description=Permission settings for tool usage"`

	// Internal
	workingDir string `json:"-"`
	// TODO: most likely remove this concept when I come back to it
	Agents map[string]Agent `json:"-"`
	// TODO: find a better way to do this; it should probably not be part of the config
	resolver VariableResolver
	dataConfigDir  string             `json:"-"`
	knownProviders []catwalk.Provider `json:"-"`
}

func (c *Config) WorkingDir() string {
	return c.workingDir
}

func (c *Config) EnabledProviders() []ProviderConfig {
	var enabled []ProviderConfig
	for p := range c.Providers.Seq() {
		if !p.Disable {
			enabled = append(enabled, p)
		}
	}
	return enabled
}

// IsConfigured reports whether at least one provider is enabled.
func (c *Config) IsConfigured() bool {
	return len(c.EnabledProviders()) > 0
}

func (c *Config) GetModel(provider, model string) *catwalk.Model {
	if providerConfig, ok := c.Providers.Get(provider); ok {
		for _, m := range providerConfig.Models {
			if m.ID == model {
				return &m
			}
		}
	}
	return nil
}

func (c *Config) GetProviderForModel(modelType SelectedModelType) *ProviderConfig {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	if providerConfig, ok := c.Providers.Get(model.Provider); ok {
		return &providerConfig
	}
	return nil
}

func (c *Config) GetModelByType(modelType SelectedModelType) *catwalk.Model {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) LargeModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeLarge]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) SmallModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeSmall]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) SetCompactMode(enabled bool) error {
	if c.Options == nil {
		c.Options = &Options{}
	}
	if c.Options.TUI == nil {
		// Guard against a nil TUI options block to avoid a nil pointer dereference below.
		c.Options.TUI = &TUIOptions{}
	}
	c.Options.TUI.CompactMode = enabled
	return c.SetConfigField("options.tui.compact_mode", enabled)
}

func (c *Config) Resolve(key string) (string, error) {
	if c.resolver == nil {
		return "", fmt.Errorf("no variable resolver configured")
	}
	return c.resolver.ResolveValue(key)
}

func (c *Config) UpdatePreferredModel(modelType SelectedModelType, model SelectedModel) error {
	c.Models[modelType] = model
	if err := c.SetConfigField(fmt.Sprintf("models.%s", modelType), model); err != nil {
		return fmt.Errorf("failed to update preferred model: %w", err)
	}
	return nil
}

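// SetConfigField persists a single field to the on-disk config file using a
// dot-separated sjson path; missing intermediate objects are created by sjson.
// For example (illustrative values), the call below would leave
// {"options":{"tui":{"compact_mode":true}}} in the file:
//
//	_ = cfg.SetConfigField("options.tui.compact_mode", true)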
func (c *Config) SetConfigField(key string, value any) error {
	// read the data
	data, err := os.ReadFile(c.dataConfigDir)
	if err != nil {
		if os.IsNotExist(err) {
			data = []byte("{}")
		} else {
			return fmt.Errorf("failed to read config file: %w", err)
		}
	}

	newValue, err := sjson.Set(string(data), key, value)
	if err != nil {
		return fmt.Errorf("failed to set config field %s: %w", key, err)
	}
	if err := os.WriteFile(c.dataConfigDir, []byte(newValue), 0o600); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}
	return nil
}

func (c *Config) SetProviderAPIKey(providerID, apiKey string) error {
	// First save to the config file
	err := c.SetConfigField("providers."+providerID+".api_key", apiKey)
	if err != nil {
		return fmt.Errorf("failed to save API key to config file: %w", err)
	}

	providerConfig, exists := c.Providers.Get(providerID)
	if exists {
		providerConfig.APIKey = apiKey
		c.Providers.Set(providerID, providerConfig)
		return nil
	}

	var foundProvider *catwalk.Provider
	for _, p := range c.knownProviders {
		if string(p.ID) == providerID {
			foundProvider = &p
			break
		}
	}

	if foundProvider != nil {
		// Create new provider config based on known provider
		providerConfig = ProviderConfig{
			ID:           providerID,
			Name:         foundProvider.Name,
			BaseURL:      foundProvider.APIEndpoint,
			Type:         foundProvider.Type,
			APIKey:       apiKey,
			Disable:      false,
			ExtraHeaders: make(map[string]string),
			ExtraParams:  make(map[string]string),
			Models:       foundProvider.Models,
		}
	} else {
		return fmt.Errorf("provider with ID %s not found in known providers", providerID)
	}
	// Store the updated provider config
	c.Providers.Set(providerID, providerConfig)
	return nil
}

func allToolNames() []string {
	return []string{
		"agent",
		"bash",
		"download",
		"edit",
		"multiedit",
		"fetch",
		"glob",
		"grep",
		"ls",
		"sourcegraph",
		"view",
		"write",
	}
}

func resolveAllowedTools(allTools []string, disabledTools []string) []string {
	if disabledTools == nil {
		return allTools
	}
	// filter out disabled tools (exclude mode)
	return filterSlice(allTools, disabledTools, false)
}

func resolveReadOnlyTools(tools []string) []string {
	readOnlyTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
	// filter to only include tools that are in allowedtools (include mode)
	return filterSlice(tools, readOnlyTools, true)
}

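// filterSlice keeps or drops items depending on membership in mask. A quick
// illustration of both modes (the tool names are just examples):
//
//	filterSlice([]string{"bash", "view"}, []string{"bash"}, false) // => ["view"] (exclude mode)
//	filterSlice([]string{"bash", "view"}, []string{"view"}, true)  // => ["view"] (include mode)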
func filterSlice(data []string, mask []string, include bool) []string {
	filtered := []string{}
	for _, s := range data {
		// if include is true, we include items that ARE in the mask
		// if include is false, we include items that are NOT in the mask
		if include == slices.Contains(mask, s) {
			filtered = append(filtered, s)
		}
	}
	return filtered
}

func (c *Config) SetupAgents() {
	allowedTools := resolveAllowedTools(allToolNames(), c.Options.DisabledTools)

	agents := map[string]Agent{
		"coder": {
			ID:           "coder",
			Name:         "Coder",
			Description:  "An agent that helps with executing coding tasks.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: allowedTools,
		},
		"task": {
			ID:           "task",
			Name:         "Task",
			Description:  "An agent that helps with searching for context and finding implementation details.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: resolveReadOnlyTools(allowedTools),
			// NO MCPs or LSPs by default
			AllowedMCP: map[string][]string{},
			AllowedLSP: []string{},
		},
	}
	c.Agents = agents
}

func (c *Config) Resolver() VariableResolver {
	return c.resolver
}

func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
	testURL := ""
	headers := make(map[string]string)
	apiKey, _ := resolver.ResolveValue(c.APIKey)
	switch c.Type {
	case catwalk.TypeOpenAI:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.openai.com/v1"
		}
		if c.ID == string(catwalk.InferenceProviderOpenRouter) {
			testURL = baseURL + "/credits"
		} else {
			testURL = baseURL + "/models"
		}
		headers["Authorization"] = "Bearer " + apiKey
	case catwalk.TypeAnthropic:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.anthropic.com/v1"
		}
		testURL = baseURL + "/models"
		headers["x-api-key"] = apiKey
		headers["anthropic-version"] = "2023-06-01"
	case catwalk.TypeGemini:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://generativelanguage.googleapis.com"
		}
		testURL = baseURL + "/v1beta/models?key=" + url.QueryEscape(apiKey)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	client := &http.Client{}
	req, err := http.NewRequestWithContext(ctx, "GET", testURL, nil)
	if err != nil {
		return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
	}
	for k, v := range headers {
		req.Header.Set(k, v)
	}
	for k, v := range c.ExtraHeaders {
		req.Header.Set(k, v)
	}
	b, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("failed to connect to provider %s: %w", c.ID, err)
	}
	defer func() { _ = b.Body.Close() }()
	if c.ID == string(catwalk.InferenceProviderZAI) {
		// for z.ai just check that the http response is not 401
		if b.StatusCode == http.StatusUnauthorized {
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
		}
	} else if b.StatusCode != http.StatusOK {
		return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
	}
	return nil
}

func resolveEnvs(envs map[string]string) []string {
	resolver := NewShellVariableResolver(os.Environ())
	for e, v := range envs {
		var err error
		envs[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving environment variable", "error", err, "variable", e, "value", v)
			continue
		}
	}

	res := make([]string, 0, len(envs))
	for k, v := range envs {
		res = append(res, fmt.Sprintf("%s=%s", k, v))
	}
	return res
}

type contextKey struct{}

var configKey = contextKey{}

// WithContext returns a copy of the provided context with the given config.
func WithContext(ctx context.Context, cfg *Config) context.Context {
	return context.WithValue(ctx, configKey, cfg)
}

// FromContext retrieves the config from the context, if present.
func FromContext(ctx context.Context) (*Config, bool) {
	cfg, ok := ctx.Value(configKey).(*Config)
	return cfg, ok
}
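
// A small usage sketch of the context helpers above; the surrounding code is
// hypothetical and not part of this package:
//
//	ctx := config.WithContext(context.Background(), cfg)
//	// ... later, somewhere downstream:
//	if cfg, ok := config.FromContext(ctx); ok {
//		_ = cfg.WorkingDir()
//	}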