1package config
2
3import (
4 "cmp"
5 "context"
6 "fmt"
7 "log/slog"
8 "net/http"
9 "net/url"
10 "os"
11 "slices"
12 "strings"
13 "time"
14
15 "git.secluded.site/crush/internal/csync"
16 "git.secluded.site/crush/internal/env"
17 "git.secluded.site/crush/internal/oauth"
18 "github.com/charmbracelet/catwalk/pkg/catwalk"
19 "github.com/invopop/jsonschema"
20 "github.com/tidwall/sjson"
21)
22
const (
	// appName is the application name used for branding and paths.
	appName = "crush"
	// defaultDataDirectory is the app data directory, relative to the
	// working directory.
	defaultDataDirectory = ".crush"
	// defaultInitializeAs is the default name of the context file written
	// during project initialization.
	defaultInitializeAs = "AGENTS.md"
)

// defaultContextPaths lists well-known context/instruction files (from
// various AI coding tools) that are searched for project context by default.
var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"GEMINI.md",
	"gemini.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
	"AGENTS.md",
	"agents.md",
	"Agents.md",
}
47
// SelectedModelType identifies which configured model slot ("large" or
// "small") is being referenced.
type SelectedModelType string

const (
	// SelectedModelTypeLarge selects the primary, more capable model.
	SelectedModelTypeLarge SelectedModelType = "large"
	// SelectedModelTypeSmall selects the cheaper/faster model.
	SelectedModelTypeSmall SelectedModelType = "small"
)

// Built-in agent identifiers.
const (
	AgentCoder string = "coder"
	AgentTask  string = "task"
)
59
// SelectedModel identifies a provider/model pair together with optional
// per-model overrides for sampling and reasoning behavior.
type SelectedModel struct {
	// The model id as used by the provider API.
	// Required.
	Model string `json:"model" jsonschema:"required,description=The model ID as used by the provider API,example=gpt-4o"`
	// The model provider, same as the key/id used in the providers config.
	// Required.
	Provider string `json:"provider" jsonschema:"required,description=The model provider ID that matches a key in the providers config,example=openai"`

	// Only used by models that use the openai provider and need this set.
	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"description=Reasoning effort level for OpenAI models that support it,enum=low,enum=medium,enum=high"`

	// Used by anthropic models that can reason to indicate if the model should think.
	Think bool `json:"think,omitempty" jsonschema:"description=Enable thinking mode for Anthropic models that support reasoning"`

	// Overrides the default model configuration; nil pointer fields mean
	// "use the provider default".
	MaxTokens        int64    `json:"max_tokens,omitempty" jsonschema:"description=Maximum number of tokens for model responses,minimum=1,maximum=200000,example=4096"`
	Temperature      *float64 `json:"temperature,omitempty" jsonschema:"description=Sampling temperature,minimum=0,maximum=1,example=0.7"`
	TopP             *float64 `json:"top_p,omitempty" jsonschema:"description=Top-p (nucleus) sampling parameter,minimum=0,maximum=1,example=0.9"`
	TopK             *int64   `json:"top_k,omitempty" jsonschema:"description=Top-k sampling parameter"`
	FrequencyPenalty *float64 `json:"frequency_penalty,omitempty" jsonschema:"description=Frequency penalty to reduce repetition"`
	PresencePenalty  *float64 `json:"presence_penalty,omitempty" jsonschema:"description=Presence penalty to increase topic diversity"`

	// Override provider specific options.
	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for the model"`
}
85
// ProviderConfig describes a single AI provider: its endpoint, credentials,
// request customizations, and the models it serves.
type ProviderConfig struct {
	// The provider's id.
	ID string `json:"id,omitempty" jsonschema:"description=Unique identifier for the provider,example=openai"`
	// The provider's name, used for display purposes.
	Name string `json:"name,omitempty" jsonschema:"description=Human-readable name for the provider,example=OpenAI"`
	// The provider's API endpoint.
	BaseURL string `json:"base_url,omitempty" jsonschema:"description=Base URL for the provider's API,format=uri,example=https://api.openai.com/v1"`
	// The provider type, e.g. "openai", "anthropic", etc. if empty it defaults to openai.
	Type catwalk.Type `json:"type,omitempty" jsonschema:"description=Provider type that determines the API format,enum=openai,enum=openai-compat,enum=anthropic,enum=gemini,enum=azure,enum=vertexai,default=openai"`
	// The provider's API key. May reference an environment variable
	// (e.g. $OPENAI_API_KEY) that is resolved at use time.
	APIKey string `json:"api_key,omitempty" jsonschema:"description=API key for authentication with the provider,example=$OPENAI_API_KEY"`
	// OAuthToken for providers that use OAuth2 authentication.
	OAuthToken *oauth.Token `json:"oauth,omitempty" jsonschema:"description=OAuth2 token for authentication with the provider"`
	// Marks the provider as disabled.
	Disable bool `json:"disable,omitempty" jsonschema:"description=Whether this provider is disabled,default=false"`

	// Custom system prompt prefix.
	SystemPromptPrefix string `json:"system_prompt_prefix,omitempty" jsonschema:"description=Custom prefix to add to system prompts for this provider"`

	// Extra headers to send with each request to the provider.
	ExtraHeaders map[string]string `json:"extra_headers,omitempty" jsonschema:"description=Additional HTTP headers to send with requests"`
	// Extra body fields merged into request payloads.
	ExtraBody map[string]any `json:"extra_body,omitempty" jsonschema:"description=Additional fields to include in request bodies, only works with openai-compatible providers"`

	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for this provider"`

	// Used to pass extra parameters to the provider. Not serialized.
	ExtraParams map[string]string `json:"-"`

	// The provider models
	Models []catwalk.Model `json:"models,omitempty" jsonschema:"description=List of models available from this provider"`
}
118
119func (pc *ProviderConfig) SetupClaudeCode() {
120 if !strings.HasPrefix(pc.APIKey, "Bearer ") {
121 pc.APIKey = fmt.Sprintf("Bearer %s", pc.APIKey)
122 }
123 pc.SystemPromptPrefix = "You are Claude Code, Anthropic's official CLI for Claude."
124 pc.ExtraHeaders["anthropic-version"] = "2023-06-01"
125
126 value := pc.ExtraHeaders["anthropic-beta"]
127 const want = "oauth-2025-04-20"
128 if !strings.Contains(value, want) {
129 if value != "" {
130 value += ","
131 }
132 value += want
133 }
134 pc.ExtraHeaders["anthropic-beta"] = value
135}
136
// MCPType is the transport used to communicate with an MCP server.
type MCPType string

const (
	// MCPStdio runs the server as a subprocess and speaks over stdio.
	MCPStdio MCPType = "stdio"
	// MCPSSE connects to a server-sent-events endpoint.
	MCPSSE MCPType = "sse"
	// MCPHttp connects over plain HTTP.
	MCPHttp MCPType = "http"
)

// MCPConfig configures a single Model Context Protocol server.
type MCPConfig struct {
	Command  string            `json:"command,omitempty" jsonschema:"description=Command to execute for stdio MCP servers,example=npx"`
	Env      map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the MCP server"`
	Args     []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the MCP server command"`
	Type     MCPType           `json:"type" jsonschema:"required,description=Type of MCP connection,enum=stdio,enum=sse,enum=http,default=stdio"`
	URL      string            `json:"url,omitempty" jsonschema:"description=URL for HTTP or SSE MCP servers,format=uri,example=http://localhost:3000/mcp"`
	Disabled bool              `json:"disabled,omitempty" jsonschema:"description=Whether this MCP server is disabled,default=false"`
	Timeout  int               `json:"timeout,omitempty" jsonschema:"description=Timeout in seconds for MCP server connections,default=15,example=30,example=60,example=120"`

	// TODO: maybe make it possible to get the value from the env
	Headers map[string]string `json:"headers,omitempty" jsonschema:"description=HTTP headers for HTTP/SSE MCP servers"`
}
157
// LSPConfig configures a Language Server Protocol server.
type LSPConfig struct {
	Disabled    bool              `json:"disabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
	Command     string            `json:"command,omitempty" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
	Args        []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
	Env         map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set to the LSP server command"`
	FileTypes   []string          `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
	RootMarkers []string          `json:"root_markers,omitempty" jsonschema:"description=Files or directories that indicate the project root,example=go.mod,example=package.json,example=Cargo.toml"`
	InitOptions map[string]any    `json:"init_options,omitempty" jsonschema:"description=Initialization options passed to the LSP server during initialize request"`
	Options     map[string]any    `json:"options,omitempty" jsonschema:"description=LSP server-specific settings passed during initialization"`
}
168
// TUIOptions holds terminal user interface preferences.
type TUIOptions struct {
	CompactMode bool   `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
	DiffMode    string `json:"diff_mode,omitempty" jsonschema:"description=Diff mode for the TUI interface,enum=unified,enum=split"`
	// Here we can add themes later or any TUI related options
	//

	Completions Completions `json:"completions,omitzero" jsonschema:"description=Completions UI options"`
}

// Completions defines options for the completions UI.
type Completions struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}
183
184func (c Completions) Limits() (depth, items int) {
185 return ptrValOr(c.MaxDepth, 0), ptrValOr(c.MaxItems, 0)
186}
187
// Permissions controls which tool invocations require interactive approval.
type Permissions struct {
	AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"description=List of tools that don't require permission prompts,example=bash,example=view"` // Tools that don't require permission prompts
	SkipRequests bool     `json:"-"`                                                                                                                             // Automatically accept all permissions (YOLO mode)
}

// TrailerStyle selects the git trailer added to commits authored with Crush.
type TrailerStyle string

const (
	// TrailerStyleNone adds no trailer.
	TrailerStyleNone TrailerStyle = "none"
	// TrailerStyleCoAuthoredBy adds a Co-Authored-By trailer.
	TrailerStyleCoAuthoredBy TrailerStyle = "co-authored-by"
	// TrailerStyleAssistedBy adds an Assisted-By trailer.
	TrailerStyleAssistedBy TrailerStyle = "assisted-by"
)

// Attribution controls how Crush credits itself in commits, issues, and PRs.
type Attribution struct {
	TrailerStyle TrailerStyle `json:"trailer_style,omitempty" jsonschema:"description=Style of attribution trailer to add to commits,enum=none,enum=co-authored-by,enum=assisted-by,default=assisted-by"`
	// Deprecated: use TrailerStyle instead.
	CoAuthoredBy  *bool `json:"co_authored_by,omitempty" jsonschema:"description=Deprecated: use trailer_style instead"`
	GeneratedWith bool  `json:"generated_with,omitempty" jsonschema:"description=Add Generated with Crush line to commit messages and issues and PRs,default=true"`
}
206
207// JSONSchemaExtend marks the co_authored_by field as deprecated in the schema.
208func (Attribution) JSONSchemaExtend(schema *jsonschema.Schema) {
209 if schema.Properties != nil {
210 if prop, ok := schema.Properties.Get("co_authored_by"); ok {
211 prop.Deprecated = true
212 }
213 }
214}
215
// Options holds general application options.
type Options struct {
	ContextPaths              []string     `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
	MemoryPaths               []string     `json:"memory_paths,omitempty" jsonschema:"description=Paths to files containing memory information for the AI,default=~/.config/crush/CRUSH.md,default=~/.config/AGENTS.md"`
	TUI                       *TUIOptions  `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
	Debug                     bool         `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
	DebugLSP                  bool         `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
	DisableAutoSummarize      bool         `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
	DataDirectory             string       `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
	DisabledTools             []string     `json:"disabled_tools" jsonschema:"description=Tools to disable"`
	DisableProviderAutoUpdate bool         `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
	Attribution               *Attribution `json:"attribution,omitempty" jsonschema:"description=Attribution settings for generated content"`
	DisableMetrics            bool         `json:"disable_metrics,omitempty" jsonschema:"description=Disable sending metrics,default=false"`
	InitializeAs              string       `json:"initialize_as,omitempty" jsonschema:"description=Name of the context file to create/update during project initialization,default=AGENTS.md,example=AGENTS.md,example=CRUSH.md,example=CLAUDE.md,example=docs/LLMs.md"`
	DisableNotifications      bool         `json:"disable_notifications,omitempty" jsonschema:"description=Disable desktop notifications,default=false"`
}
231
// MCPs maps MCP server names to their configurations.
type MCPs map[string]MCPConfig

// MCP pairs an MCP server name with its configuration, used for sorted
// listings of the MCPs map.
type MCP struct {
	Name string    `json:"name"`
	MCP  MCPConfig `json:"mcp"`
}
238
239func (m MCPs) Sorted() []MCP {
240 sorted := make([]MCP, 0, len(m))
241 for k, v := range m {
242 sorted = append(sorted, MCP{
243 Name: k,
244 MCP: v,
245 })
246 }
247 slices.SortFunc(sorted, func(a, b MCP) int {
248 return strings.Compare(a.Name, b.Name)
249 })
250 return sorted
251}
252
// LSPs maps LSP server names to their configurations.
type LSPs map[string]LSPConfig

// LSP pairs an LSP server name with its configuration, used for sorted
// listings of the LSPs map.
type LSP struct {
	Name string    `json:"name"`
	LSP  LSPConfig `json:"lsp"`
}
259
260func (l LSPs) Sorted() []LSP {
261 sorted := make([]LSP, 0, len(l))
262 for k, v := range l {
263 sorted = append(sorted, LSP{
264 Name: k,
265 LSP: v,
266 })
267 }
268 slices.SortFunc(sorted, func(a, b LSP) int {
269 return strings.Compare(a.Name, b.Name)
270 })
271 return sorted
272}
273
// ResolvedEnv returns the LSP server's environment as KEY=VALUE pairs with
// shell-style variable references resolved.
func (l LSPConfig) ResolvedEnv() []string {
	return resolveEnvs(l.Env)
}

// ResolvedEnv returns the MCP server's environment as KEY=VALUE pairs with
// shell-style variable references resolved.
func (m MCPConfig) ResolvedEnv() []string {
	return resolveEnvs(m.Env)
}
281
282func (m MCPConfig) ResolvedHeaders() map[string]string {
283 resolver := NewShellVariableResolver(env.New())
284 for e, v := range m.Headers {
285 var err error
286 m.Headers[e], err = resolver.ResolveValue(v)
287 if err != nil {
288 slog.Error("error resolving header variable", "error", err, "variable", e, "value", v)
289 continue
290 }
291 }
292 return m.Headers
293}
294
// Agent describes a built-in agent: which model slot it uses and which
// tools and MCP servers it may access.
type Agent struct {
	ID          string `json:"id,omitempty"`
	Name        string `json:"name,omitempty"`
	Description string `json:"description,omitempty"`
	// Disabled marks the agent as unavailable.
	Disabled bool `json:"disabled,omitempty"`

	Model SelectedModelType `json:"model" jsonschema:"required,description=The model type to use for this agent,enum=large,enum=small,default=large"`

	// The available tools for the agent
	// if this is nil, all tools are available
	AllowedTools []string `json:"allowed_tools,omitempty"`

	// this tells us which MCPs are available for this agent
	// if this is empty all mcps are available
	// the string array is the list of tools from the AllowedMCP the agent has available
	// if the string array is nil, all tools from the AllowedMCP are available
	AllowedMCP map[string][]string `json:"allowed_mcp,omitempty"`

	// Overrides the context paths for this agent
	ContextPaths []string `json:"context_paths,omitempty"`
}
317
// Tools holds per-tool configuration.
type Tools struct {
	Ls ToolLs `json:"ls,omitzero"`
}

// ToolLs configures traversal limits for the ls tool.
type ToolLs struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}
326
327func (t ToolLs) Limits() (depth, items int) {
328 return ptrValOr(t.MaxDepth, 0), ptrValOr(t.MaxItems, 0)
329}
330
// Config holds the configuration for crush.
type Config struct {
	// Schema is the JSON schema URL, emitted when the config is serialized.
	Schema string `json:"$schema,omitempty"`

	// We currently only support large/small as values here.
	Models map[SelectedModelType]SelectedModel `json:"models,omitempty" jsonschema:"description=Model configurations for different model types,example={\"large\":{\"model\":\"gpt-4o\",\"provider\":\"openai\"}}"`
	// Recently used models stored in the data directory config.
	RecentModels map[SelectedModelType][]SelectedModel `json:"recent_models,omitempty" jsonschema:"description=Recently used models sorted by most recent first"`

	// The providers that are configured (concurrency-safe map).
	Providers *csync.Map[string, ProviderConfig] `json:"providers,omitempty" jsonschema:"description=AI provider configurations"`

	MCP MCPs `json:"mcp,omitempty" jsonschema:"description=Model Context Protocol server configurations"`

	LSP LSPs `json:"lsp,omitempty" jsonschema:"description=Language Server Protocol configurations"`

	Options *Options `json:"options,omitempty" jsonschema:"description=General application options"`

	Permissions *Permissions `json:"permissions,omitempty" jsonschema:"description=Permission settings for tool usage"`

	Tools Tools `json:"tools,omitzero" jsonschema:"description=Tool configurations"`

	// Agents holds the built-in agent definitions; populated by SetupAgents.
	Agents map[string]Agent `json:"-"`

	// Internal
	// workingDir is the directory this config was loaded for.
	workingDir string `json:"-"`
	// TODO: find a better way to do this this should probably not be part of the config
	resolver VariableResolver
	// dataConfigDir is the path of the per-project data config file.
	dataConfigDir string `json:"-"`
	// knownProviders caches the catwalk provider catalog.
	knownProviders []catwalk.Provider `json:"-"`
}
362
// WorkingDir returns the working directory this config was loaded for.
func (c *Config) WorkingDir() string {
	return c.workingDir
}
366
367func (c *Config) EnabledProviders() []ProviderConfig {
368 var enabled []ProviderConfig
369 for p := range c.Providers.Seq() {
370 if !p.Disable {
371 enabled = append(enabled, p)
372 }
373 }
374 return enabled
375}
376
377// IsConfigured return true if at least one provider is configured
378func (c *Config) IsConfigured() bool {
379 return len(c.EnabledProviders()) > 0
380}
381
382func (c *Config) GetModel(provider, model string) *catwalk.Model {
383 if providerConfig, ok := c.Providers.Get(provider); ok {
384 for _, m := range providerConfig.Models {
385 if m.ID == model {
386 return &m
387 }
388 }
389 }
390 return nil
391}
392
393func (c *Config) GetProviderForModel(modelType SelectedModelType) *ProviderConfig {
394 model, ok := c.Models[modelType]
395 if !ok {
396 return nil
397 }
398 if providerConfig, ok := c.Providers.Get(model.Provider); ok {
399 return &providerConfig
400 }
401 return nil
402}
403
404func (c *Config) GetModelByType(modelType SelectedModelType) *catwalk.Model {
405 model, ok := c.Models[modelType]
406 if !ok {
407 return nil
408 }
409 return c.GetModel(model.Provider, model.Model)
410}
411
412func (c *Config) LargeModel() *catwalk.Model {
413 model, ok := c.Models[SelectedModelTypeLarge]
414 if !ok {
415 return nil
416 }
417 return c.GetModel(model.Provider, model.Model)
418}
419
420func (c *Config) SmallModel() *catwalk.Model {
421 model, ok := c.Models[SelectedModelTypeSmall]
422 if !ok {
423 return nil
424 }
425 return c.GetModel(model.Provider, model.Model)
426}
427
428func (c *Config) SetCompactMode(enabled bool) error {
429 if c.Options == nil {
430 c.Options = &Options{}
431 }
432 c.Options.TUI.CompactMode = enabled
433 return c.SetConfigField("options.tui.compact_mode", enabled)
434}
435
436func (c *Config) Resolve(key string) (string, error) {
437 if c.resolver == nil {
438 return "", fmt.Errorf("no variable resolver configured")
439 }
440 return c.resolver.ResolveValue(key)
441}
442
443func (c *Config) UpdatePreferredModel(modelType SelectedModelType, model SelectedModel) error {
444 c.Models[modelType] = model
445 if err := c.SetConfigField(fmt.Sprintf("models.%s", modelType), model); err != nil {
446 return fmt.Errorf("failed to update preferred model: %w", err)
447 }
448 if err := c.recordRecentModel(modelType, model); err != nil {
449 return err
450 }
451 return nil
452}
453
454func (c *Config) SetConfigField(key string, value any) error {
455 // read the data
456 data, err := os.ReadFile(c.dataConfigDir)
457 if err != nil {
458 if os.IsNotExist(err) {
459 data = []byte("{}")
460 } else {
461 return fmt.Errorf("failed to read config file: %w", err)
462 }
463 }
464
465 newValue, err := sjson.Set(string(data), key, value)
466 if err != nil {
467 return fmt.Errorf("failed to set config field %s: %w", key, err)
468 }
469 if err := os.WriteFile(c.dataConfigDir, []byte(newValue), 0o600); err != nil {
470 return fmt.Errorf("failed to write config file: %w", err)
471 }
472 return nil
473}
474
475func (c *Config) SetProviderAPIKey(providerID string, apiKey any) error {
476 var providerConfig ProviderConfig
477 var exists bool
478 var setKeyOrToken func()
479
480 switch v := apiKey.(type) {
481 case string:
482 if err := c.SetConfigField(fmt.Sprintf("providers.%s.api_key", providerID), v); err != nil {
483 return fmt.Errorf("failed to save api key to config file: %w", err)
484 }
485 setKeyOrToken = func() { providerConfig.APIKey = v }
486 case *oauth.Token:
487 if err := cmp.Or(
488 c.SetConfigField(fmt.Sprintf("providers.%s.api_key", providerID), v.AccessToken),
489 c.SetConfigField(fmt.Sprintf("providers.%s.oauth", providerID), v),
490 ); err != nil {
491 return err
492 }
493 setKeyOrToken = func() {
494 providerConfig.APIKey = v.AccessToken
495 providerConfig.OAuthToken = v
496 providerConfig.SetupClaudeCode()
497 }
498 }
499
500 providerConfig, exists = c.Providers.Get(providerID)
501 if exists {
502 setKeyOrToken()
503 c.Providers.Set(providerID, providerConfig)
504 return nil
505 }
506
507 var foundProvider *catwalk.Provider
508 for _, p := range c.knownProviders {
509 if string(p.ID) == providerID {
510 foundProvider = &p
511 break
512 }
513 }
514
515 if foundProvider != nil {
516 // Create new provider config based on known provider
517 providerConfig = ProviderConfig{
518 ID: providerID,
519 Name: foundProvider.Name,
520 BaseURL: foundProvider.APIEndpoint,
521 Type: foundProvider.Type,
522 Disable: false,
523 ExtraHeaders: make(map[string]string),
524 ExtraParams: make(map[string]string),
525 Models: foundProvider.Models,
526 }
527 setKeyOrToken()
528 } else {
529 return fmt.Errorf("provider with ID %s not found in known providers", providerID)
530 }
531 // Store the updated provider config
532 c.Providers.Set(providerID, providerConfig)
533 return nil
534}
535
536const maxRecentModelsPerType = 5
537
538func (c *Config) recordRecentModel(modelType SelectedModelType, model SelectedModel) error {
539 if model.Provider == "" || model.Model == "" {
540 return nil
541 }
542
543 if c.RecentModels == nil {
544 c.RecentModels = make(map[SelectedModelType][]SelectedModel)
545 }
546
547 eq := func(a, b SelectedModel) bool {
548 return a.Provider == b.Provider && a.Model == b.Model
549 }
550
551 entry := SelectedModel{
552 Provider: model.Provider,
553 Model: model.Model,
554 }
555
556 current := c.RecentModels[modelType]
557 withoutCurrent := slices.DeleteFunc(slices.Clone(current), func(existing SelectedModel) bool {
558 return eq(existing, entry)
559 })
560
561 updated := append([]SelectedModel{entry}, withoutCurrent...)
562 if len(updated) > maxRecentModelsPerType {
563 updated = updated[:maxRecentModelsPerType]
564 }
565
566 if slices.EqualFunc(current, updated, eq) {
567 return nil
568 }
569
570 c.RecentModels[modelType] = updated
571
572 if err := c.SetConfigField(fmt.Sprintf("recent_models.%s", modelType), updated); err != nil {
573 return fmt.Errorf("failed to persist recent models: %w", err)
574 }
575
576 return nil
577}
578
// allToolNames returns the identifiers of every built-in tool, in their
// canonical order.
func allToolNames() []string {
	return []string{
		"agent", "bash", "job_output", "job_kill",
		"download", "edit", "multiedit",
		"lsp_diagnostics", "lsp_references",
		"fetch", "agentic_fetch",
		"glob", "grep", "ls", "sourcegraph", "view", "write",
	}
}
600
// resolveAllowedTools returns allTools minus any tool named in disabledTools.
// A nil disabledTools means "nothing disabled" and returns allTools as-is;
// a non-nil (even empty) list always yields a freshly allocated slice.
func resolveAllowedTools(allTools []string, disabledTools []string) []string {
	if disabledTools == nil {
		return allTools
	}
	kept := []string{}
	for _, tool := range allTools {
		if !slices.Contains(disabledTools, tool) {
			kept = append(kept, tool)
		}
	}
	return kept
}
608
// resolveReadOnlyTools filters tools down to the read-only subset (tools that
// only inspect state and never modify it), preserving input order.
func resolveReadOnlyTools(tools []string) []string {
	readOnly := []string{"glob", "grep", "ls", "sourcegraph", "view"}
	kept := []string{}
	for _, tool := range tools {
		if slices.Contains(readOnly, tool) {
			kept = append(kept, tool)
		}
	}
	return kept
}
614
// filterSlice returns the elements of data whose membership in mask matches
// include: include=true keeps elements present in mask, include=false keeps
// elements absent from mask. The result is always a non-nil slice.
func filterSlice(data []string, mask []string, include bool) []string {
	out := make([]string, 0)
	for _, item := range data {
		inMask := slices.Contains(mask, item)
		if inMask == include {
			out = append(out, item)
		}
	}
	return out
}
626
627func (c *Config) SetupAgents() {
628 allowedTools := resolveAllowedTools(allToolNames(), c.Options.DisabledTools)
629
630 agents := map[string]Agent{
631 AgentCoder: {
632 ID: AgentCoder,
633 Name: "Coder",
634 Description: "An agent that helps with executing coding tasks.",
635 Model: SelectedModelTypeLarge,
636 ContextPaths: c.Options.ContextPaths,
637 AllowedTools: allowedTools,
638 },
639
640 AgentTask: {
641 ID: AgentCoder,
642 Name: "Task",
643 Description: "An agent that helps with searching for context and finding implementation details.",
644 Model: SelectedModelTypeLarge,
645 ContextPaths: c.Options.ContextPaths,
646 AllowedTools: resolveReadOnlyTools(allowedTools),
647 // NO MCPs or LSPs by default
648 AllowedMCP: map[string][]string{},
649 },
650 }
651 c.Agents = agents
652}
653
// Resolver returns the variable resolver used to expand values (such as
// environment references) in the config.
func (c *Config) Resolver() VariableResolver {
	return c.resolver
}
657
658func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
659 testURL := ""
660 headers := make(map[string]string)
661 apiKey, _ := resolver.ResolveValue(c.APIKey)
662 switch c.Type {
663 case catwalk.TypeOpenAI, catwalk.TypeOpenAICompat, catwalk.TypeOpenRouter:
664 baseURL, _ := resolver.ResolveValue(c.BaseURL)
665 if baseURL == "" {
666 baseURL = "https://api.openai.com/v1"
667 }
668 if c.ID == string(catwalk.InferenceProviderOpenRouter) {
669 testURL = baseURL + "/credits"
670 } else {
671 testURL = baseURL + "/models"
672 }
673 headers["Authorization"] = "Bearer " + apiKey
674 case catwalk.TypeAnthropic:
675 baseURL, _ := resolver.ResolveValue(c.BaseURL)
676 if baseURL == "" {
677 baseURL = "https://api.anthropic.com/v1"
678 }
679 testURL = baseURL + "/models"
680 // TODO: replace with const when catwalk is released
681 if c.ID == "kimi-coding" {
682 testURL = baseURL + "/v1/models"
683 }
684 headers["x-api-key"] = apiKey
685 headers["anthropic-version"] = "2023-06-01"
686 case catwalk.TypeGoogle:
687 baseURL, _ := resolver.ResolveValue(c.BaseURL)
688 if baseURL == "" {
689 baseURL = "https://generativelanguage.googleapis.com"
690 }
691 testURL = baseURL + "/v1beta/models?key=" + url.QueryEscape(apiKey)
692 }
693 ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
694 defer cancel()
695 client := &http.Client{}
696 req, err := http.NewRequestWithContext(ctx, "GET", testURL, nil)
697 if err != nil {
698 return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
699 }
700 for k, v := range headers {
701 req.Header.Set(k, v)
702 }
703 for k, v := range c.ExtraHeaders {
704 req.Header.Set(k, v)
705 }
706 b, err := client.Do(req)
707 if err != nil {
708 return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
709 }
710 if c.ID == string(catwalk.InferenceProviderZAI) {
711 if b.StatusCode == http.StatusUnauthorized {
712 // for z.ai just check if the http response is not 401
713 return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
714 }
715 } else {
716 if b.StatusCode != http.StatusOK {
717 return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
718 }
719 }
720 _ = b.Body.Close()
721 return nil
722}
723
724func resolveEnvs(envs map[string]string) []string {
725 resolver := NewShellVariableResolver(env.New())
726 for e, v := range envs {
727 var err error
728 envs[e], err = resolver.ResolveValue(v)
729 if err != nil {
730 slog.Error("error resolving environment variable", "error", err, "variable", e, "value", v)
731 continue
732 }
733 }
734
735 res := make([]string, 0, len(envs))
736 for k, v := range envs {
737 res = append(res, fmt.Sprintf("%s=%s", k, v))
738 }
739 return res
740}
741
// ptrValOr dereferences t, falling back to el when t is nil.
func ptrValOr[T any](t *T, el T) T {
	if t != nil {
		return *t
	}
	return el
}