package config

import (
	"context"
	"fmt"
	"log/slog"
	"net/http"
	"net/url"
	"os"
	"slices"
	"strings"
	"time"

	"github.com/charmbracelet/catwalk/pkg/catwalk"
	"github.com/charmbracelet/crush/internal/csync"
	"github.com/charmbracelet/crush/internal/env"
	"github.com/charmbracelet/crush/internal/hooks"
	"github.com/invopop/jsonschema"
	"github.com/tidwall/sjson"
)

const (
	appName              = "crush"
	defaultDataDirectory = ".crush"
	defaultInitializeAs  = "AGENTS.md"
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"GEMINI.md",
	"gemini.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
	"AGENTS.md",
	"agents.md",
	"Agents.md",
}

type SelectedModelType string

const (
	SelectedModelTypeLarge SelectedModelType = "large"
	SelectedModelTypeSmall SelectedModelType = "small"
)

const (
	AgentCoder string = "coder"
	AgentTask  string = "task"
)

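// SelectedModel describes the model chosen for one of the selected model
// types (large or small), plus optional generation parameters that override
// the provider defaults. In JSON it looks roughly like this (illustrative):
//
//	{"model": "gpt-4o", "provider": "openai"}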
type SelectedModel struct {
	// The model id as used by the provider API.
	// Required.
	Model string `json:"model" jsonschema:"required,description=The model ID as used by the provider API,example=gpt-4o"`
	// The model provider, same as the key/id used in the providers config.
	// Required.
	Provider string `json:"provider" jsonschema:"required,description=The model provider ID that matches a key in the providers config,example=openai"`

	// Only used by models that use the openai provider and need this set.
	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"description=Reasoning effort level for OpenAI models that support it,enum=low,enum=medium,enum=high"`

	// Used by Anthropic models that can reason, to indicate whether the model should think.
	Think bool `json:"think,omitempty" jsonschema:"description=Enable thinking mode for Anthropic models that support reasoning"`

	// Overrides the default model configuration.
	MaxTokens int64 `json:"max_tokens,omitempty" jsonschema:"description=Maximum number of tokens for model responses,minimum=1,maximum=200000,example=4096"`
	Temperature *float64 `json:"temperature,omitempty" jsonschema:"description=Sampling temperature,minimum=0,maximum=1,example=0.7"`
	TopP *float64 `json:"top_p,omitempty" jsonschema:"description=Top-p (nucleus) sampling parameter,minimum=0,maximum=1,example=0.9"`
	TopK *int64 `json:"top_k,omitempty" jsonschema:"description=Top-k sampling parameter"`
	FrequencyPenalty *float64 `json:"frequency_penalty,omitempty" jsonschema:"description=Frequency penalty to reduce repetition"`
	PresencePenalty *float64 `json:"presence_penalty,omitempty" jsonschema:"description=Presence penalty to increase topic diversity"`

	// Override provider specific options.
	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for the model"`
}

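// ProviderConfig describes a single AI provider: how to reach it, how to
// authenticate, and which models it exposes. Values such as the API key may
// reference environment variables (for example $OPENAI_API_KEY), which are
// resolved at runtime.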
type ProviderConfig struct {
	// The provider's id.
	ID string `json:"id,omitempty" jsonschema:"description=Unique identifier for the provider,example=openai"`
	// The provider's name, used for display purposes.
	Name string `json:"name,omitempty" jsonschema:"description=Human-readable name for the provider,example=OpenAI"`
	// The provider's API endpoint.
	BaseURL string `json:"base_url,omitempty" jsonschema:"description=Base URL for the provider's API,format=uri,example=https://api.openai.com/v1"`
	// The provider type, e.g. "openai", "anthropic", etc. If empty, it defaults to "openai".
	Type catwalk.Type `json:"type,omitempty" jsonschema:"description=Provider type that determines the API format,enum=openai,enum=openai-compat,enum=anthropic,enum=gemini,enum=azure,enum=vertexai,default=openai"`
	// The provider's API key.
	APIKey string `json:"api_key,omitempty" jsonschema:"description=API key for authentication with the provider,example=$OPENAI_API_KEY"`
	// Marks the provider as disabled.
	Disable bool `json:"disable,omitempty" jsonschema:"description=Whether this provider is disabled,default=false"`

	// Custom system prompt prefix.
	SystemPromptPrefix string `json:"system_prompt_prefix,omitempty" jsonschema:"description=Custom prefix to add to system prompts for this provider"`

	// Extra headers to send with each request to the provider.
	ExtraHeaders map[string]string `json:"extra_headers,omitempty" jsonschema:"description=Additional HTTP headers to send with requests"`
	// Extra body fields to include in request bodies.
	ExtraBody map[string]any `json:"extra_body,omitempty" jsonschema:"description=Additional fields to include in request bodies, only works with openai-compatible providers"`

	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for this provider"`

	// Used to pass extra parameters to the provider.
	ExtraParams map[string]string `json:"-"`

	// The provider models
	Models []catwalk.Model `json:"models,omitempty" jsonschema:"description=List of models available from this provider"`
}

type MCPType string

const (
	MCPStdio MCPType = "stdio"
	MCPSSE   MCPType = "sse"
	MCPHttp  MCPType = "http"
)

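// MCPConfig describes how to start or reach a single Model Context Protocol
// (MCP) server. Stdio servers use Command, Args, and Env; HTTP and SSE
// servers use URL and Headers instead. A minimal stdio entry might look
// roughly like this (illustrative; the server name is hypothetical):
//
//	{"type": "stdio", "command": "npx", "args": ["some-mcp-server"]}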
type MCPConfig struct {
	Command string `json:"command,omitempty" jsonschema:"description=Command to execute for stdio MCP servers,example=npx"`
	Env map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the MCP server"`
	Args []string `json:"args,omitempty" jsonschema:"description=Arguments to pass to the MCP server command"`
	Type MCPType `json:"type" jsonschema:"required,description=Type of MCP connection,enum=stdio,enum=sse,enum=http,default=stdio"`
	URL string `json:"url,omitempty" jsonschema:"description=URL for HTTP or SSE MCP servers,format=uri,example=http://localhost:3000/mcp"`
	Disabled bool `json:"disabled,omitempty" jsonschema:"description=Whether this MCP server is disabled,default=false"`
	Timeout int `json:"timeout,omitempty" jsonschema:"description=Timeout in seconds for MCP server connections,default=15,example=30,example=60,example=120"`

	// TODO: maybe make it possible to get the value from the env
	Headers map[string]string `json:"headers,omitempty" jsonschema:"description=HTTP headers for HTTP/SSE MCP servers"`
}

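// LSPConfig describes a Language Server Protocol server: the command to run,
// the file types it handles, and the options passed to it during
// initialization.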
type LSPConfig struct {
	Disabled bool `json:"disabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
	Command string `json:"command,omitempty" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
	Args []string `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
	Env map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the LSP server command"`
	FileTypes []string `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
	RootMarkers []string `json:"root_markers,omitempty" jsonschema:"description=Files or directories that indicate the project root,example=go.mod,example=package.json,example=Cargo.toml"`
	InitOptions map[string]any `json:"init_options,omitempty" jsonschema:"description=Initialization options passed to the LSP server during initialize request"`
	Options map[string]any `json:"options,omitempty" jsonschema:"description=LSP server-specific settings passed during initialization"`
}

type TUIOptions struct {
	CompactMode bool `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
	DiffMode string `json:"diff_mode,omitempty" jsonschema:"description=Diff mode for the TUI interface,enum=unified,enum=split"`
	// Here we can add themes later or any TUI related options

	Completions Completions `json:"completions,omitzero" jsonschema:"description=Completions UI options"`
}

// Completions defines options for the completions UI.
type Completions struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

func (c Completions) Limits() (depth, items int) {
	return ptrValOr(c.MaxDepth, 0), ptrValOr(c.MaxItems, 0)
}

type Permissions struct {
	AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"description=List of tools that don't require permission prompts,example=bash,example=view"` // Tools that don't require permission prompts
	SkipRequests bool `json:"-"` // Automatically accept all permissions (YOLO mode)
}

type TrailerStyle string

const (
	TrailerStyleNone         TrailerStyle = "none"
	TrailerStyleCoAuthoredBy TrailerStyle = "co-authored-by"
	TrailerStyleAssistedBy   TrailerStyle = "assisted-by"
)

type Attribution struct {
	TrailerStyle TrailerStyle `json:"trailer_style,omitempty" jsonschema:"description=Style of attribution trailer to add to commits,enum=none,enum=co-authored-by,enum=assisted-by,default=assisted-by"`
	CoAuthoredBy *bool `json:"co_authored_by,omitempty" jsonschema:"description=Deprecated: use trailer_style instead"`
	GeneratedWith bool `json:"generated_with,omitempty" jsonschema:"description=Add Generated with Crush line to commit messages and issues and PRs,default=true"`
}

// JSONSchemaExtend marks the co_authored_by field as deprecated in the schema.
func (Attribution) JSONSchemaExtend(schema *jsonschema.Schema) {
	if schema.Properties != nil {
		if prop, ok := schema.Properties.Get("co_authored_by"); ok {
			prop.Deprecated = true
		}
	}
}

type Options struct {
	ContextPaths []string `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
	TUI *TUIOptions `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
	Debug bool `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
	DebugLSP bool `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
	DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
	DataDirectory string `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
	DisabledTools []string `json:"disabled_tools" jsonschema:"description=Tools to disable"`
	DisableProviderAutoUpdate bool `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
	Attribution *Attribution `json:"attribution,omitempty" jsonschema:"description=Attribution settings for generated content"`
	DisableMetrics bool `json:"disable_metrics,omitempty" jsonschema:"description=Disable sending metrics,default=false"`
	InitializeAs string `json:"initialize_as,omitempty" jsonschema:"description=Name of the context file to create/update during project initialization,default=AGENTS.md,example=AGENTS.md,example=CRUSH.md,example=CLAUDE.md,example=docs/LLMs.md"`
}

type MCPs map[string]MCPConfig

type MCP struct {
	Name string `json:"name"`
	MCP MCPConfig `json:"mcp"`
}

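// Sorted returns the configured MCP servers as a slice sorted by name, so
// iteration order is deterministic.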
func (m MCPs) Sorted() []MCP {
	sorted := make([]MCP, 0, len(m))
	for k, v := range m {
		sorted = append(sorted, MCP{
			Name: k,
			MCP: v,
		})
	}
	slices.SortFunc(sorted, func(a, b MCP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

type LSPs map[string]LSPConfig

type LSP struct {
	Name string `json:"name"`
	LSP LSPConfig `json:"lsp"`
}

func (l LSPs) Sorted() []LSP {
	sorted := make([]LSP, 0, len(l))
	for k, v := range l {
		sorted = append(sorted, LSP{
			Name: k,
			LSP: v,
		})
	}
	slices.SortFunc(sorted, func(a, b LSP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

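// ResolvedEnv resolves shell and environment variable references in the LSP
// server's env map and returns the entries as KEY=VALUE strings.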
func (l LSPConfig) ResolvedEnv() []string {
	return resolveEnvs(l.Env)
}

func (m MCPConfig) ResolvedEnv() []string {
	return resolveEnvs(m.Env)
}

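// ResolvedHeaders resolves shell and environment variable references in the
// MCP server's headers and returns the updated map.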
func (m MCPConfig) ResolvedHeaders() map[string]string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range m.Headers {
		var err error
		m.Headers[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving header variable", "error", err, "variable", e, "value", v)
			continue
		}
	}
	return m.Headers
}

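// Agent describes a single agent: which model type it uses, which tools and
// MCP servers it may call, and which context paths it reads.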
type Agent struct {
	ID string `json:"id,omitempty"`
	Name string `json:"name,omitempty"`
	Description string `json:"description,omitempty"`
	// This is the id of the system prompt used by the agent
	Disabled bool `json:"disabled,omitempty"`

	Model SelectedModelType `json:"model" jsonschema:"required,description=The model type to use for this agent,enum=large,enum=small,default=large"`

	// The available tools for the agent
	// if this is nil, all tools are available
	AllowedTools []string `json:"allowed_tools,omitempty"`

	// this tells us which MCPs are available for this agent
	// if this is empty all mcps are available
	// the string array is the list of tools from the AllowedMCP the agent has available
	// if the string array is nil, all tools from the AllowedMCP are available
	AllowedMCP map[string][]string `json:"allowed_mcp,omitempty"`

	// Overrides the context paths for this agent
	ContextPaths []string `json:"context_paths,omitempty"`
}

type Tools struct {
	Ls ToolLs `json:"ls,omitzero"`
}

type ToolLs struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

func (t ToolLs) Limits() (depth, items int) {
	return ptrValOr(t.MaxDepth, 0), ptrValOr(t.MaxItems, 0)
}

// Config holds the configuration for crush.
type Config struct {
	Schema string `json:"$schema,omitempty"`

	// We currently only support large/small as values here.
	Models map[SelectedModelType]SelectedModel `json:"models,omitempty" jsonschema:"description=Model configurations for different model types,example={\"large\":{\"model\":\"gpt-4o\",\"provider\":\"openai\"}}"`
	// Recently used models stored in the data directory config.
	RecentModels map[SelectedModelType][]SelectedModel `json:"recent_models,omitempty" jsonschema:"description=Recently used models sorted by most recent first"`

	// The providers that are configured
	Providers *csync.Map[string, ProviderConfig] `json:"providers,omitempty" jsonschema:"description=AI provider configurations"`

	MCP MCPs `json:"mcp,omitempty" jsonschema:"description=Model Context Protocol server configurations"`

	LSP LSPs `json:"lsp,omitempty" jsonschema:"description=Language Server Protocol configurations"`

	Options *Options `json:"options,omitempty" jsonschema:"description=General application options"`

	Permissions *Permissions `json:"permissions,omitempty" jsonschema:"description=Permission settings for tool usage"`

	Tools Tools `json:"tools,omitzero" jsonschema:"description=Tool configurations"`

	Hooks *hooks.Config `json:"hooks,omitempty" jsonschema:"description=Hook system configuration"`

	Agents map[string]Agent `json:"-"`

	// Internal
	workingDir string `json:"-"`
	// TODO: find a better way to do this; it should probably not be part of the config
	resolver VariableResolver
	dataConfigDir string `json:"-"`
	knownProviders []catwalk.Provider `json:"-"`
}

func (c *Config) WorkingDir() string {
	return c.workingDir
}

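// EnabledProviders returns all configured providers that are not disabled.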
func (c *Config) EnabledProviders() []ProviderConfig {
	var enabled []ProviderConfig
	for p := range c.Providers.Seq() {
		if !p.Disable {
			enabled = append(enabled, p)
		}
	}
	return enabled
}

// IsConfigured returns true if at least one provider is configured.
func (c *Config) IsConfigured() bool {
	return len(c.EnabledProviders()) > 0
}

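// GetModel returns the model with the given ID from the given provider, or
// nil if either the provider or the model is not configured.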
func (c *Config) GetModel(provider, model string) *catwalk.Model {
	if providerConfig, ok := c.Providers.Get(provider); ok {
		for _, m := range providerConfig.Models {
			if m.ID == model {
				return &m
			}
		}
	}
	return nil
}

func (c *Config) GetProviderForModel(modelType SelectedModelType) *ProviderConfig {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	if providerConfig, ok := c.Providers.Get(model.Provider); ok {
		return &providerConfig
	}
	return nil
}

func (c *Config) GetModelByType(modelType SelectedModelType) *catwalk.Model {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) LargeModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeLarge]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) SmallModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeSmall]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

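// SetCompactMode toggles TUI compact mode in memory and persists the setting
// to the config file in the data directory.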
func (c *Config) SetCompactMode(enabled bool) error {
	if c.Options == nil {
		c.Options = &Options{}
	}
	if c.Options.TUI == nil {
		c.Options.TUI = &TUIOptions{}
	}
	c.Options.TUI.CompactMode = enabled
	return c.SetConfigField("options.tui.compact_mode", enabled)
}

func (c *Config) Resolve(key string) (string, error) {
	if c.resolver == nil {
		return "", fmt.Errorf("no variable resolver configured")
	}
	return c.resolver.ResolveValue(key)
}

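// UpdatePreferredModel sets the selected model for the given model type,
// persists it to the config file, and records it in the recent-models list.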
func (c *Config) UpdatePreferredModel(modelType SelectedModelType, model SelectedModel) error {
	c.Models[modelType] = model
	if err := c.SetConfigField(fmt.Sprintf("models.%s", modelType), model); err != nil {
		return fmt.Errorf("failed to update preferred model: %w", err)
	}
	if err := c.recordRecentModel(modelType, model); err != nil {
		return err
	}
	return nil
}

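// SetConfigField persists a single field to the config file in the data
// directory, creating the file if it does not exist. The key is a dotted
// sjson path, for example (illustrative):
//
//	_ = c.SetConfigField("options.tui.compact_mode", true)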
func (c *Config) SetConfigField(key string, value any) error {
	// read the data
	data, err := os.ReadFile(c.dataConfigDir)
	if err != nil {
		if os.IsNotExist(err) {
			data = []byte("{}")
		} else {
			return fmt.Errorf("failed to read config file: %w", err)
		}
	}

	newValue, err := sjson.Set(string(data), key, value)
	if err != nil {
		return fmt.Errorf("failed to set config field %s: %w", key, err)
	}
	if err := os.WriteFile(c.dataConfigDir, []byte(newValue), 0o600); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}
	return nil
}

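// SetProviderAPIKey stores an API key for a provider: it persists the key to
// the config file, then updates the in-memory provider config, creating one
// from the known-providers catalog if the provider is not configured yet.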
func (c *Config) SetProviderAPIKey(providerID, apiKey string) error {
	// First save to the config file
	err := c.SetConfigField("providers."+providerID+".api_key", apiKey)
	if err != nil {
		return fmt.Errorf("failed to save API key to config file: %w", err)
	}

	providerConfig, exists := c.Providers.Get(providerID)
	if exists {
		providerConfig.APIKey = apiKey
		c.Providers.Set(providerID, providerConfig)
		return nil
	}

	var foundProvider *catwalk.Provider
	for _, p := range c.knownProviders {
		if string(p.ID) == providerID {
			foundProvider = &p
			break
		}
	}

	if foundProvider != nil {
		// Create new provider config based on known provider
		providerConfig = ProviderConfig{
			ID: providerID,
			Name: foundProvider.Name,
			BaseURL: foundProvider.APIEndpoint,
			Type: foundProvider.Type,
			APIKey: apiKey,
			Disable: false,
			ExtraHeaders: make(map[string]string),
			ExtraParams: make(map[string]string),
			Models: foundProvider.Models,
		}
	} else {
		return fmt.Errorf("provider with ID %s not found in known providers", providerID)
	}
	// Store the updated provider config
	c.Providers.Set(providerID, providerConfig)
	return nil
}

const maxRecentModelsPerType = 5

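// recordRecentModel moves the given model to the front of the recent-models
// list for its type, removes duplicates, caps the list at
// maxRecentModelsPerType entries, and persists the result when it changed.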
func (c *Config) recordRecentModel(modelType SelectedModelType, model SelectedModel) error {
	if model.Provider == "" || model.Model == "" {
		return nil
	}

	if c.RecentModels == nil {
		c.RecentModels = make(map[SelectedModelType][]SelectedModel)
	}

	eq := func(a, b SelectedModel) bool {
		return a.Provider == b.Provider && a.Model == b.Model
	}

	entry := SelectedModel{
		Provider: model.Provider,
		Model: model.Model,
	}

	current := c.RecentModels[modelType]
	withoutCurrent := slices.DeleteFunc(slices.Clone(current), func(existing SelectedModel) bool {
		return eq(existing, entry)
	})

	updated := append([]SelectedModel{entry}, withoutCurrent...)
	if len(updated) > maxRecentModelsPerType {
		updated = updated[:maxRecentModelsPerType]
	}

	if slices.EqualFunc(current, updated, eq) {
		return nil
	}

	c.RecentModels[modelType] = updated

	if err := c.SetConfigField(fmt.Sprintf("recent_models.%s", modelType), updated); err != nil {
		return fmt.Errorf("failed to persist recent models: %w", err)
	}

	return nil
}

func allToolNames() []string {
	return []string{
		"agent",
		"bash",
		"job_output",
		"job_kill",
		"download",
		"edit",
		"multiedit",
		"lsp_diagnostics",
		"lsp_references",
		"fetch",
		"agentic_fetch",
		"glob",
		"grep",
		"ls",
		"sourcegraph",
		"view",
		"write",
	}
}

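// resolveAllowedTools returns the set of allowed tool names: if disabledTools
// is nil, every tool is allowed; otherwise the disabled tools are filtered
// out.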
func resolveAllowedTools(allTools []string, disabledTools []string) []string {
	if disabledTools == nil {
		return allTools
	}
	// filter out disabled tools (exclude mode)
	return filterSlice(allTools, disabledTools, false)
}

func resolveReadOnlyTools(tools []string) []string {
	readOnlyTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
	// keep only the allowed tools that are also read-only (include mode)
	return filterSlice(tools, readOnlyTools, true)
}

func filterSlice(data []string, mask []string, include bool) []string {
	filtered := []string{}
	for _, s := range data {
		// if include is true, we include items that ARE in the mask
		// if include is false, we include items that are NOT in the mask
		if include == slices.Contains(mask, s) {
			filtered = append(filtered, s)
		}
	}
	return filtered
}

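// SetupAgents builds the default agents: the "coder" agent with all allowed
// tools, and the "task" agent restricted to read-only tools with no MCP
// servers by default.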
func (c *Config) SetupAgents() {
	allowedTools := resolveAllowedTools(allToolNames(), c.Options.DisabledTools)

	agents := map[string]Agent{
		AgentCoder: {
			ID: AgentCoder,
			Name: "Coder",
			Description: "An agent that helps with executing coding tasks.",
			Model: SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: allowedTools,
		},

		AgentTask: {
			ID: AgentTask,
			Name: "Task",
			Description: "An agent that helps with searching for context and finding implementation details.",
			Model: SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: resolveReadOnlyTools(allowedTools),
			// NO MCPs or LSPs by default
			AllowedMCP: map[string][]string{},
		},
	}
	c.Agents = agents
}

func (c *Config) Resolver() VariableResolver {
	return c.resolver
}

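// TestConnection makes a short authenticated GET request against a
// provider-type-specific endpoint (typically the models listing) to verify
// that the base URL and API key are usable.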
func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
	testURL := ""
	headers := make(map[string]string)
	apiKey, _ := resolver.ResolveValue(c.APIKey)
	switch c.Type {
	case catwalk.TypeOpenAI, catwalk.TypeOpenAICompat, catwalk.TypeOpenRouter:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.openai.com/v1"
		}
		if c.ID == string(catwalk.InferenceProviderOpenRouter) {
			testURL = baseURL + "/credits"
		} else {
			testURL = baseURL + "/models"
		}
		headers["Authorization"] = "Bearer " + apiKey
	case catwalk.TypeAnthropic:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.anthropic.com/v1"
		}
		testURL = baseURL + "/models"
		// TODO: replace with const when catwalk is released
		if c.ID == "kimi-coding" {
			testURL = baseURL + "/v1/models"
		}
		headers["x-api-key"] = apiKey
		headers["anthropic-version"] = "2023-06-01"
	case catwalk.TypeGoogle:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://generativelanguage.googleapis.com"
		}
		testURL = baseURL + "/v1beta/models?key=" + url.QueryEscape(apiKey)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	client := &http.Client{}
	req, err := http.NewRequestWithContext(ctx, "GET", testURL, nil)
	if err != nil {
		return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
	}
	for k, v := range headers {
		req.Header.Set(k, v)
	}
	for k, v := range c.ExtraHeaders {
		req.Header.Set(k, v)
	}
	resp, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("failed to connect to provider %s: %w", c.ID, err)
	}
	defer resp.Body.Close()
	if c.ID == string(catwalk.InferenceProviderZAI) {
		// for z.ai just check that the response is not a 401
		if resp.StatusCode == http.StatusUnauthorized {
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, resp.Status)
		}
	} else if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("failed to connect to provider %s: %s", c.ID, resp.Status)
	}
	return nil
}

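// resolveEnvs resolves shell and environment variable references in the given
// env map and returns the entries as KEY=VALUE strings, logging an error for
// any value that fails to resolve.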
func resolveEnvs(envs map[string]string) []string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range envs {
		var err error
		envs[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving environment variable", "error", err, "variable", e, "value", v)
			continue
		}
	}

	res := make([]string, 0, len(envs))
	for k, v := range envs {
		res = append(res, fmt.Sprintf("%s=%s", k, v))
	}
	return res
}

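// ptrValOr returns the value pointed to by t, or el if t is nil.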
func ptrValOr[T any](t *T, el T) T {
	if t == nil {
		return el
	}
	return *t
}