config.go

package config

import (
	"context"
	"fmt"
	"log/slog"
	"net/http"
	"net/url"
	"os"
	"slices"
	"strings"
	"time"

	"github.com/charmbracelet/catwalk/pkg/catwalk"
	"github.com/charmbracelet/crush/internal/csync"
	"github.com/charmbracelet/crush/internal/env"
	"github.com/tidwall/sjson"
)

const (
	appName              = "crush"
	defaultDataDirectory = ".crush"
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"GEMINI.md",
	"gemini.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
	"AGENTS.md",
	"agents.md",
	"Agents.md",
}

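// SelectedModelType identifies which configured model slot (large or small)
// should be used.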
type SelectedModelType string

const (
	SelectedModelTypeLarge SelectedModelType = "large"
	SelectedModelTypeSmall SelectedModelType = "small"
)

const (
	AgentCoder string = "coder"
	AgentTask  string = "task"
)

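// SelectedModel describes a user-selected model and its per-model overrides.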
type SelectedModel struct {
	// The model id as used by the provider API.
	// Required.
	Model string `json:"model" jsonschema:"required,description=The model ID as used by the provider API,example=gpt-4o"`
	// The model provider, same as the key/id used in the providers config.
	// Required.
	Provider string `json:"provider" jsonschema:"required,description=The model provider ID that matches a key in the providers config,example=openai"`

	// Only used by models that use the openai provider and need this set.
	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"description=Reasoning effort level for OpenAI models that support it,enum=low,enum=medium,enum=high"`

	// Used by anthropic models that can reason to indicate if the model should think.
	Think bool `json:"think,omitempty" jsonschema:"description=Enable thinking mode for Anthropic models that support reasoning"`

	// Overrides the default model configuration.
	MaxTokens        int64    `json:"max_tokens,omitempty" jsonschema:"description=Maximum number of tokens for model responses,minimum=1,maximum=200000,example=4096"`
	Temperature      *float64 `json:"temperature,omitempty" jsonschema:"description=Sampling temperature,minimum=0,maximum=1,example=0.7"`
	TopP             *float64 `json:"top_p,omitempty" jsonschema:"description=Top-p (nucleus) sampling parameter,minimum=0,maximum=1,example=0.9"`
	TopK             *int64   `json:"top_k,omitempty" jsonschema:"description=Top-k sampling parameter"`
	FrequencyPenalty *float64 `json:"frequency_penalty,omitempty" jsonschema:"description=Frequency penalty to reduce repetition"`
	PresencePenalty  *float64 `json:"presence_penalty,omitempty" jsonschema:"description=Presence penalty to increase topic diversity"`

	// Override provider specific options.
	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for the model"`
}

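// ProviderConfig describes a single AI provider: how to reach it, how to
// authenticate with it, and which models it exposes.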
type ProviderConfig struct {
	// The provider's id.
	ID string `json:"id,omitempty" jsonschema:"description=Unique identifier for the provider,example=openai"`
	// The provider's name, used for display purposes.
	Name string `json:"name,omitempty" jsonschema:"description=Human-readable name for the provider,example=OpenAI"`
	// The provider's API endpoint.
	BaseURL string `json:"base_url,omitempty" jsonschema:"description=Base URL for the provider's API,format=uri,example=https://api.openai.com/v1"`
	// The provider type, e.g. "openai", "anthropic", etc. If empty it defaults to "openai".
	Type catwalk.Type `json:"type,omitempty" jsonschema:"description=Provider type that determines the API format,enum=openai,enum=anthropic,enum=gemini,enum=azure,enum=vertexai,default=openai"`
	// The provider's API key.
	APIKey string `json:"api_key,omitempty" jsonschema:"description=API key for authentication with the provider,example=$OPENAI_API_KEY"`
	// Marks the provider as disabled.
	Disable bool `json:"disable,omitempty" jsonschema:"description=Whether this provider is disabled,default=false"`

	// Custom system prompt prefix.
	SystemPromptPrefix string `json:"system_prompt_prefix,omitempty" jsonschema:"description=Custom prefix to add to system prompts for this provider"`

	// Extra headers to send with each request to the provider.
	ExtraHeaders map[string]string `json:"extra_headers,omitempty" jsonschema:"description=Additional HTTP headers to send with requests"`
	// Extra fields to include in request bodies; only used by OpenAI-compatible providers.
	ExtraBody map[string]any `json:"extra_body,omitempty" jsonschema:"description=Additional fields to include in request bodies, only works with openai-compatible providers"`

	ProviderOptions map[string]any `json:"provider_options,omitempty" jsonschema:"description=Additional provider-specific options for this provider"`

	// Used to pass extra parameters to the provider.
	ExtraParams map[string]string `json:"-"`

	// The provider's models.
	Models []catwalk.Model `json:"models,omitempty" jsonschema:"description=List of models available from this provider"`
}

type MCPType string

const (
	MCPStdio MCPType = "stdio"
	MCPSSE   MCPType = "sse"
	MCPHttp  MCPType = "http"
)

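// MCPConfig describes a Model Context Protocol server: how to launch it for
// stdio transports, or how to reach it for HTTP/SSE transports.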
type MCPConfig struct {
	Command  string            `json:"command,omitempty" jsonschema:"description=Command to execute for stdio MCP servers,example=npx"`
	Env      map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set for the MCP server"`
	Args     []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the MCP server command"`
	Type     MCPType           `json:"type" jsonschema:"required,description=Type of MCP connection,enum=stdio,enum=sse,enum=http,default=stdio"`
	URL      string            `json:"url,omitempty" jsonschema:"description=URL for HTTP or SSE MCP servers,format=uri,example=http://localhost:3000/mcp"`
	Disabled bool              `json:"disabled,omitempty" jsonschema:"description=Whether this MCP server is disabled,default=false"`
	Timeout  int               `json:"timeout,omitempty" jsonschema:"description=Timeout in seconds for MCP server connections,default=15,example=30,example=60,example=120"`

	// TODO: maybe make it possible to get the value from the env
	Headers map[string]string `json:"headers,omitempty" jsonschema:"description=HTTP headers for HTTP/SSE MCP servers"`
}

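// LSPConfig describes a Language Server Protocol server and how to launch it.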
type LSPConfig struct {
	Disabled    bool              `json:"disabled,omitempty" jsonschema:"description=Whether this LSP server is disabled,default=false"`
	Command     string            `json:"command,omitempty" jsonschema:"required,description=Command to execute for the LSP server,example=gopls"`
	Args        []string          `json:"args,omitempty" jsonschema:"description=Arguments to pass to the LSP server command"`
	Env         map[string]string `json:"env,omitempty" jsonschema:"description=Environment variables to set to the LSP server command"`
	FileTypes   []string          `json:"filetypes,omitempty" jsonschema:"description=File types this LSP server handles,example=go,example=mod,example=rs,example=c,example=js,example=ts"`
	RootMarkers []string          `json:"root_markers,omitempty" jsonschema:"description=Files or directories that indicate the project root,example=go.mod,example=package.json,example=Cargo.toml"`
	InitOptions map[string]any    `json:"init_options,omitempty" jsonschema:"description=Initialization options passed to the LSP server during initialize request"`
	Options     map[string]any    `json:"options,omitempty" jsonschema:"description=LSP server-specific settings passed during initialization"`
}

type TUIOptions struct {
	CompactMode bool   `json:"compact_mode,omitempty" jsonschema:"description=Enable compact mode for the TUI interface,default=false"`
	DiffMode    string `json:"diff_mode,omitempty" jsonschema:"description=Diff mode for the TUI interface,enum=unified,enum=split"`
	// Here we can add themes later or any TUI-related options.

	Completions Completions `json:"completions,omitzero" jsonschema:"description=Completions UI options"`
}

// Completions defines options for the completions UI.
type Completions struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

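// Limits returns the configured maximum depth and item count, falling back to
// zero when a value is unset.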
func (c Completions) Limits() (depth, items int) {
	return ptrValOr(c.MaxDepth, 0), ptrValOr(c.MaxItems, 0)
}

type Permissions struct {
	AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"description=List of tools that don't require permission prompts,example=bash,example=view"` // Tools that don't require permission prompts
	SkipRequests bool     `json:"-"`                                                                                                                              // Automatically accept all permissions (YOLO mode)
}

type TrailerStyle string

const (
	TrailerStyleNone         TrailerStyle = "none"
	TrailerStyleCoAuthoredBy TrailerStyle = "co-authored-by"
	TrailerStyleAssistedBy   TrailerStyle = "assisted-by"
)

type Attribution struct {
	TrailerStyle  TrailerStyle `json:"trailer_style,omitempty" jsonschema:"description=Style of attribution trailer to add to commits,enum=none,enum=co-authored-by,enum=assisted-by,default=co-authored-by"`
	GeneratedWith bool         `json:"generated_with,omitempty" jsonschema:"description=Add Generated with Crush line to commit messages and issues and PRs,default=true"`
}

type Options struct {
	ContextPaths              []string     `json:"context_paths,omitempty" jsonschema:"description=Paths to files containing context information for the AI,example=.cursorrules,example=CRUSH.md"`
	TUI                       *TUIOptions  `json:"tui,omitempty" jsonschema:"description=Terminal user interface options"`
	Debug                     bool         `json:"debug,omitempty" jsonschema:"description=Enable debug logging,default=false"`
	DebugLSP                  bool         `json:"debug_lsp,omitempty" jsonschema:"description=Enable debug logging for LSP servers,default=false"`
	DisableAutoSummarize      bool         `json:"disable_auto_summarize,omitempty" jsonschema:"description=Disable automatic conversation summarization,default=false"`
	DataDirectory             string       `json:"data_directory,omitempty" jsonschema:"description=Directory for storing application data (relative to working directory),default=.crush,example=.crush"` // Relative to the cwd
	DisabledTools             []string     `json:"disabled_tools" jsonschema:"description=Tools to disable"`
	DisableProviderAutoUpdate bool         `json:"disable_provider_auto_update,omitempty" jsonschema:"description=Disable providers auto-update,default=false"`
	Attribution               *Attribution `json:"attribution,omitempty" jsonschema:"description=Attribution settings for generated content"`
	DisableMetrics            bool         `json:"disable_metrics,omitempty" jsonschema:"description=Disable sending metrics,default=false"`
}

type MCPs map[string]MCPConfig

type MCP struct {
	Name string    `json:"name"`
	MCP  MCPConfig `json:"mcp"`
}

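// Sorted returns the MCP configurations as a name-sorted slice.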
func (m MCPs) Sorted() []MCP {
	sorted := make([]MCP, 0, len(m))
	for k, v := range m {
		sorted = append(sorted, MCP{
			Name: k,
			MCP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b MCP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

type LSPs map[string]LSPConfig

type LSP struct {
	Name string    `json:"name"`
	LSP  LSPConfig `json:"lsp"`
}

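// Sorted returns the LSP configurations as a name-sorted slice.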
func (l LSPs) Sorted() []LSP {
	sorted := make([]LSP, 0, len(l))
	for k, v := range l {
		sorted = append(sorted, LSP{
			Name: k,
			LSP:  v,
		})
	}
	slices.SortFunc(sorted, func(a, b LSP) int {
		return strings.Compare(a.Name, b.Name)
	})
	return sorted
}

func (l LSPConfig) ResolvedEnv() []string {
	return resolveEnvs(l.Env)
}

func (m MCPConfig) ResolvedEnv() []string {
	return resolveEnvs(m.Env)
}

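// ResolvedHeaders returns the MCP server headers with shell and environment
// variable references expanded; resolution happens in place on the Headers map.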
func (m MCPConfig) ResolvedHeaders() map[string]string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range m.Headers {
		var err error
		m.Headers[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving header variable", "error", err, "variable", e, "value", v)
			continue
		}
	}
	return m.Headers
}

type Agent struct {
	ID          string `json:"id,omitempty"`
	Name        string `json:"name,omitempty"`
	Description string `json:"description,omitempty"`
	// This is the id of the system prompt used by the agent
	Disabled bool `json:"disabled,omitempty"`

	Model SelectedModelType `json:"model" jsonschema:"required,description=The model type to use for this agent,enum=large,enum=small,default=large"`

	// The available tools for the agent.
	// If this is nil, all tools are available.
	AllowedTools []string `json:"allowed_tools,omitempty"`

	// This tells us which MCPs are available for this agent.
	// If this is empty, all MCPs are available.
	// The string slice lists the tools from the allowed MCP that the agent may use;
	// if the slice is nil, all tools from that MCP are available.
	AllowedMCP map[string][]string `json:"allowed_mcp,omitempty"`

	// Overrides the context paths for this agent.
	ContextPaths []string `json:"context_paths,omitempty"`
}

type Tools struct {
	Ls ToolLs `json:"ls,omitzero"`
}

type ToolLs struct {
	MaxDepth *int `json:"max_depth,omitempty" jsonschema:"description=Maximum depth for the ls tool,default=0,example=10"`
	MaxItems *int `json:"max_items,omitempty" jsonschema:"description=Maximum number of items to return for the ls tool,default=1000,example=100"`
}

func (t ToolLs) Limits() (depth, items int) {
	return ptrValOr(t.MaxDepth, 0), ptrValOr(t.MaxItems, 0)
}

// Config holds the configuration for crush.
type Config struct {
	Schema string `json:"$schema,omitempty"`

	// We currently only support large/small as values here.
	Models map[SelectedModelType]SelectedModel `json:"models,omitempty" jsonschema:"description=Model configurations for different model types,example={\"large\":{\"model\":\"gpt-4o\",\"provider\":\"openai\"}}"`
	// Recently used models stored in the data directory config.
	RecentModels map[SelectedModelType][]SelectedModel `json:"recent_models,omitempty" jsonschema:"description=Recently used models sorted by most recent first"`

	// The providers that are configured.
	Providers *csync.Map[string, ProviderConfig] `json:"providers,omitempty" jsonschema:"description=AI provider configurations"`

	MCP MCPs `json:"mcp,omitempty" jsonschema:"description=Model Context Protocol server configurations"`

	LSP LSPs `json:"lsp,omitempty" jsonschema:"description=Language Server Protocol configurations"`

	Options *Options `json:"options,omitempty" jsonschema:"description=General application options"`

	Permissions *Permissions `json:"permissions,omitempty" jsonschema:"description=Permission settings for tool usage"`

	Tools Tools `json:"tools,omitzero" jsonschema:"description=Tool configurations"`

	Agents map[string]Agent `json:"-"`

	// Internal
	workingDir string `json:"-"`
	// TODO: find a better way to do this; it should probably not be part of the config.
	resolver       VariableResolver
	dataConfigDir  string             `json:"-"`
	knownProviders []catwalk.Provider `json:"-"`
}

func (c *Config) WorkingDir() string {
	return c.workingDir
}

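// EnabledProviders returns all configured providers that are not disabled.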
func (c *Config) EnabledProviders() []ProviderConfig {
	var enabled []ProviderConfig
	for p := range c.Providers.Seq() {
		if !p.Disable {
			enabled = append(enabled, p)
		}
	}
	return enabled
}

// IsConfigured reports whether at least one enabled provider is configured.
func (c *Config) IsConfigured() bool {
	return len(c.EnabledProviders()) > 0
}

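// GetModel returns the model with the given ID from the given provider, or
// nil if the provider or model is not configured.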
func (c *Config) GetModel(provider, model string) *catwalk.Model {
	if providerConfig, ok := c.Providers.Get(provider); ok {
		for _, m := range providerConfig.Models {
			if m.ID == model {
				return &m
			}
		}
	}
	return nil
}

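// GetProviderForModel returns the provider configuration backing the given
// model type, or nil if the model or provider is not configured.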
func (c *Config) GetProviderForModel(modelType SelectedModelType) *ProviderConfig {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	if providerConfig, ok := c.Providers.Get(model.Provider); ok {
		return &providerConfig
	}
	return nil
}

func (c *Config) GetModelByType(modelType SelectedModelType) *catwalk.Model {
	model, ok := c.Models[modelType]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) LargeModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeLarge]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

func (c *Config) SmallModel() *catwalk.Model {
	model, ok := c.Models[SelectedModelTypeSmall]
	if !ok {
		return nil
	}
	return c.GetModel(model.Provider, model.Model)
}

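// SetCompactMode toggles the TUI compact mode and persists the change to the
// data directory config file.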
func (c *Config) SetCompactMode(enabled bool) error {
	if c.Options == nil {
		c.Options = &Options{}
	}
	if c.Options.TUI == nil {
		c.Options.TUI = &TUIOptions{}
	}
	c.Options.TUI.CompactMode = enabled
	return c.SetConfigField("options.tui.compact_mode", enabled)
}

func (c *Config) Resolve(key string) (string, error) {
	if c.resolver == nil {
		return "", fmt.Errorf("no variable resolver configured")
	}
	return c.resolver.ResolveValue(key)
}

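// UpdatePreferredModel sets the model for the given model type, persists it,
// and records it in the recent-models history.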
func (c *Config) UpdatePreferredModel(modelType SelectedModelType, model SelectedModel) error {
	c.Models[modelType] = model
	if err := c.SetConfigField(fmt.Sprintf("models.%s", modelType), model); err != nil {
		return fmt.Errorf("failed to update preferred model: %w", err)
	}
	if err := c.recordRecentModel(modelType, model); err != nil {
		return err
	}
	return nil
}

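// SetConfigField writes a single value at the given sjson path in the data
// directory config file, creating the file if it does not exist.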
func (c *Config) SetConfigField(key string, value any) error {
	// read the data
	data, err := os.ReadFile(c.dataConfigDir)
	if err != nil {
		if os.IsNotExist(err) {
			data = []byte("{}")
		} else {
			return fmt.Errorf("failed to read config file: %w", err)
		}
	}

	newValue, err := sjson.Set(string(data), key, value)
	if err != nil {
		return fmt.Errorf("failed to set config field %s: %w", key, err)
	}
	if err := os.WriteFile(c.dataConfigDir, []byte(newValue), 0o600); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}
	return nil
}

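// SetProviderAPIKey persists an API key for a provider and updates the
// in-memory provider config, creating one from the known providers list if
// the provider is not configured yet.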
func (c *Config) SetProviderAPIKey(providerID, apiKey string) error {
	// First save to the config file
	err := c.SetConfigField("providers."+providerID+".api_key", apiKey)
	if err != nil {
		return fmt.Errorf("failed to save API key to config file: %w", err)
	}

	providerConfig, exists := c.Providers.Get(providerID)
	if exists {
		providerConfig.APIKey = apiKey
		c.Providers.Set(providerID, providerConfig)
		return nil
	}

	var foundProvider *catwalk.Provider
	for _, p := range c.knownProviders {
		if string(p.ID) == providerID {
			foundProvider = &p
			break
		}
	}

	if foundProvider != nil {
		// Create new provider config based on known provider
		providerConfig = ProviderConfig{
			ID:           providerID,
			Name:         foundProvider.Name,
			BaseURL:      foundProvider.APIEndpoint,
			Type:         foundProvider.Type,
			APIKey:       apiKey,
			Disable:      false,
			ExtraHeaders: make(map[string]string),
			ExtraParams:  make(map[string]string),
			Models:       foundProvider.Models,
		}
	} else {
		return fmt.Errorf("provider with ID %s not found in known providers", providerID)
	}
	// Store the updated provider config
	c.Providers.Set(providerID, providerConfig)
	return nil
}

const maxRecentModelsPerType = 5

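// recordRecentModel moves the given model to the front of the recent-models
// list for the model type, removes duplicates, caps the list, and persists it.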
func (c *Config) recordRecentModel(modelType SelectedModelType, model SelectedModel) error {
	if model.Provider == "" || model.Model == "" {
		return nil
	}

	if c.RecentModels == nil {
		c.RecentModels = make(map[SelectedModelType][]SelectedModel)
	}

	eq := func(a, b SelectedModel) bool {
		return a.Provider == b.Provider && a.Model == b.Model
	}

	entry := SelectedModel{
		Provider: model.Provider,
		Model:    model.Model,
	}

	current := c.RecentModels[modelType]
	withoutCurrent := slices.DeleteFunc(slices.Clone(current), func(existing SelectedModel) bool {
		return eq(existing, entry)
	})

	updated := append([]SelectedModel{entry}, withoutCurrent...)
	if len(updated) > maxRecentModelsPerType {
		updated = updated[:maxRecentModelsPerType]
	}

	if slices.EqualFunc(current, updated, eq) {
		return nil
	}

	c.RecentModels[modelType] = updated

	if err := c.SetConfigField(fmt.Sprintf("recent_models.%s", modelType), updated); err != nil {
		return fmt.Errorf("failed to persist recent models: %w", err)
	}

	return nil
}

func allToolNames() []string {
	return []string{
		"agent",
		"bash",
		"job_output",
		"job_kill",
		"download",
		"edit",
		"multiedit",
		"lsp_diagnostics",
		"lsp_references",
		"fetch",
		"agentic_fetch",
		"glob",
		"grep",
		"ls",
		"sourcegraph",
		"view",
		"write",
	}
}

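// resolveAllowedTools removes any disabled tools from the full tool list; a
// nil disabled list means every tool is allowed.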
func resolveAllowedTools(allTools []string, disabledTools []string) []string {
	if disabledTools == nil {
		return allTools
	}
	// filter out disabled tools (exclude mode)
	return filterSlice(allTools, disabledTools, false)
}

func resolveReadOnlyTools(tools []string) []string {
	readOnlyTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
	// keep only the tools that are in the read-only set (include mode)
	return filterSlice(tools, readOnlyTools, true)
}

func filterSlice(data []string, mask []string, include bool) []string {
	filtered := []string{}
	for _, s := range data {
		// if include is true, we include items that ARE in the mask
		// if include is false, we include items that are NOT in the mask
		if include == slices.Contains(mask, s) {
			filtered = append(filtered, s)
		}
	}
	return filtered
}

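// SetupAgents builds the default coder and task agents from the configured
// options and installs them on the config.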
func (c *Config) SetupAgents() {
	allowedTools := resolveAllowedTools(allToolNames(), c.Options.DisabledTools)

	agents := map[string]Agent{
		AgentCoder: {
			ID:           AgentCoder,
			Name:         "Coder",
			Description:  "An agent that helps with executing coding tasks.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: allowedTools,
		},

		AgentTask: {
			ID:           AgentTask,
			Name:         "Task",
			Description:  "An agent that helps with searching for context and finding implementation details.",
			Model:        SelectedModelTypeLarge,
			ContextPaths: c.Options.ContextPaths,
			AllowedTools: resolveReadOnlyTools(allowedTools),
			// NO MCPs or LSPs by default
			AllowedMCP: map[string][]string{},
		},
	}
	c.Agents = agents
}

func (c *Config) Resolver() VariableResolver {
	return c.resolver
}

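// TestConnection sends a small authenticated request to the provider's API
// (listing models, or credits for OpenRouter) to verify that the endpoint and
// API key are usable.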
func (c *ProviderConfig) TestConnection(resolver VariableResolver) error {
	testURL := ""
	headers := make(map[string]string)
	apiKey, _ := resolver.ResolveValue(c.APIKey)
	switch c.Type {
	case catwalk.TypeOpenAI, catwalk.TypeOpenAICompat, catwalk.TypeOpenRouter:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.openai.com/v1"
		}
		if c.ID == string(catwalk.InferenceProviderOpenRouter) {
			testURL = baseURL + "/credits"
		} else {
			testURL = baseURL + "/models"
		}
		headers["Authorization"] = "Bearer " + apiKey
	case catwalk.TypeAnthropic:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://api.anthropic.com/v1"
		}
		testURL = baseURL + "/models"
		headers["x-api-key"] = apiKey
		headers["anthropic-version"] = "2023-06-01"
	case catwalk.TypeGoogle:
		baseURL, _ := resolver.ResolveValue(c.BaseURL)
		if baseURL == "" {
			baseURL = "https://generativelanguage.googleapis.com"
		}
		testURL = baseURL + "/v1beta/models?key=" + url.QueryEscape(apiKey)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	client := &http.Client{}
	req, err := http.NewRequestWithContext(ctx, "GET", testURL, nil)
	if err != nil {
		return fmt.Errorf("failed to create request for provider %s: %w", c.ID, err)
	}
	for k, v := range headers {
		req.Header.Set(k, v)
	}
	for k, v := range c.ExtraHeaders {
		req.Header.Set(k, v)
	}
	b, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("failed to send request to provider %s: %w", c.ID, err)
	}
	if c.ID == string(catwalk.InferenceProviderZAI) {
		if b.StatusCode == http.StatusUnauthorized {
			// for z.ai just check if the http response is not 401
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
		}
	} else {
		if b.StatusCode != http.StatusOK {
			return fmt.Errorf("failed to connect to provider %s: %s", c.ID, b.Status)
		}
	}
	_ = b.Body.Close()
	return nil
}

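// resolveEnvs expands shell and environment variable references in the given
// map (in place) and returns the result as KEY=VALUE pairs.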
func resolveEnvs(envs map[string]string) []string {
	resolver := NewShellVariableResolver(env.New())
	for e, v := range envs {
		var err error
		envs[e], err = resolver.ResolveValue(v)
		if err != nil {
			slog.Error("error resolving environment variable", "error", err, "variable", e, "value", v)
			continue
		}
	}

	res := make([]string, 0, len(envs))
	for k, v := range envs {
		res = append(res, fmt.Sprintf("%s=%s", k, v))
	}
	return res
}

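// ptrValOr returns the value pointed to by t, or el if t is nil.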
func ptrValOr[T any](t *T, el T) T {
	if t == nil {
		return el
	}
	return *t
}