config.go

  1// Package config manages application configuration from various sources.
  2package config
  3
  4import (
  5	"encoding/json"
  6	"fmt"
  7	"log/slog"
  8	"os"
  9	"path/filepath"
 10	"strings"
 11
 12	"github.com/charmbracelet/crush/internal/llm/models"
 13	"github.com/charmbracelet/crush/internal/logging"
 14	"github.com/spf13/viper"
 15)
 16
 17// MCPType defines the type of MCP (Model Context Protocol) server.
 18type MCPType string
 19
 20// Supported MCP types
 21const (
 22	MCPStdio MCPType = "stdio"
 23	MCPSse   MCPType = "sse"
 24)
 25
 26// MCPServer defines the configuration for a Model Context Protocol server.
 27type MCPServer struct {
 28	Command string            `json:"command"`
 29	Env     []string          `json:"env"`
 30	Args    []string          `json:"args"`
 31	Type    MCPType           `json:"type"`
 32	URL     string            `json:"url"`
 33	Headers map[string]string `json:"headers"`
 34}
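// Illustrative sketch (not referenced elsewhere in this package): one MCPServer
// entry per supported type. The command, URL, and header values are assumptions
// made up for the example.
//
//	stdio := MCPServer{
//		Type:    MCPStdio,
//		Command: "my-mcp-server",
//		Args:    []string{"--stdio"},
//		Env:     []string{"MCP_TOKEN=..."},
//	}
//	sse := MCPServer{
//		Type:    MCPSse,
//		URL:     "https://example.com/mcp/sse",
//		Headers: map[string]string{"Authorization": "Bearer ..."},
//	}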
 35
 36type AgentName string
 37
 38const (
 39	AgentCoder      AgentName = "coder"
 40	AgentSummarizer AgentName = "summarizer"
 41	AgentTask       AgentName = "task"
 42	AgentTitle      AgentName = "title"
 43)
 44
 45// Agent defines the model, token limit, and reasoning settings for a single agent.
 46type Agent struct {
 47	Model           models.ModelID `json:"model"`
 48	MaxTokens       int64          `json:"maxTokens"`
 49	ReasoningEffort string         `json:"reasoningEffort"` // For OpenAI models: low, medium, high
 50}
 51
 52// Provider defines configuration for an LLM provider.
 53type Provider struct {
 54	APIKey   string `json:"apiKey"`
 55	Disabled bool   `json:"disabled"`
 56}
 57
 58// Data defines storage configuration.
 59type Data struct {
 60	Directory string `json:"directory,omitempty"`
 61}
 62
 63// LSPConfig defines configuration for Language Server Protocol integration.
 64type LSPConfig struct {
 65	Disabled bool     `json:"disabled"`
 66	Command  string   `json:"command"`
 67	Args     []string `json:"args"`
 68	Options  any      `json:"options"`
 69}
 70
 71// TUIConfig defines the configuration for the Terminal User Interface.
 72type TUIConfig struct {
 73	Theme string `json:"theme,omitempty"`
 74}
 75
 76// Config is the main configuration structure for the application.
 77type Config struct {
 78	Data         Data                              `json:"data"`
 79	WorkingDir   string                            `json:"wd,omitempty"`
 80	MCPServers   map[string]MCPServer              `json:"mcpServers,omitempty"`
 81	Providers    map[models.ModelProvider]Provider `json:"providers,omitempty"`
 82	LSP          map[string]LSPConfig              `json:"lsp,omitempty"`
 83	Agents       map[AgentName]Agent               `json:"agents,omitempty"`
 84	Debug        bool                              `json:"debug,omitempty"`
 85	DebugLSP     bool                              `json:"debugLSP,omitempty"`
 86	ContextPaths []string                          `json:"contextPaths,omitempty"`
 87	TUI          TUIConfig                         `json:"tui"`
 88	AutoCompact  bool                              `json:"autoCompact,omitempty"`
 89}
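// Illustrative sketch of a config file that maps onto Config through the json
// tags above (these tags are what updateCfgFile reads and writes; viper applies
// its own key handling when loading). The provider name and model ID are
// assumptions used only for the example.
//
//	{
//	  "data": { "directory": ".crush" },
//	  "providers": { "openai": { "apiKey": "sk-..." } },
//	  "agents": { "coder": { "model": "gpt-4.1", "maxTokens": 5000 } },
//	  "lsp": { "go": { "command": "gopls" } },
//	  "tui": { "theme": "crush" }
//	}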
 90
 91// Application constants
 92const (
 93	defaultDataDirectory = ".crush"
 94	defaultLogLevel      = "info"
 95	appName              = "crush"
 96
 97	MaxTokensFallbackDefault = 4096
 98)
 99
100var defaultContextPaths = []string{
101	".github/copilot-instructions.md",
102	".cursorrules",
103	".cursor/rules/",
104	"CLAUDE.md",
105	"CLAUDE.local.md",
106	"GEMINI.md",
107	"gemini.md",
108	"crush.md",
109	"crush.local.md",
110	"Crush.md",
111	"Crush.local.md",
112	"CRUSH.md",
113	"CRUSH.local.md",
114}
115
116// Global configuration instance
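// It behaves as a process-wide singleton; access is not synchronized, so Load,
// Get, and the Update* helpers are assumed to be called from a single goroutine.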
117var cfg *Config
118
119// Load initializes the configuration from environment variables and config files.
120// If debug is true, debug mode is enabled and log level is set to debug.
 121// It returns an error if configuration loading fails. Subsequent calls return the cached instance.
122func Load(workingDir string, debug bool) (*Config, error) {
123	if cfg != nil {
124		return cfg, nil
125	}
126
127	cfg = &Config{
128		WorkingDir: workingDir,
129		MCPServers: make(map[string]MCPServer),
130		Providers:  make(map[models.ModelProvider]Provider),
131		LSP:        make(map[string]LSPConfig),
132	}
133
134	configureViper()
135	setDefaults(debug)
136
137	// Read global config
138	if err := readConfig(viper.ReadInConfig()); err != nil {
139		return cfg, err
140	}
141
142	// Load and merge local config
143	mergeLocalConfig(workingDir)
144
145	setProviderDefaults()
146
147	// Apply configuration to the struct
148	if err := viper.Unmarshal(cfg); err != nil {
149		return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
150	}
151
152	applyDefaultValues()
153	defaultLevel := slog.LevelInfo
154	if cfg.Debug {
155		defaultLevel = slog.LevelDebug
156	}
157	if os.Getenv("CRUSH_DEV_DEBUG") == "true" {
 158		loggingFile := filepath.Join(cfg.Data.Directory, "debug.log")
159
 160		// Ensure the data directory exists; os.OpenFile below creates the
 161		// log file itself on first use, since it is opened with O_CREATE.
 162		if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
 163			return cfg, fmt.Errorf("failed to create directory: %w", err)
 164		}
169
170		sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
171		if err != nil {
172			return cfg, fmt.Errorf("failed to open log file: %w", err)
173		}
174		// Configure logger
175		logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
176			Level: defaultLevel,
177		}))
178		slog.SetDefault(logger)
179	} else {
180		// Configure logger
181		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
182			Level: defaultLevel,
183		}))
184		slog.SetDefault(logger)
185	}
186
187	// Validate configuration
188	if err := Validate(); err != nil {
189		return cfg, fmt.Errorf("config validation failed: %w", err)
190	}
191
192	if cfg.Agents == nil {
193		cfg.Agents = make(map[AgentName]Agent)
194	}
195
 196	// Override the max tokens for the title agent to keep generated titles short
197	cfg.Agents[AgentTitle] = Agent{
198		Model:     cfg.Agents[AgentTitle].Model,
199		MaxTokens: 80,
200	}
201	return cfg, nil
202}
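// Usage sketch from a hypothetical caller outside this package:
//
//	cfg, err := config.Load(workingDir, false)
//	if err != nil {
//		// handle the error
//	}
//	_ = cfg.WorkingDir
//
// Because the result is cached in the package-level cfg variable, only the
// first call does any work; later calls return the same instance regardless of
// their arguments.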
203
204// configureViper sets up viper's configuration paths and environment variables.
205func configureViper() {
206	viper.SetConfigName(fmt.Sprintf(".%s", appName))
207	viper.SetConfigType("json")
208
209	// Unix-style paths
210	viper.AddConfigPath("$HOME")
211	viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
212	viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
213
214	// Windows-style paths
 215	viper.AddConfigPath("$USERPROFILE")
216	viper.AddConfigPath(fmt.Sprintf("$APPDATA/%s", appName))
217	viper.AddConfigPath(fmt.Sprintf("$LOCALAPPDATA/%s", appName))
218
219	viper.SetEnvPrefix(strings.ToUpper(appName))
220	viper.AutomaticEnv()
221}
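// With the CRUSH prefix and AutomaticEnv, top-level keys can be overridden from
// the environment; for example, CRUSH_DEBUG=true should map to the "debug" key.
// Nested keys such as "data.directory" keep their dots because no
// EnvKeyReplacer is set, so they are not reachable through the usual
// underscore-separated variable names.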
222
223// setDefaults configures default values for configuration options.
224func setDefaults(debug bool) {
225	viper.SetDefault("data.directory", defaultDataDirectory)
226	viper.SetDefault("contextPaths", defaultContextPaths)
227	viper.SetDefault("tui.theme", "crush")
228	viper.SetDefault("autoCompact", true)
229
230	if debug {
231		viper.SetDefault("debug", true)
232		viper.Set("log.level", "debug")
233	} else {
234		viper.SetDefault("debug", false)
235		viper.SetDefault("log.level", defaultLogLevel)
236	}
237}
238
239// setProviderDefaults configures provider API keys and default agent models
240// based on environment variables and the configuration file.
241func setProviderDefaults() {
242	// Set all API keys we can find in the environment
243	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
244		viper.SetDefault("providers.anthropic.apiKey", apiKey)
245	}
246	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
247		viper.SetDefault("providers.openai.apiKey", apiKey)
248	}
249	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
250		viper.SetDefault("providers.gemini.apiKey", apiKey)
251	}
252	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
253		viper.SetDefault("providers.groq.apiKey", apiKey)
254	}
255	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
256		viper.SetDefault("providers.openrouter.apiKey", apiKey)
257	}
258	if apiKey := os.Getenv("XAI_API_KEY"); apiKey != "" {
259		viper.SetDefault("providers.xai.apiKey", apiKey)
260	}
261	if endpoint := os.Getenv("AZURE_OPENAI_ENDPOINT"); endpoint != "" {
262		// api-key may be empty when using Entra ID credentials – that's okay
263		viper.SetDefault("providers.azure.apiKey", os.Getenv("AZURE_OPENAI_API_KEY"))
264	}
265
266	// Use this order to set the default models (the first configured provider wins):
267	// 1. Anthropic
268	// 2. OpenAI
269	// 3. Google Gemini
270	// 4. Groq
271	// 5. OpenRouter
272	// 6. xAI
273	// 7. AWS Bedrock
274	// 8. Azure OpenAI, then 9. Google Cloud VertexAI
275
276	// Anthropic configuration
277	if key := viper.GetString("providers.anthropic.apiKey"); strings.TrimSpace(key) != "" {
278		viper.SetDefault("agents.coder.model", models.Claude4Sonnet)
279		viper.SetDefault("agents.summarizer.model", models.Claude4Sonnet)
280		viper.SetDefault("agents.task.model", models.Claude4Sonnet)
281		viper.SetDefault("agents.title.model", models.Claude4Sonnet)
282		return
283	}
284
285	// OpenAI configuration
286	if key := viper.GetString("providers.openai.apiKey"); strings.TrimSpace(key) != "" {
287		viper.SetDefault("agents.coder.model", models.GPT41)
288		viper.SetDefault("agents.summarizer.model", models.GPT41)
289		viper.SetDefault("agents.task.model", models.GPT41Mini)
290		viper.SetDefault("agents.title.model", models.GPT41Mini)
291		return
292	}
293
294	// Google Gemini configuration
295	if key := viper.GetString("providers.gemini.apiKey"); strings.TrimSpace(key) != "" {
296		viper.SetDefault("agents.coder.model", models.Gemini25)
297		viper.SetDefault("agents.summarizer.model", models.Gemini25)
298		viper.SetDefault("agents.task.model", models.Gemini25Flash)
299		viper.SetDefault("agents.title.model", models.Gemini25Flash)
300		return
301	}
302
303	// Groq configuration
304	if key := viper.GetString("providers.groq.apiKey"); strings.TrimSpace(key) != "" {
305		viper.SetDefault("agents.coder.model", models.QWENQwq)
306		viper.SetDefault("agents.summarizer.model", models.QWENQwq)
307		viper.SetDefault("agents.task.model", models.QWENQwq)
308		viper.SetDefault("agents.title.model", models.QWENQwq)
309		return
310	}
311
312	// OpenRouter configuration
313	if key := viper.GetString("providers.openrouter.apiKey"); strings.TrimSpace(key) != "" {
314		viper.SetDefault("agents.coder.model", models.OpenRouterClaude37Sonnet)
315		viper.SetDefault("agents.summarizer.model", models.OpenRouterClaude37Sonnet)
316		viper.SetDefault("agents.task.model", models.OpenRouterClaude37Sonnet)
317		viper.SetDefault("agents.title.model", models.OpenRouterClaude35Haiku)
318		return
319	}
320
321	// XAI configuration
322	if key := viper.GetString("providers.xai.apiKey"); strings.TrimSpace(key) != "" {
323		viper.SetDefault("agents.coder.model", models.XAIGrok3Beta)
324		viper.SetDefault("agents.summarizer.model", models.XAIGrok3Beta)
325		viper.SetDefault("agents.task.model", models.XAIGrok3Beta)
326		viper.SetDefault("agents.title.model", models.XAiGrok3MiniFastBeta)
327		return
328	}
329
330	// AWS Bedrock configuration
331	if hasAWSCredentials() {
332		viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
333		viper.SetDefault("agents.summarizer.model", models.BedrockClaude37Sonnet)
334		viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
335		viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
336		return
337	}
338
339	// Azure OpenAI configuration
340	if os.Getenv("AZURE_OPENAI_ENDPOINT") != "" {
341		viper.SetDefault("agents.coder.model", models.AzureGPT41)
342		viper.SetDefault("agents.summarizer.model", models.AzureGPT41)
343		viper.SetDefault("agents.task.model", models.AzureGPT41Mini)
344		viper.SetDefault("agents.title.model", models.AzureGPT41Mini)
345		return
346	}
347
348	// Google Cloud VertexAI configuration
349	if hasVertexAICredentials() {
350		viper.SetDefault("agents.coder.model", models.VertexAIGemini25)
351		viper.SetDefault("agents.summarizer.model", models.VertexAIGemini25)
352		viper.SetDefault("agents.task.model", models.VertexAIGemini25Flash)
353		viper.SetDefault("agents.title.model", models.VertexAIGemini25Flash)
354		return
355	}
356}
357
358// hasAWSCredentials checks if AWS credentials are available in the environment.
359func hasAWSCredentials() bool {
360	// Check for explicit AWS credentials
361	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
362		return true
363	}
364
365	// Check for AWS profile
366	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
367		return true
368	}
369
370	// Check for AWS region
371	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
372		return true
373	}
374
375	// Check for container credentials (e.g. ECS task roles)
376	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
377		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
378		return true
379	}
380
381	return false
382}
383
384// hasVertexAICredentials checks if VertexAI credentials are available in the environment.
385func hasVertexAICredentials() bool {
386	// Check for explicit VertexAI parameters
387	if os.Getenv("VERTEXAI_PROJECT") != "" && os.Getenv("VERTEXAI_LOCATION") != "" {
388		return true
389	}
390	// Check for Google Cloud project and location
391	if os.Getenv("GOOGLE_CLOUD_PROJECT") != "" && (os.Getenv("GOOGLE_CLOUD_REGION") != "" || os.Getenv("GOOGLE_CLOUD_LOCATION") != "") {
392		return true
393	}
394	return false
395}
396
397// readConfig handles the result of reading a configuration file.
398func readConfig(err error) error {
399	if err == nil {
400		return nil
401	}
402
403	// It's okay if the config file doesn't exist
404	if _, ok := err.(viper.ConfigFileNotFoundError); ok {
405		return nil
406	}
407
408	return fmt.Errorf("failed to read config: %w", err)
409}
410
411// mergeLocalConfig loads and merges configuration from the local directory.
412func mergeLocalConfig(workingDir string) {
413	local := viper.New()
414	local.SetConfigName(fmt.Sprintf(".%s", appName))
415	local.SetConfigType("json")
416	local.AddConfigPath(workingDir)
417
418	// Merge local config if it exists
419	if err := local.ReadInConfig(); err == nil {
420		viper.MergeConfigMap(local.AllSettings())
421	}
422}
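// The effect is that a ./.crush.json in the working directory overlays the
// global configuration: keys it sets are intended to take precedence, while
// keys it omits keep their global or default values.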
423
424// applyDefaultValues sets default values for configuration fields that need processing.
425func applyDefaultValues() {
426	// Set default MCP type if not specified
427	for k, v := range cfg.MCPServers {
428		if v.Type == "" {
429			v.Type = MCPStdio
430			cfg.MCPServers[k] = v
431		}
432	}
433}
434
435// validateAgent checks an agent's model, provider, token limit, and reasoning settings, applying defaults where they are invalid.
436func validateAgent(cfg *Config, name AgentName, agent Agent) error {
437	// Check if model exists
438	model, modelExists := models.SupportedModels[agent.Model]
439	if !modelExists {
440		logging.Warn("unsupported model configured, reverting to default",
441			"agent", name,
442			"configured_model", agent.Model)
443
444		// Set default model based on available providers
445		if setDefaultModelForAgent(name) {
446			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
447		} else {
448			return fmt.Errorf("no valid provider available for agent %s", name)
449		}
450		return nil
451	}
452
453	// Check if provider for the model is configured
454	provider := model.Provider
455	providerCfg, providerExists := cfg.Providers[provider]
456
457	if !providerExists {
458		// Provider not configured, check if we have environment variables
459		apiKey := getProviderAPIKey(provider)
460		if apiKey == "" {
461			logging.Warn("provider not configured for model, reverting to default",
462				"agent", name,
463				"model", agent.Model,
464				"provider", provider)
465
466			// Set default model based on available providers
467			if setDefaultModelForAgent(name) {
468				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
469			} else {
470				return fmt.Errorf("no valid provider available for agent %s", name)
471			}
472		} else {
473			// Add provider with API key from environment
474			cfg.Providers[provider] = Provider{
475				APIKey: apiKey,
476			}
477			logging.Info("added provider from environment", "provider", provider)
478		}
479	} else if providerCfg.Disabled || providerCfg.APIKey == "" {
480		// Provider is disabled or has no API key
481		logging.Warn("provider is disabled or has no API key, reverting to default",
482			"agent", name,
483			"model", agent.Model,
484			"provider", provider)
485
486		// Set default model based on available providers
487		if setDefaultModelForAgent(name) {
488			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
489		} else {
490			return fmt.Errorf("no valid provider available for agent %s", name)
491		}
492	}
493
494	// Validate max tokens
495	if agent.MaxTokens <= 0 {
496		logging.Warn("invalid max tokens, setting to default",
497			"agent", name,
498			"model", agent.Model,
499			"max_tokens", agent.MaxTokens)
500
501		// Update the agent with default max tokens
502		updatedAgent := cfg.Agents[name]
503		if model.DefaultMaxTokens > 0 {
504			updatedAgent.MaxTokens = model.DefaultMaxTokens
505		} else {
506			updatedAgent.MaxTokens = MaxTokensFallbackDefault
507		}
508		cfg.Agents[name] = updatedAgent
509	} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
510		// Ensure max tokens doesn't exceed half the context window (reasonable limit)
511		logging.Warn("max tokens exceeds half the context window, adjusting",
512			"agent", name,
513			"model", agent.Model,
514			"max_tokens", agent.MaxTokens,
515			"context_window", model.ContextWindow)
516
517		// Update the agent with adjusted max tokens
518		updatedAgent := cfg.Agents[name]
519		updatedAgent.MaxTokens = model.ContextWindow / 2
520		cfg.Agents[name] = updatedAgent
521	}
522
523	// Validate reasoning effort for models that support reasoning
524	if model.CanReason && (provider == models.ProviderOpenAI || provider == models.ProviderLocal) {
525		if agent.ReasoningEffort == "" {
526			// Set default reasoning effort for models that support it
527			logging.Info("setting default reasoning effort for model that supports reasoning",
528				"agent", name,
529				"model", agent.Model)
530
531			// Update the agent with default reasoning effort
532			updatedAgent := cfg.Agents[name]
533			updatedAgent.ReasoningEffort = "medium"
534			cfg.Agents[name] = updatedAgent
535		} else {
536			// Check if reasoning effort is valid (low, medium, high)
537			effort := strings.ToLower(agent.ReasoningEffort)
538			if effort != "low" && effort != "medium" && effort != "high" {
539				logging.Warn("invalid reasoning effort, setting to medium",
540					"agent", name,
541					"model", agent.Model,
542					"reasoning_effort", agent.ReasoningEffort)
543
544				// Update the agent with valid reasoning effort
545				updatedAgent := cfg.Agents[name]
546				updatedAgent.ReasoningEffort = "medium"
547				cfg.Agents[name] = updatedAgent
548			}
549		}
550	} else if !model.CanReason && agent.ReasoningEffort != "" {
551		// Model doesn't support reasoning but reasoning effort is set
552		logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
553			"agent", name,
554			"model", agent.Model,
555			"reasoning_effort", agent.ReasoningEffort)
556
557		// Update the agent to remove reasoning effort
558		updatedAgent := cfg.Agents[name]
559		updatedAgent.ReasoningEffort = ""
560		cfg.Agents[name] = updatedAgent
561	}
562
563	return nil
564}
565
566// Validate checks if the configuration is valid and applies defaults where needed.
567func Validate() error {
568	if cfg == nil {
569		return fmt.Errorf("config not loaded")
570	}
571
572	// Validate agent models
573	for name, agent := range cfg.Agents {
574		if err := validateAgent(cfg, name, agent); err != nil {
575			return err
576		}
577	}
578
579	// Validate providers
580	for provider, providerCfg := range cfg.Providers {
581		if providerCfg.APIKey == "" && !providerCfg.Disabled {
582			logging.Warn("provider has no API key, marking as disabled", "provider", provider)
583			providerCfg.Disabled = true
584			cfg.Providers[provider] = providerCfg
585		}
586	}
587
588	// Validate LSP configurations
589	for language, lspConfig := range cfg.LSP {
590		if lspConfig.Command == "" && !lspConfig.Disabled {
591			logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
592			lspConfig.Disabled = true
593			cfg.LSP[language] = lspConfig
594		}
595	}
596
597	return nil
598}
599
600// getProviderAPIKey gets the API key for a provider from environment variables
601func getProviderAPIKey(provider models.ModelProvider) string {
602	switch provider {
603	case models.ProviderAnthropic:
604		return os.Getenv("ANTHROPIC_API_KEY")
605	case models.ProviderOpenAI:
606		return os.Getenv("OPENAI_API_KEY")
607	case models.ProviderGemini:
608		return os.Getenv("GEMINI_API_KEY")
609	case models.ProviderGROQ:
610		return os.Getenv("GROQ_API_KEY")
611	case models.ProviderAzure:
612		return os.Getenv("AZURE_OPENAI_API_KEY")
613	case models.ProviderOpenRouter:
614		return os.Getenv("OPENROUTER_API_KEY")
615	case models.ProviderBedrock:
616		if hasAWSCredentials() {
617			return "aws-credentials-available"
618		}
619	case models.ProviderVertexAI:
620		if hasVertexAICredentials() {
621			return "vertex-ai-credentials-available"
622		}
623	}
624	return ""
625}
626
627// setDefaultModelForAgent sets a default model for an agent based on available providers
628func setDefaultModelForAgent(agent AgentName) bool {
629	// Check providers in order of preference
630	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
631		maxTokens := int64(5000)
632		if agent == AgentTitle {
633			maxTokens = 80
634		}
635		cfg.Agents[agent] = Agent{
636			Model:     models.Claude37Sonnet,
637			MaxTokens: maxTokens,
638		}
639		return true
640	}
641
642	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
643		var model models.ModelID
644		maxTokens := int64(5000)
645		reasoningEffort := ""
646
647		switch agent {
648		case AgentTitle:
649			model = models.GPT41Mini
650			maxTokens = 80
651		case AgentTask:
652			model = models.GPT41Mini
653		default:
654			model = models.GPT41
655		}
656
657		// Check if model supports reasoning
658		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
659			reasoningEffort = "medium"
660		}
661
662		cfg.Agents[agent] = Agent{
663			Model:           model,
664			MaxTokens:       maxTokens,
665			ReasoningEffort: reasoningEffort,
666		}
667		return true
668	}
669
670	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
671		var model models.ModelID
672		maxTokens := int64(5000)
673		reasoningEffort := ""
674
675		switch agent {
676		case AgentTitle:
677			model = models.OpenRouterClaude35Haiku
678			maxTokens = 80
679		case AgentTask:
680			model = models.OpenRouterClaude37Sonnet
681		default:
682			model = models.OpenRouterClaude37Sonnet
683		}
684
685		// Check if model supports reasoning
686		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
687			reasoningEffort = "medium"
688		}
689
690		cfg.Agents[agent] = Agent{
691			Model:           model,
692			MaxTokens:       maxTokens,
693			ReasoningEffort: reasoningEffort,
694		}
695		return true
696	}
697
698	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
699		var model models.ModelID
700		maxTokens := int64(5000)
701
702		if agent == AgentTitle {
703			model = models.Gemini25Flash
704			maxTokens = 80
705		} else {
706			model = models.Gemini25
707		}
708
709		cfg.Agents[agent] = Agent{
710			Model:     model,
711			MaxTokens: maxTokens,
712		}
713		return true
714	}
715
716	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
717		maxTokens := int64(5000)
718		if agent == AgentTitle {
719			maxTokens = 80
720		}
721
722		cfg.Agents[agent] = Agent{
723			Model:     models.QWENQwq,
724			MaxTokens: maxTokens,
725		}
726		return true
727	}
728
729	if hasAWSCredentials() {
730		maxTokens := int64(5000)
731		if agent == AgentTitle {
732			maxTokens = 80
733		}
734
735		cfg.Agents[agent] = Agent{
736			Model:           models.BedrockClaude37Sonnet,
737			MaxTokens:       maxTokens,
738			ReasoningEffort: "medium", // Claude models support reasoning
739		}
740		return true
741	}
742
743	if hasVertexAICredentials() {
744		var model models.ModelID
745		maxTokens := int64(5000)
746
747		if agent == AgentTitle {
748			model = models.VertexAIGemini25Flash
749			maxTokens = 80
750		} else {
751			model = models.VertexAIGemini25
752		}
753
754		cfg.Agents[agent] = Agent{
755			Model:     model,
756			MaxTokens: maxTokens,
757		}
758		return true
759	}
760
761	return false
762}
763
764func updateCfgFile(updateCfg func(config *Config)) error {
765	if cfg == nil {
766		return fmt.Errorf("config not loaded")
767	}
768
769	// Get the config file path
770	configFile := viper.ConfigFileUsed()
771	var configData []byte
772	if configFile == "" {
773		homeDir, err := os.UserHomeDir()
774		if err != nil {
775			return fmt.Errorf("failed to get home directory: %w", err)
776		}
777		configFile = filepath.Join(homeDir, fmt.Sprintf(".%s.json", appName))
778		logging.Info("config file not found, creating new one", "path", configFile)
779		configData = []byte(`{}`)
780	} else {
781		// Read the existing config file
782		data, err := os.ReadFile(configFile)
783		if err != nil {
784			return fmt.Errorf("failed to read config file: %w", err)
785		}
786		configData = data
787	}
788
789	// Parse the JSON
790	var userCfg *Config
791	if err := json.Unmarshal(configData, &userCfg); err != nil {
792		return fmt.Errorf("failed to parse config file: %w", err)
793	}
794
795	updateCfg(userCfg)
796
797	// Write the updated config back to file
798	updatedData, err := json.MarshalIndent(userCfg, "", "  ")
799	if err != nil {
800		return fmt.Errorf("failed to marshal config: %w", err)
801	}
802
803	if err := os.WriteFile(configFile, updatedData, 0o644); err != nil {
804		return fmt.Errorf("failed to write config file: %w", err)
805	}
806
807	return nil
808}
809
810// Get returns the current configuration.
811// It returns nil if Load has not been called yet.
812func Get() *Config {
813	return cfg
814}
815
816// WorkingDirectory returns the current working directory from the configuration.
817func WorkingDirectory() string {
818	if cfg == nil {
819		panic("config not loaded")
820	}
821	return cfg.WorkingDir
822}
823
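// UpdateAgentModel switches the given agent to modelID, re-validates the agent,
// and persists the change to the config file. If validation fails, the previous
// agent configuration is restored and an error is returned.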
824func UpdateAgentModel(agentName AgentName, modelID models.ModelID) error {
825	if cfg == nil {
826		panic("config not loaded")
827	}
828
829	existingAgentCfg := cfg.Agents[agentName]
830
831	model, ok := models.SupportedModels[modelID]
832	if !ok {
833		return fmt.Errorf("model %s not supported", modelID)
834	}
835
836	maxTokens := existingAgentCfg.MaxTokens
837	if model.DefaultMaxTokens > 0 {
838		maxTokens = model.DefaultMaxTokens
839	}
840
841	newAgentCfg := Agent{
842		Model:           modelID,
843		MaxTokens:       maxTokens,
844		ReasoningEffort: existingAgentCfg.ReasoningEffort,
845	}
846	cfg.Agents[agentName] = newAgentCfg
847
848	if err := validateAgent(cfg, agentName, newAgentCfg); err != nil {
849		// revert config update on failure
850		cfg.Agents[agentName] = existingAgentCfg
851		return fmt.Errorf("failed to update agent model: %w", err)
852	}
853
854	return updateCfgFile(func(config *Config) {
855		if config.Agents == nil {
856			config.Agents = make(map[AgentName]Agent)
857		}
858		config.Agents[agentName] = newAgentCfg
859	})
860}
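// Usage sketch (the model ID is only an illustration; any key of
// models.SupportedModels is valid):
//
//	if err := config.UpdateAgentModel(config.AgentCoder, models.GPT41); err != nil {
//		// handle the error; the previous agent configuration has been restored
//	}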
861
862// UpdateTheme updates the theme in the configuration and writes it to the config file.
863func UpdateTheme(themeName string) error {
864	if cfg == nil {
865		return fmt.Errorf("config not loaded")
866	}
867
868	// Update the in-memory config
869	cfg.TUI.Theme = themeName
870
871	// Update the file config
872	return updateCfgFile(func(config *Config) {
873		config.TUI.Theme = themeName
874	})
875}