config.go

// Package config manages application configuration from various sources.
package config

import (
	"encoding/json"
	"fmt"
	"log/slog"
	"os"
	"path/filepath"
	"strings"

	"github.com/charmbracelet/crush/internal/llm/models"
	"github.com/charmbracelet/crush/internal/logging"
	"github.com/spf13/viper"
)

// MCPType defines the type of MCP (Model Context Protocol) server.
type MCPType string

// Supported MCP types.
const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)

// MCPServer defines the configuration for a Model Context Protocol server.
type MCPServer struct {
	Command string            `json:"command"`
	Env     []string          `json:"env"`
	Args    []string          `json:"args"`
	Type    MCPType           `json:"type"`
	URL     string            `json:"url"`
	Headers map[string]string `json:"headers"`
}

// AgentName identifies a built-in agent role.
type AgentName string

// Supported agent names.
const (
	AgentCoder      AgentName = "coder"
	AgentSummarizer AgentName = "summarizer"
	AgentTask       AgentName = "task"
	AgentTitle      AgentName = "title"
)

// Agent defines configuration for different LLM models and their token limits.
type Agent struct {
	Model           models.ModelID `json:"model"`
	MaxTokens       int64          `json:"maxTokens"`
	ReasoningEffort string         `json:"reasoningEffort"` // For OpenAI models: low, medium, or high
}

// Provider defines configuration for an LLM provider.
type Provider struct {
	APIKey   string `json:"apiKey"`
	Disabled bool   `json:"disabled"`
}

// Data defines storage configuration.
type Data struct {
	Directory string `json:"directory,omitempty"`
}

// LSPConfig defines configuration for Language Server Protocol integration.
type LSPConfig struct {
	Disabled bool     `json:"disabled"`
	Command  string   `json:"command"`
	Args     []string `json:"args"`
	Options  any      `json:"options"`
}

// TUIConfig defines the configuration for the Terminal User Interface.
type TUIConfig struct {
	Theme string `json:"theme,omitempty"`
}

// Config is the main configuration structure for the application.
type Config struct {
	Data         Data                              `json:"data"`
	WorkingDir   string                            `json:"wd,omitempty"`
	MCPServers   map[string]MCPServer              `json:"mcpServers,omitempty"`
	Providers    map[models.ModelProvider]Provider `json:"providers,omitempty"`
	LSP          map[string]LSPConfig              `json:"lsp,omitempty"`
	Agents       map[AgentName]Agent               `json:"agents,omitempty"`
	Debug        bool                              `json:"debug,omitempty"`
	DebugLSP     bool                              `json:"debugLSP,omitempty"`
	ContextPaths []string                          `json:"contextPaths,omitempty"`
	TUI          TUIConfig                         `json:"tui"`
	AutoCompact  bool                              `json:"autoCompact,omitempty"`
}

// Application constants
const (
	defaultDataDirectory = ".crush"
	defaultLogLevel      = "info"
	appName              = "crush"

	MaxTokensFallbackDefault = 4096
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
}

// Global configuration instance
var cfg *Config

// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and log level is set to debug.
// It returns an error if configuration loading fails.
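// Subsequent calls return the already-loaded configuration. A minimal,
// illustrative call site (hypothetical, not part of this package) might look like:
//
//	cfg, err := config.Load(".", false)
//	if err != nil {
//		log.Fatal(err)
//	}
//	fmt.Println(cfg.WorkingDir)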
func Load(workingDir string, debug bool) (*Config, error) {
	if cfg != nil {
		return cfg, nil
	}

	cfg = &Config{
		WorkingDir: workingDir,
		MCPServers: make(map[string]MCPServer),
		Providers:  make(map[models.ModelProvider]Provider),
		LSP:        make(map[string]LSPConfig),
	}

	configureViper()
	setDefaults(debug)

	// Read global config
	if err := readConfig(viper.ReadInConfig()); err != nil {
		return cfg, err
	}

	// Load and merge local config
	mergeLocalConfig(workingDir)

	setProviderDefaults()

	// Apply configuration to the struct
	if err := viper.Unmarshal(cfg); err != nil {
		return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
	}

	applyDefaultValues()
	defaultLevel := slog.LevelInfo
	if cfg.Debug {
		defaultLevel = slog.LevelDebug
	}
	if os.Getenv("CRUSH_DEV_DEBUG") == "true" {
		loggingFile := filepath.Join(cfg.Data.Directory, "debug.log")

		// Create the log file and its parent directory if they do not exist.
		if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
			if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
				return cfg, fmt.Errorf("failed to create directory: %w", err)
			}
			f, err := os.Create(loggingFile)
			if err != nil {
				return cfg, fmt.Errorf("failed to create log file: %w", err)
			}
			f.Close()
		}

		sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
		if err != nil {
			return cfg, fmt.Errorf("failed to open log file: %w", err)
		}
		// Configure logger
		logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	} else {
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	}

	// Validate configuration
	if err := Validate(); err != nil {
		return cfg, fmt.Errorf("config validation failed: %w", err)
	}

	if cfg.Agents == nil {
		cfg.Agents = make(map[AgentName]Agent)
	}

	// Override the max tokens for the title agent, which only needs a short output.
	cfg.Agents[AgentTitle] = Agent{
		Model:     cfg.Agents[AgentTitle].Model,
		MaxTokens: 80,
	}
	return cfg, nil
}

// configureViper sets up viper's configuration paths and environment variables.
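// With the environment prefix set below, variables such as CRUSH_DEBUG can also
// be picked up by viper (assuming viper's default key mapping).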
func configureViper() {
	viper.SetConfigName(fmt.Sprintf(".%s", appName))
	viper.SetConfigType("json")

	// Unix-style paths
	viper.AddConfigPath("$HOME")
	viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
	viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))

	// Windows-style paths
	viper.AddConfigPath("$USERPROFILE")
	viper.AddConfigPath(fmt.Sprintf("$APPDATA/%s", appName))
	viper.AddConfigPath(fmt.Sprintf("$LOCALAPPDATA/%s", appName))

	viper.SetEnvPrefix(strings.ToUpper(appName))
	viper.AutomaticEnv()
}

// setDefaults configures default values for configuration options.
func setDefaults(debug bool) {
	viper.SetDefault("data.directory", defaultDataDirectory)
	viper.SetDefault("contextPaths", defaultContextPaths)
	viper.SetDefault("tui.theme", "crush")
	viper.SetDefault("autoCompact", true)

	if debug {
		viper.SetDefault("debug", true)
		viper.Set("log.level", "debug")
	} else {
		viper.SetDefault("debug", false)
		viper.SetDefault("log.level", defaultLogLevel)
	}
}

// setProviderDefaults configures LLM provider defaults based on the providers
// found in environment variables and the configuration file.
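// Note that viper.SetDefault only supplies a fallback; values set explicitly in
// the config file or environment still take precedence.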
func setProviderDefaults() {
	// Set all API keys we can find in the environment
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.anthropic.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openai.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.gemini.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.groq.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openrouter.apiKey", apiKey)
	}
	if apiKey := os.Getenv("XAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.xai.apiKey", apiKey)
	}
	if endpoint := os.Getenv("AZURE_OPENAI_ENDPOINT"); endpoint != "" {
		// The API key may be empty when using Entra ID credentials; that's okay.
		viper.SetDefault("providers.azure.apiKey", os.Getenv("AZURE_OPENAI_API_KEY"))
	}

	// Use this order to set the default models
	// 1. Anthropic
	// 2. OpenAI
	// 3. Google Gemini
	// 4. Groq
	// 5. OpenRouter
	// 6. XAI
	// 7. AWS Bedrock
	// 8. Azure
	// 9. Google Cloud VertexAI

	// Anthropic configuration
	if key := viper.GetString("providers.anthropic.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.Claude4Sonnet)
		viper.SetDefault("agents.summarizer.model", models.Claude4Sonnet)
		viper.SetDefault("agents.task.model", models.Claude4Sonnet)
		viper.SetDefault("agents.title.model", models.Claude4Sonnet)
		return
	}

	// OpenAI configuration
	if key := viper.GetString("providers.openai.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.GPT41)
		viper.SetDefault("agents.summarizer.model", models.GPT41)
		viper.SetDefault("agents.task.model", models.GPT41Mini)
		viper.SetDefault("agents.title.model", models.GPT41Mini)
		return
	}

	// Google Gemini configuration
	if key := viper.GetString("providers.gemini.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.Gemini25)
		viper.SetDefault("agents.summarizer.model", models.Gemini25)
		viper.SetDefault("agents.task.model", models.Gemini25Flash)
		viper.SetDefault("agents.title.model", models.Gemini25Flash)
		return
	}

	// Groq configuration
	if key := viper.GetString("providers.groq.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.QWENQwq)
		viper.SetDefault("agents.summarizer.model", models.QWENQwq)
		viper.SetDefault("agents.task.model", models.QWENQwq)
		viper.SetDefault("agents.title.model", models.QWENQwq)
		return
	}

	// OpenRouter configuration
	if key := viper.GetString("providers.openrouter.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.OpenRouterClaude35Haiku)
		return
	}

	// XAI configuration
	if key := viper.GetString("providers.xai.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.summarizer.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.task.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.title.model", models.XAiGrok3MiniFastBeta)
		return
	}

	// AWS Bedrock configuration
	if hasAWSCredentials() {
		viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
		return
	}

	// Azure OpenAI configuration
	if os.Getenv("AZURE_OPENAI_ENDPOINT") != "" {
		viper.SetDefault("agents.coder.model", models.AzureGPT41)
		viper.SetDefault("agents.summarizer.model", models.AzureGPT41)
		viper.SetDefault("agents.task.model", models.AzureGPT41Mini)
		viper.SetDefault("agents.title.model", models.AzureGPT41Mini)
		return
	}

	// Google Cloud VertexAI configuration
	if hasVertexAICredentials() {
		viper.SetDefault("agents.coder.model", models.VertexAIGemini25)
		viper.SetDefault("agents.summarizer.model", models.VertexAIGemini25)
		viper.SetDefault("agents.task.model", models.VertexAIGemini25Flash)
		viper.SetDefault("agents.title.model", models.VertexAIGemini25Flash)
		return
	}
}

// hasAWSCredentials checks if AWS credentials are available in the environment.
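// Note: this only inspects environment variables; it does not verify that the
// credentials are valid or that any default credential chain is reachable.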
func hasAWSCredentials() bool {
	// Check for explicit AWS credentials
	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
		return true
	}

	// Check for AWS profile
	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
		return true
	}

	// Check for AWS region
	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
		return true
	}

	// Check for container credentials (e.g. ECS task roles)
	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
		return true
	}

	return false
}

// hasVertexAICredentials checks if VertexAI credentials are available in the environment.
func hasVertexAICredentials() bool {
	// Check for explicit VertexAI parameters
	if os.Getenv("VERTEXAI_PROJECT") != "" && os.Getenv("VERTEXAI_LOCATION") != "" {
		return true
	}
	// Check for Google Cloud project and location
	if os.Getenv("GOOGLE_CLOUD_PROJECT") != "" && (os.Getenv("GOOGLE_CLOUD_REGION") != "" || os.Getenv("GOOGLE_CLOUD_LOCATION") != "") {
		return true
	}
	return false
}

// readConfig handles the result of reading a configuration file.
func readConfig(err error) error {
	if err == nil {
		return nil
	}

	// It's okay if the config file doesn't exist
	if _, ok := err.(viper.ConfigFileNotFoundError); ok {
		return nil
	}

	return fmt.Errorf("failed to read config: %w", err)
}

// mergeLocalConfig loads and merges configuration from the local directory.
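// Keys present in the project-local file are merged over the global
// configuration, so local settings are intended to take precedence.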
func mergeLocalConfig(workingDir string) {
	local := viper.New()
	local.SetConfigName(fmt.Sprintf(".%s", appName))
	local.SetConfigType("json")
	local.AddConfigPath(workingDir)

	// Merge local config if it exists
	if err := local.ReadInConfig(); err == nil {
		viper.MergeConfigMap(local.AllSettings())
	}
}

// applyDefaultValues sets default values for configuration fields that need processing.
func applyDefaultValues() {
	// Set default MCP type if not specified
	for k, v := range cfg.MCPServers {
		if v.Type == "" {
			v.Type = MCPStdio
			cfg.MCPServers[k] = v
		}
	}
}

// validateAgent validates an agent's model and provider, ensuring they are supported.
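// Validation is best-effort: invalid or missing settings are replaced with
// defaults where possible, and an error is returned only when no usable
// provider is available for the agent.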
func validateAgent(cfg *Config, name AgentName, agent Agent) error {
	// Check if model exists
	model, modelExists := models.SupportedModels[agent.Model]
	if !modelExists {
		logging.Warn("unsupported model configured, reverting to default",
			"agent", name,
			"configured_model", agent.Model)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
		return nil
	}

	// Check if provider for the model is configured
	provider := model.Provider
	providerCfg, providerExists := cfg.Providers[provider]

	if !providerExists {
		// Provider not configured, check if we have environment variables
		apiKey := getProviderAPIKey(provider)
		if apiKey == "" {
			logging.Warn("provider not configured for model, reverting to default",
				"agent", name,
				"model", agent.Model,
				"provider", provider)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
		} else {
			// Add provider with API key from environment
			cfg.Providers[provider] = Provider{
				APIKey: apiKey,
			}
			logging.Info("added provider from environment", "provider", provider)
		}
	} else if providerCfg.Disabled || providerCfg.APIKey == "" {
		// Provider is disabled or has no API key
		logging.Warn("provider is disabled or has no API key, reverting to default",
			"agent", name,
			"model", agent.Model,
			"provider", provider)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
	}

	// Validate max tokens
	if agent.MaxTokens <= 0 {
		logging.Warn("invalid max tokens, setting to default",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens)

		// Update the agent with default max tokens
		updatedAgent := cfg.Agents[name]
		if model.DefaultMaxTokens > 0 {
			updatedAgent.MaxTokens = model.DefaultMaxTokens
		} else {
			updatedAgent.MaxTokens = MaxTokensFallbackDefault
		}
		cfg.Agents[name] = updatedAgent
	} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
		// Ensure max tokens doesn't exceed half the context window (reasonable limit)
		logging.Warn("max tokens exceeds half the context window, adjusting",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens,
			"context_window", model.ContextWindow)

		// Update the agent with adjusted max tokens
		updatedAgent := cfg.Agents[name]
		updatedAgent.MaxTokens = model.ContextWindow / 2
		cfg.Agents[name] = updatedAgent
	}

	// Validate reasoning effort for models that support reasoning
	if model.CanReason && (provider == models.ProviderOpenAI || provider == models.ProviderLocal) {
		if agent.ReasoningEffort == "" {
			// Set default reasoning effort for models that support it
			logging.Info("setting default reasoning effort for model that supports reasoning",
				"agent", name,
				"model", agent.Model)

			// Update the agent with default reasoning effort
			updatedAgent := cfg.Agents[name]
			updatedAgent.ReasoningEffort = "medium"
			cfg.Agents[name] = updatedAgent
		} else {
			// Check if reasoning effort is valid (low, medium, high)
			effort := strings.ToLower(agent.ReasoningEffort)
			if effort != "low" && effort != "medium" && effort != "high" {
				logging.Warn("invalid reasoning effort, setting to medium",
					"agent", name,
					"model", agent.Model,
					"reasoning_effort", agent.ReasoningEffort)

				// Update the agent with valid reasoning effort
				updatedAgent := cfg.Agents[name]
				updatedAgent.ReasoningEffort = "medium"
				cfg.Agents[name] = updatedAgent
			}
		}
	} else if !model.CanReason && agent.ReasoningEffort != "" {
		// Model doesn't support reasoning but reasoning effort is set
		logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
			"agent", name,
			"model", agent.Model,
			"reasoning_effort", agent.ReasoningEffort)

		// Update the agent to remove reasoning effort
		updatedAgent := cfg.Agents[name]
		updatedAgent.ReasoningEffort = ""
		cfg.Agents[name] = updatedAgent
	}

	return nil
}

// Validate checks if the configuration is valid and applies defaults where needed.
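// Validate mutates the loaded configuration in place: providers without API
// keys and LSP entries without commands are marked disabled rather than rejected.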
func Validate() error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Validate agent models
	for name, agent := range cfg.Agents {
		if err := validateAgent(cfg, name, agent); err != nil {
			return err
		}
	}

	// Validate providers
	for provider, providerCfg := range cfg.Providers {
		if providerCfg.APIKey == "" && !providerCfg.Disabled {
			logging.Warn("provider has no API key, marking as disabled", "provider", provider)
			providerCfg.Disabled = true
			cfg.Providers[provider] = providerCfg
		}
	}

	// Validate LSP configurations
	for language, lspConfig := range cfg.LSP {
		if lspConfig.Command == "" && !lspConfig.Disabled {
			logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
			lspConfig.Disabled = true
			cfg.LSP[language] = lspConfig
		}
	}

	return nil
}

// getProviderAPIKey gets the API key for a provider from environment variables.
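// For Bedrock and VertexAI, which are not configured with a single API key, a
// non-empty placeholder string is returned when credentials appear to be available.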
func getProviderAPIKey(provider models.ModelProvider) string {
	switch provider {
	case models.ProviderAnthropic:
		return os.Getenv("ANTHROPIC_API_KEY")
	case models.ProviderOpenAI:
		return os.Getenv("OPENAI_API_KEY")
	case models.ProviderGemini:
		return os.Getenv("GEMINI_API_KEY")
	case models.ProviderGROQ:
		return os.Getenv("GROQ_API_KEY")
	case models.ProviderAzure:
		return os.Getenv("AZURE_OPENAI_API_KEY")
	case models.ProviderOpenRouter:
		return os.Getenv("OPENROUTER_API_KEY")
	case models.ProviderBedrock:
		if hasAWSCredentials() {
			return "aws-credentials-available"
		}
	case models.ProviderVertexAI:
		if hasVertexAICredentials() {
			return "vertex-ai-credentials-available"
		}
	}
	return ""
}

// setDefaultModelForAgent sets a default model for an agent based on available providers.
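// The preference order here is similar to setProviderDefaults, although XAI and
// Azure are not considered by this function.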
func setDefaultModelForAgent(agent AgentName) bool {
	// Check providers in order of preference
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:     models.Claude37Sonnet,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.GPT41Mini
			maxTokens = 80
		case AgentTask:
			model = models.GPT41Mini
		default:
			model = models.GPT41
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.OpenRouterClaude35Haiku
			maxTokens = 80
		case AgentTask:
			model = models.OpenRouterClaude37Sonnet
		default:
			model = models.OpenRouterClaude37Sonnet
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.Gemini25Flash
			maxTokens = 80
		} else {
			model = models.Gemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:     models.QWENQwq,
			MaxTokens: maxTokens,
		}
		return true
	}

	if hasAWSCredentials() {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:           models.BedrockClaude37Sonnet,
			MaxTokens:       maxTokens,
			ReasoningEffort: "medium", // Claude models support reasoning
		}
		return true
	}

	if hasVertexAICredentials() {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.VertexAIGemini25Flash
			maxTokens = 80
		} else {
			model = models.VertexAIGemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	return false
}

// updateCfgFile loads the user's config file, applies updateCfg to it, and
// writes the result back to disk.
func updateCfgFile(updateCfg func(config *Config)) error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Get the config file path
	configFile := viper.ConfigFileUsed()
	var configData []byte
	if configFile == "" {
		homeDir, err := os.UserHomeDir()
		if err != nil {
			return fmt.Errorf("failed to get home directory: %w", err)
		}
		configFile = filepath.Join(homeDir, fmt.Sprintf(".%s.json", appName))
		logging.Info("config file not found, creating new one", "path", configFile)
		configData = []byte(`{}`)
	} else {
		// Read the existing config file
		data, err := os.ReadFile(configFile)
		if err != nil {
			return fmt.Errorf("failed to read config file: %w", err)
		}
		configData = data
	}

	// Parse the JSON
	var userCfg *Config
	if err := json.Unmarshal(configData, &userCfg); err != nil {
		return fmt.Errorf("failed to parse config file: %w", err)
	}

	updateCfg(userCfg)

	// Write the updated config back to file
	updatedData, err := json.MarshalIndent(userCfg, "", "  ")
	if err != nil {
		return fmt.Errorf("failed to marshal config: %w", err)
	}

	if err := os.WriteFile(configFile, updatedData, 0o644); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}

	return nil
}

// Get returns the current configuration.
// It's safe to call this function multiple times; it returns nil if Load has
// not been called yet.
func Get() *Config {
	return cfg
}

// WorkingDirectory returns the current working directory from the configuration.
func WorkingDirectory() string {
	if cfg == nil {
		panic("config not loaded")
	}
	return cfg.WorkingDir
}

// UpdateAgentModel updates the model for the given agent, validates the new
// configuration, and persists the change to the config file.
func UpdateAgentModel(agentName AgentName, modelID models.ModelID) error {
	if cfg == nil {
		panic("config not loaded")
	}

	existingAgentCfg := cfg.Agents[agentName]

	model, ok := models.SupportedModels[modelID]
	if !ok {
		return fmt.Errorf("model %s not supported", modelID)
	}

	maxTokens := existingAgentCfg.MaxTokens
	if model.DefaultMaxTokens > 0 {
		maxTokens = model.DefaultMaxTokens
	}

	newAgentCfg := Agent{
		Model:           modelID,
		MaxTokens:       maxTokens,
		ReasoningEffort: existingAgentCfg.ReasoningEffort,
	}
	cfg.Agents[agentName] = newAgentCfg

	if err := validateAgent(cfg, agentName, newAgentCfg); err != nil {
		// Revert the in-memory update on failure
		cfg.Agents[agentName] = existingAgentCfg
		return fmt.Errorf("failed to update agent model: %w", err)
	}

	return updateCfgFile(func(config *Config) {
		if config.Agents == nil {
			config.Agents = make(map[AgentName]Agent)
		}
		config.Agents[agentName] = newAgentCfg
	})
}

// UpdateTheme updates the theme in the configuration and writes it to the config file.
func UpdateTheme(themeName string) error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Update the in-memory config
	cfg.TUI.Theme = themeName

	// Update the file config
	return updateCfgFile(func(config *Config) {
		config.TUI.Theme = themeName
	})
}