// Package config manages application configuration from various sources.
package config

import (
	"encoding/json"
	"fmt"
	"log/slog"
	"os"
	"path/filepath"
	"strings"

	"github.com/charmbracelet/crush/internal/llm/models"
	"github.com/charmbracelet/crush/internal/logging"
	"github.com/spf13/afero"
	"github.com/spf13/viper"
)

// MCPType defines the type of MCP (Model Context Protocol) server.
type MCPType string

// Supported MCP types
const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)

// MCPServer defines the configuration for a Model Context Protocol server.
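// An entry under "mcpServers" in the JSON config might look like the following
// (server names, the command, and the URL here are purely illustrative):
//
//	"mcpServers": {
//	  "local-tools": {
//	    "type": "stdio",
//	    "command": "my-mcp-server",
//	    "args": ["--stdio"],
//	    "env": ["FOO=bar"]
//	  },
//	  "remote-tools": {
//	    "type": "sse",
//	    "url": "https://example.com/mcp",
//	    "headers": {"Authorization": "Bearer <token>"}
//	  }
//	}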
type MCPServer struct {
	Command string            `json:"command"`
	Env     []string          `json:"env"`
	Args    []string          `json:"args"`
	Type    MCPType           `json:"type"`
	URL     string            `json:"url"`
	Headers map[string]string `json:"headers"`
}

// AgentName identifies one of the application's built-in agents.
type AgentName string

const (
	AgentCoder      AgentName = "coder"
	AgentSummarizer AgentName = "summarizer"
	AgentTask       AgentName = "task"
	AgentTitle      AgentName = "title"
)

// Agent defines the model, token limit, and reasoning effort used by a single agent.
type Agent struct {
	Model           models.ModelID `json:"model"`
	MaxTokens       int64          `json:"maxTokens"`
	ReasoningEffort string         `json:"reasoningEffort"` // For OpenAI models: low, medium, or high
}

// Provider defines configuration for an LLM provider.
type Provider struct {
	APIKey   string `json:"apiKey"`
	Disabled bool   `json:"disabled"`
}

// Data defines storage configuration.
type Data struct {
	Directory string `json:"directory,omitempty"`
}

// LSPConfig defines configuration for Language Server Protocol integration.
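// For example (the server name and command below are placeholders, not defaults):
//
//	"lsp": {
//	  "gopls": {
//	    "command": "gopls",
//	    "args": ["serve"]
//	  }
//	}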
type LSPConfig struct {
	Disabled bool     `json:"disabled"`
	Command  string   `json:"command"`
	Args     []string `json:"args"`
	Options  any      `json:"options"`
}

// TUIConfig defines the configuration for the Terminal User Interface.
type TUIConfig struct {
	Theme string `json:"theme,omitempty"`
}

// Config is the main configuration structure for the application.
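// A small global config file could look like this (values are illustrative; the
// model ID is a placeholder for one of the supported model identifiers):
//
//	{
//	  "data": {"directory": ".crush"},
//	  "providers": {
//	    "anthropic": {"apiKey": "<key>"}
//	  },
//	  "agents": {
//	    "coder": {"model": "<model-id>", "maxTokens": 5000}
//	  },
//	  "tui": {"theme": "crush"},
//	  "autoCompact": true
//	}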
type Config struct {
	Data         Data                                  `json:"data"`
	WorkingDir   string                                `json:"wd,omitempty"`
	MCPServers   map[string]MCPServer                  `json:"mcpServers,omitempty"`
	Providers    map[models.InferenceProvider]Provider `json:"providers,omitempty"`
	LSP          map[string]LSPConfig                  `json:"lsp,omitempty"`
	Agents       map[AgentName]Agent                   `json:"agents,omitempty"`
	Debug        bool                                  `json:"debug,omitempty"`
	DebugLSP     bool                                  `json:"debugLSP,omitempty"`
	ContextPaths []string                              `json:"contextPaths,omitempty"`
	TUI          TUIConfig                             `json:"tui"`
	AutoCompact  bool                                  `json:"autoCompact,omitempty"`
}

// Application constants
const (
	defaultDataDirectory = ".crush"
	defaultLogLevel      = "info"
	appName              = "crush"

	MaxTokensFallbackDefault = 4096
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"GEMINI.md",
	"gemini.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
}

// Global configuration instance
var cfg *Config

// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and log level is set to debug.
// It returns an error if configuration loading fails.
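// A minimal call site looks roughly like this:
//
//	cfg, err := config.Load(".", false)
//	if err != nil {
//		// handle the error
//	}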
func Load(workingDir string, debug bool) (*Config, error) {
	if cfg != nil {
		return cfg, nil
	}

	cfg = &Config{
		WorkingDir: workingDir,
		MCPServers: make(map[string]MCPServer),
		Providers:  make(map[models.InferenceProvider]Provider),
		LSP:        make(map[string]LSPConfig),
	}

	configureViper()
	setDefaults(debug)

	// Read global config
	if err := readConfig(viper.ReadInConfig()); err != nil {
		return cfg, err
	}

	// Load and merge local config
	mergeLocalConfig(workingDir)

	setProviderDefaults()

	// Apply configuration to the struct
	if err := viper.Unmarshal(cfg); err != nil {
		return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
	}

	applyDefaultValues()
	defaultLevel := slog.LevelInfo
	if cfg.Debug {
		defaultLevel = slog.LevelDebug
	}
	if os.Getenv("CRUSH_DEV_DEBUG") == "true" {
		loggingFile := filepath.Join(cfg.Data.Directory, "debug.log")

		// Create the log file (and its directory) if it does not exist yet
		if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
			if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
				return cfg, fmt.Errorf("failed to create directory: %w", err)
			}
			if _, err := os.Create(loggingFile); err != nil {
				return cfg, fmt.Errorf("failed to create log file: %w", err)
			}
		}

		sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
		if err != nil {
			return cfg, fmt.Errorf("failed to open log file: %w", err)
		}
		// Configure logger
		logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	} else {
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	}

	// Validate configuration
	if err := Validate(); err != nil {
		return cfg, fmt.Errorf("config validation failed: %w", err)
	}

	if cfg.Agents == nil {
		cfg.Agents = make(map[AgentName]Agent)
	}

	// Override the max tokens for the title agent
	cfg.Agents[AgentTitle] = Agent{
		Model:     cfg.Agents[AgentTitle].Model,
		MaxTokens: 80,
	}
	return cfg, nil
}

// configFinder locates appName.json (or .appName.json when dotPrefix is set)
// config files in the given search paths; it implements the finder interface
// that viper.WithFinder expects (see configureViper).
type configFinder struct {
	appName   string
	dotPrefix bool
	paths     []string
}

func (f configFinder) Find(fsys afero.Fs) ([]string, error) {
	var configFiles []string
	configName := fmt.Sprintf("%s.json", f.appName)
	if f.dotPrefix {
		configName = fmt.Sprintf(".%s.json", f.appName)
	}
	paths := []string{}
	for _, p := range f.paths {
		if p == "" {
			continue
		}
		paths = append(paths, os.ExpandEnv(p))
	}

	for _, path := range paths {
		// Skip paths that expanded to an empty string (unset environment variable).
		if path == "" {
			continue
		}

		configPath := filepath.Join(path, configName)
		if exists, err := afero.Exists(fsys, configPath); err == nil && exists {
			configFiles = append(configFiles, configPath)
		}
	}
	return configFiles, nil
}

// configureViper sets up viper's configuration paths and environment variables.
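// With appName "crush", the finders below resolve to locations such as (whichever
// of these environment variables are actually set):
//
//	$HOME/crush.json
//	$XDG_CONFIG_HOME/crush/crush.json
//	$HOME/.config/crush/crush.json
//	./.crush.json
//
// plus the Windows equivalents under $USERPROFILE, $APPDATA, and $LOCALAPPDATA.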
func configureViper() {
	viper.SetConfigType("json")

	// Create the three finders
	windowsFinder := configFinder{appName: appName, dotPrefix: false, paths: []string{
		"$USERPROFILE",
		fmt.Sprintf("$APPDATA/%s", appName),
		fmt.Sprintf("$LOCALAPPDATA/%s", appName),
	}}

	unixFinder := configFinder{appName: appName, dotPrefix: false, paths: []string{
		"$HOME",
		fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName),
		fmt.Sprintf("$HOME/.config/%s", appName),
	}}

	localFinder := configFinder{appName: appName, dotPrefix: true, paths: []string{
		".",
	}}

	// Use all finders with viper
	viper.SetOptions(viper.WithFinder(viper.Finders(windowsFinder, unixFinder, localFinder)))
	viper.SetEnvPrefix(strings.ToUpper(appName))
	viper.AutomaticEnv()
}

// setDefaults configures default values for configuration options.
func setDefaults(debug bool) {
	viper.SetDefault("data.directory", defaultDataDirectory)
	viper.SetDefault("contextPaths", defaultContextPaths)
	viper.SetDefault("tui.theme", "crush")
	viper.SetDefault("autoCompact", true)

	if debug {
		viper.SetDefault("debug", true)
		viper.Set("log.level", "debug")
	} else {
		viper.SetDefault("debug", false)
		viper.SetDefault("log.level", defaultLogLevel)
	}
}

// setProviderDefaults configures LLM provider and default-model settings based on
// the API keys and credentials found in the environment and in the configuration file.
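// For example, exporting only ANTHROPIC_API_KEY before starting the application
// gives every agent a Claude default, because Anthropic comes first in the
// preference order below.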
func setProviderDefaults() {
	// Set all API keys we can find in the environment
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.anthropic.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openai.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.gemini.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.groq.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openrouter.apiKey", apiKey)
	}
	if apiKey := os.Getenv("XAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.xai.apiKey", apiKey)
	}
	if endpoint := os.Getenv("AZURE_OPENAI_ENDPOINT"); endpoint != "" {
		// api-key may be empty when using Entra ID credentials – that's okay
		viper.SetDefault("providers.azure.apiKey", os.Getenv("AZURE_OPENAI_API_KEY"))
	}

	// Use this order to set the default models
	// 1. Anthropic
	// 2. OpenAI
	// 3. Google Gemini
	// 4. Groq
	// 5. OpenRouter
	// 6. XAI
	// 7. AWS Bedrock
	// 8. Azure
	// 9. Google Cloud VertexAI

	// Anthropic configuration
	if key := viper.GetString("providers.anthropic.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.Claude4Sonnet)
		viper.SetDefault("agents.summarizer.model", models.Claude4Sonnet)
		viper.SetDefault("agents.task.model", models.Claude4Sonnet)
		viper.SetDefault("agents.title.model", models.Claude4Sonnet)
		return
	}

	// OpenAI configuration
	if key := viper.GetString("providers.openai.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.GPT41)
		viper.SetDefault("agents.summarizer.model", models.GPT41)
		viper.SetDefault("agents.task.model", models.GPT41Mini)
		viper.SetDefault("agents.title.model", models.GPT41Mini)
		return
	}

	// Google Gemini configuration
	if key := viper.GetString("providers.gemini.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.Gemini25)
		viper.SetDefault("agents.summarizer.model", models.Gemini25)
		viper.SetDefault("agents.task.model", models.Gemini25Flash)
		viper.SetDefault("agents.title.model", models.Gemini25Flash)
		return
	}

	// Groq configuration
	if key := viper.GetString("providers.groq.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.QWENQwq)
		viper.SetDefault("agents.summarizer.model", models.QWENQwq)
		viper.SetDefault("agents.task.model", models.QWENQwq)
		viper.SetDefault("agents.title.model", models.QWENQwq)
		return
	}

	// OpenRouter configuration
	if key := viper.GetString("providers.openrouter.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.OpenRouterClaude35Haiku)
		return
	}

	// XAI configuration
	if key := viper.GetString("providers.xai.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.summarizer.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.task.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.title.model", models.XAiGrok3MiniFastBeta)
		return
	}

	// AWS Bedrock configuration
	if hasAWSCredentials() {
		viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
		return
	}

	// Azure OpenAI configuration
	if os.Getenv("AZURE_OPENAI_ENDPOINT") != "" {
		viper.SetDefault("agents.coder.model", models.AzureGPT41)
		viper.SetDefault("agents.summarizer.model", models.AzureGPT41)
		viper.SetDefault("agents.task.model", models.AzureGPT41Mini)
		viper.SetDefault("agents.title.model", models.AzureGPT41Mini)
		return
	}

	// Google Cloud VertexAI configuration
	if hasVertexAICredentials() {
		viper.SetDefault("agents.coder.model", models.VertexAIGemini25)
		viper.SetDefault("agents.summarizer.model", models.VertexAIGemini25)
		viper.SetDefault("agents.task.model", models.VertexAIGemini25Flash)
		viper.SetDefault("agents.title.model", models.VertexAIGemini25Flash)
		return
	}
}

// hasAWSCredentials checks if AWS credentials are available in the environment.
func hasAWSCredentials() bool {
	// Check for explicit AWS credentials
	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
		return true
	}

	// Check for AWS profile
	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
		return true
	}

	// Check for AWS region
	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
		return true
	}

	// Check for container credentials (e.g. ECS task roles)
	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
		return true
	}

	return false
}

// hasVertexAICredentials checks if VertexAI credentials are available in the environment.
func hasVertexAICredentials() bool {
	// Check for explicit VertexAI parameters
	if os.Getenv("VERTEXAI_PROJECT") != "" && os.Getenv("VERTEXAI_LOCATION") != "" {
		return true
	}
	// Check for Google Cloud project and location
	if os.Getenv("GOOGLE_CLOUD_PROJECT") != "" && (os.Getenv("GOOGLE_CLOUD_REGION") != "" || os.Getenv("GOOGLE_CLOUD_LOCATION") != "") {
		return true
	}
	return false
}

// readConfig handles the result of reading a configuration file.
func readConfig(err error) error {
	if err == nil {
		return nil
	}

	// It's okay if the config file doesn't exist
	if _, ok := err.(viper.ConfigFileNotFoundError); ok {
		return nil
	}

	return fmt.Errorf("failed to read config: %w", err)
}

// mergeLocalConfig loads and merges configuration from the local directory.
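// For example, a project-local .crush.json in the working directory such as
//
//	{"tui": {"theme": "light"}, "debug": true}
//
// takes precedence over the corresponding keys in the global config file
// (the theme name above is illustrative).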
func mergeLocalConfig(workingDir string) {
	local := viper.New()
	local.SetConfigName(fmt.Sprintf(".%s", appName))
	local.SetConfigType("json")
	local.AddConfigPath(workingDir)

	// Merge local config if it exists
	if err := local.ReadInConfig(); err == nil {
		viper.MergeConfigMap(local.AllSettings())
	}
}

// applyDefaultValues sets default values for configuration fields that need processing.
func applyDefaultValues() {
	// Set default MCP type if not specified
	for k, v := range cfg.MCPServers {
		if v.Type == "" {
			v.Type = MCPStdio
			cfg.MCPServers[k] = v
		}
	}
}

// validateAgent checks a single agent's configuration, reverting to sensible
// defaults where the configured model, provider, token limit, or reasoning
// effort is invalid or unavailable.
func validateAgent(cfg *Config, name AgentName, agent Agent) error {
	// Check if model exists
	model, modelExists := models.SupportedModels[agent.Model]
	if !modelExists {
		logging.Warn("unsupported model configured, reverting to default",
			"agent", name,
			"configured_model", agent.Model)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
		return nil
	}

	// Check if provider for the model is configured
	provider := model.Provider
	providerCfg, providerExists := cfg.Providers[provider]

	if !providerExists {
		// Provider not configured, check if we have environment variables
		apiKey := getProviderAPIKey(provider)
		if apiKey == "" {
			logging.Warn("provider not configured for model, reverting to default",
				"agent", name,
				"model", agent.Model,
				"provider", provider)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
		} else {
			// Add provider with API key from environment
			cfg.Providers[provider] = Provider{
				APIKey: apiKey,
			}
			logging.Info("added provider from environment", "provider", provider)
		}
	} else if providerCfg.Disabled || providerCfg.APIKey == "" {
		// Provider is disabled or has no API key
		logging.Warn("provider is disabled or has no API key, reverting to default",
			"agent", name,
			"model", agent.Model,
			"provider", provider)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
	}

	// Validate max tokens
	if agent.MaxTokens <= 0 {
		logging.Warn("invalid max tokens, setting to default",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens)

		// Update the agent with default max tokens
		updatedAgent := cfg.Agents[name]
		if model.DefaultMaxTokens > 0 {
			updatedAgent.MaxTokens = model.DefaultMaxTokens
		} else {
			updatedAgent.MaxTokens = MaxTokensFallbackDefault
		}
		cfg.Agents[name] = updatedAgent
	} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
		// Ensure max tokens doesn't exceed half the context window (reasonable limit)
		logging.Warn("max tokens exceeds half the context window, adjusting",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens,
			"context_window", model.ContextWindow)

		// Update the agent with adjusted max tokens
		updatedAgent := cfg.Agents[name]
		updatedAgent.MaxTokens = model.ContextWindow / 2
		cfg.Agents[name] = updatedAgent
	}

	// Validate reasoning effort for models that support reasoning
	if model.CanReason && (provider == models.ProviderOpenAI || provider == models.ProviderLocal) {
		if agent.ReasoningEffort == "" {
			// Set default reasoning effort for models that support it
			logging.Info("setting default reasoning effort for model that supports reasoning",
				"agent", name,
				"model", agent.Model)

			// Update the agent with default reasoning effort
			updatedAgent := cfg.Agents[name]
			updatedAgent.ReasoningEffort = "medium"
			cfg.Agents[name] = updatedAgent
		} else {
			// Check if reasoning effort is valid (low, medium, high)
			effort := strings.ToLower(agent.ReasoningEffort)
			if effort != "low" && effort != "medium" && effort != "high" {
				logging.Warn("invalid reasoning effort, setting to medium",
					"agent", name,
					"model", agent.Model,
					"reasoning_effort", agent.ReasoningEffort)

				// Update the agent with valid reasoning effort
				updatedAgent := cfg.Agents[name]
				updatedAgent.ReasoningEffort = "medium"
				cfg.Agents[name] = updatedAgent
			}
		}
	} else if !model.CanReason && agent.ReasoningEffort != "" {
		// Model doesn't support reasoning but reasoning effort is set
		logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
			"agent", name,
			"model", agent.Model,
			"reasoning_effort", agent.ReasoningEffort)

		// Update the agent to remove reasoning effort
		updatedAgent := cfg.Agents[name]
		updatedAgent.ReasoningEffort = ""
		cfg.Agents[name] = updatedAgent
	}

	return nil
}

// Validate checks if the configuration is valid and applies defaults where needed.
func Validate() error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Validate agent models
	for name, agent := range cfg.Agents {
		if err := validateAgent(cfg, name, agent); err != nil {
			return err
		}
	}

	// Validate providers
	for provider, providerCfg := range cfg.Providers {
		if providerCfg.APIKey == "" && !providerCfg.Disabled {
			logging.Warn("provider has no API key, marking as disabled", "provider", provider)
			providerCfg.Disabled = true
			cfg.Providers[provider] = providerCfg
		}
	}

	// Validate LSP configurations
	for language, lspConfig := range cfg.LSP {
		if lspConfig.Command == "" && !lspConfig.Disabled {
			logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
			lspConfig.Disabled = true
			cfg.LSP[language] = lspConfig
		}
	}

	return nil
}

// getProviderAPIKey gets the API key for a provider from environment variables
func getProviderAPIKey(provider models.InferenceProvider) string {
	switch provider {
	case models.ProviderAnthropic:
		return os.Getenv("ANTHROPIC_API_KEY")
	case models.ProviderOpenAI:
		return os.Getenv("OPENAI_API_KEY")
	case models.ProviderGemini:
		return os.Getenv("GEMINI_API_KEY")
	case models.ProviderGROQ:
		return os.Getenv("GROQ_API_KEY")
	case models.ProviderAzure:
		return os.Getenv("AZURE_OPENAI_API_KEY")
	case models.ProviderOpenRouter:
		return os.Getenv("OPENROUTER_API_KEY")
	case models.ProviderBedrock:
		if hasAWSCredentials() {
			return "aws-credentials-available"
		}
	case models.ProviderVertexAI:
		if hasVertexAICredentials() {
			return "vertex-ai-credentials-available"
		}
	}
	return ""
}

// setDefaultModelForAgent sets a default model for an agent based on available providers
func setDefaultModelForAgent(agent AgentName) bool {
	// Check providers in order of preference
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:     models.Claude37Sonnet,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.GPT41Mini
			maxTokens = 80
		case AgentTask:
			model = models.GPT41Mini
		default:
			model = models.GPT41
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.OpenRouterClaude35Haiku
			maxTokens = 80
		case AgentTask:
			model = models.OpenRouterClaude37Sonnet
		default:
			model = models.OpenRouterClaude37Sonnet
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.Gemini25Flash
			maxTokens = 80
		} else {
			model = models.Gemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:     models.QWENQwq,
			MaxTokens: maxTokens,
		}
		return true
	}

	if hasAWSCredentials() {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:           models.BedrockClaude37Sonnet,
			MaxTokens:       maxTokens,
			ReasoningEffort: "medium", // Claude models support reasoning
		}
		return true
	}

	if hasVertexAICredentials() {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.VertexAIGemini25Flash
			maxTokens = 80
		} else {
			model = models.VertexAIGemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	return false
}

// updateCfgFile reads the user's config file (creating one in the home directory
// if none is in use), applies updateCfg to it, and writes it back to disk.
func updateCfgFile(updateCfg func(config *Config)) error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Get the config file path
	configFile := viper.ConfigFileUsed()
	var configData []byte
	if configFile == "" {
		homeDir, err := os.UserHomeDir()
		if err != nil {
			return fmt.Errorf("failed to get home directory: %w", err)
		}
		configFile = filepath.Join(homeDir, fmt.Sprintf(".%s.json", appName))
		logging.Info("config file not found, creating new one", "path", configFile)
		configData = []byte(`{}`)
	} else {
		// Read the existing config file
		data, err := os.ReadFile(configFile)
		if err != nil {
			return fmt.Errorf("failed to read config file: %w", err)
		}
		configData = data
	}

	// Parse the JSON
	var userCfg *Config
	if err := json.Unmarshal(configData, &userCfg); err != nil {
		return fmt.Errorf("failed to parse config file: %w", err)
	}

	updateCfg(userCfg)

	// Write the updated config back to file
	updatedData, err := json.MarshalIndent(userCfg, "", "  ")
	if err != nil {
		return fmt.Errorf("failed to marshal config: %w", err)
	}

	if err := os.WriteFile(configFile, updatedData, 0o644); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}

	return nil
}

// Get returns the current configuration.
// It's safe to call this function multiple times.
func Get() *Config {
	return cfg
}

// WorkingDirectory returns the current working directory from the configuration.
func WorkingDirectory() string {
	if cfg == nil {
		panic("config not loaded")
	}
	return cfg.WorkingDir
}

// UpdateAgentModel updates the model (and, when the model defines one, the max
// token limit) for the given agent, validates the result, and persists the
// change to the user's config file.
func UpdateAgentModel(agentName AgentName, modelID models.ModelID) error {
	if cfg == nil {
		panic("config not loaded")
	}

	existingAgentCfg := cfg.Agents[agentName]

	model, ok := models.SupportedModels[modelID]
	if !ok {
		return fmt.Errorf("model %s not supported", modelID)
	}

	maxTokens := existingAgentCfg.MaxTokens
	if model.DefaultMaxTokens > 0 {
		maxTokens = model.DefaultMaxTokens
	}

	newAgentCfg := Agent{
		Model:           modelID,
		MaxTokens:       maxTokens,
		ReasoningEffort: existingAgentCfg.ReasoningEffort,
	}
	cfg.Agents[agentName] = newAgentCfg

	if err := validateAgent(cfg, agentName, newAgentCfg); err != nil {
		// revert config update on failure
		cfg.Agents[agentName] = existingAgentCfg
		return fmt.Errorf("failed to update agent model: %w", err)
	}

	return updateCfgFile(func(config *Config) {
		if config.Agents == nil {
			config.Agents = make(map[AgentName]Agent)
		}
		config.Agents[agentName] = newAgentCfg
	})
}

// UpdateTheme updates the theme in the configuration and writes it to the config file.
func UpdateTheme(themeName string) error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Update the in-memory config
	cfg.TUI.Theme = themeName

	// Update the file config
	return updateCfgFile(func(config *Config) {
		config.TUI.Theme = themeName
	})
}