config.go

// Package config manages application configuration from various sources.
package config

import (
	"encoding/json"
	"fmt"
	"log/slog"
	"os"
	"path/filepath"
	"strings"

	"github.com/opencode-ai/opencode/internal/llm/models"
	"github.com/opencode-ai/opencode/internal/logging"
	"github.com/spf13/viper"
)

// MCPType defines the type of MCP (Model Control Protocol) server.
type MCPType string

// Supported MCP types
const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)

// MCPServer defines the configuration for a Model Control Protocol server.
type MCPServer struct {
	Command string            `json:"command"`
	Env     []string          `json:"env"`
	Args    []string          `json:"args"`
	Type    MCPType           `json:"type"`
	URL     string            `json:"url"`
	Headers map[string]string `json:"headers"`
}
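
// For illustration, a stdio MCP server entry in the configuration file could look
// like the following (the server name, command, args, and env values are
// placeholders, not part of this package):
//
//	"mcpServers": {
//		"example": {
//			"type": "stdio",
//			"command": "example-mcp-server",
//			"args": ["--stdio"],
//			"env": ["EXAMPLE_TOKEN=..."]
//		}
//	}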

// AgentName is the name of an agent role.
type AgentName string

// Supported agent names
const (
	AgentCoder      AgentName = "coder"
	AgentSummarizer AgentName = "summarizer"
	AgentTask       AgentName = "task"
	AgentTitle      AgentName = "title"
)

// Agent defines configuration for different LLM models and their token limits.
type Agent struct {
	Model           models.ModelID `json:"model"`
	MaxTokens       int64          `json:"maxTokens"`
	ReasoningEffort string         `json:"reasoningEffort"` // For OpenAI models: low, medium, high
}

// Provider defines configuration for an LLM provider.
type Provider struct {
	APIKey   string `json:"apiKey"`
	Disabled bool   `json:"disabled"`
}

// Data defines storage configuration.
type Data struct {
	Directory string `json:"directory"`
}

// LSPConfig defines configuration for Language Server Protocol integration.
type LSPConfig struct {
	Disabled bool     `json:"disabled"`
	Command  string   `json:"command"`
	Args     []string `json:"args"`
	Options  any      `json:"options"`
}

// TUIConfig defines the configuration for the Terminal User Interface.
type TUIConfig struct {
	Theme string `json:"theme,omitempty"`
}

// Config is the main configuration structure for the application.
type Config struct {
	Data         Data                              `json:"data"`
	WorkingDir   string                            `json:"wd,omitempty"`
	MCPServers   map[string]MCPServer              `json:"mcpServers,omitempty"`
	Providers    map[models.ModelProvider]Provider `json:"providers,omitempty"`
	LSP          map[string]LSPConfig              `json:"lsp,omitempty"`
	Agents       map[AgentName]Agent               `json:"agents"`
	Debug        bool                              `json:"debug,omitempty"`
	DebugLSP     bool                              `json:"debugLSP,omitempty"`
	ContextPaths []string                          `json:"contextPaths,omitempty"`
	TUI          TUIConfig                         `json:"tui"`
	AutoCompact  bool                              `json:"autoCompact,omitempty"`
}
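
// A minimal .opencode.json illustrating the structure above (the model ID and the
// LSP command are illustrative placeholders; any supported values work):
//
//	{
//		"data": { "directory": ".opencode" },
//		"providers": {
//			"anthropic": { "apiKey": "sk-ant-..." }
//		},
//		"agents": {
//			"coder": { "model": "claude-3.7-sonnet", "maxTokens": 5000 }
//		},
//		"lsp": {
//			"go": { "command": "gopls" }
//		},
//		"tui": { "theme": "opencode" }
//	}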

// Application constants
const (
	defaultDataDirectory = ".opencode"
	defaultLogLevel      = "info"
	appName              = "opencode"

	MaxTokensFallbackDefault = 4096
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"opencode.md",
	"opencode.local.md",
	"OpenCode.md",
	"OpenCode.local.md",
	"OPENCODE.md",
	"OPENCODE.local.md",
}

// Global configuration instance
var cfg *Config

// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and log level is set to debug.
// It returns an error if configuration loading fails.
func Load(workingDir string, debug bool) (*Config, error) {
	if cfg != nil {
		return cfg, nil
	}

	cfg = &Config{
		WorkingDir: workingDir,
		MCPServers: make(map[string]MCPServer),
		Providers:  make(map[models.ModelProvider]Provider),
		LSP:        make(map[string]LSPConfig),
	}

	configureViper()
	setDefaults(debug)

	// Read global config
	if err := readConfig(viper.ReadInConfig()); err != nil {
		return cfg, err
	}

	// Load and merge local config
	mergeLocalConfig(workingDir)

	setProviderDefaults()

	// Apply configuration to the struct
	if err := viper.Unmarshal(cfg); err != nil {
		return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
	}

	applyDefaultValues()
	defaultLevel := slog.LevelInfo
	if cfg.Debug {
		defaultLevel = slog.LevelDebug
	}
	if os.Getenv("OPENCODE_DEV_DEBUG") == "true" {
		loggingFile := filepath.Join(cfg.Data.Directory, "debug.log")

		// Create the log file if it does not exist
		if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
			if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
				return cfg, fmt.Errorf("failed to create directory: %w", err)
			}
			f, err := os.Create(loggingFile)
			if err != nil {
				return cfg, fmt.Errorf("failed to create log file: %w", err)
			}
			// Close immediately; the file is reopened for appending below.
			f.Close()
		}

		sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
		if err != nil {
			return cfg, fmt.Errorf("failed to open log file: %w", err)
		}
		// Configure logger
		logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	} else {
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	}

	// Validate configuration
	if err := Validate(); err != nil {
		return cfg, fmt.Errorf("config validation failed: %w", err)
	}

	if cfg.Agents == nil {
		cfg.Agents = make(map[AgentName]Agent)
	}

	// Override the max tokens for title agent
	cfg.Agents[AgentTitle] = Agent{
		Model:     cfg.Agents[AgentTitle].Model,
		MaxTokens: 80,
	}
	return cfg, nil
}
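
// Illustrative usage (a minimal sketch; the working directory and error handling
// are placeholders):
//
//	cfg, err := config.Load(".", false)
//	if err != nil {
//		// handle the error
//	}
//	_ = cfg.WorkingDir
//
// Subsequent calls return the already-loaded configuration, which is also
// available via Get().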

// configureViper sets up viper's configuration paths and environment variables.
func configureViper() {
	viper.SetConfigName(fmt.Sprintf(".%s", appName))
	viper.SetConfigType("json")
	viper.AddConfigPath("$HOME")
	viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
	viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
	viper.SetEnvPrefix(strings.ToUpper(appName))
	viper.AutomaticEnv()
}
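
// With the settings above, the global config file is the first .opencode.json
// found in $HOME, $XDG_CONFIG_HOME/opencode, or $HOME/.config/opencode, and
// environment variables prefixed with OPENCODE_ are also consulted. For example
// (illustrative), setting OPENCODE_DEBUG=true in the environment should enable
// debug mode without touching the config file.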

// setDefaults configures default values for configuration options.
func setDefaults(debug bool) {
	viper.SetDefault("data.directory", defaultDataDirectory)
	viper.SetDefault("contextPaths", defaultContextPaths)
	viper.SetDefault("tui.theme", "opencode")
	viper.SetDefault("autoCompact", true)

	if debug {
		viper.SetDefault("debug", true)
		viper.Set("log.level", "debug")
	} else {
		viper.SetDefault("debug", false)
		viper.SetDefault("log.level", defaultLogLevel)
	}
}

// setProviderDefaults configures LLM provider defaults based on API keys found in
// the environment and in the configuration file.
func setProviderDefaults() {
	// Set all API keys we can find in the environment
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.anthropic.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openai.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.gemini.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.groq.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openrouter.apiKey", apiKey)
	}
	if apiKey := os.Getenv("XAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.xai.apiKey", apiKey)
	}
	if endpoint := os.Getenv("AZURE_OPENAI_ENDPOINT"); endpoint != "" {
		// api-key may be empty when using Entra ID credentials – that's okay
		viper.SetDefault("providers.azure.apiKey", os.Getenv("AZURE_OPENAI_API_KEY"))
	}

	// Use this order to set the default models
	// 1. Anthropic
	// 2. OpenAI
	// 3. Google Gemini
	// 4. Groq
	// 5. OpenRouter
	// 6. XAI
	// 7. AWS Bedrock
	// 8. Azure

	// Anthropic configuration
	if key := viper.GetString("providers.anthropic.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.Claude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.Claude37Sonnet)
		viper.SetDefault("agents.task.model", models.Claude37Sonnet)
		viper.SetDefault("agents.title.model", models.Claude37Sonnet)
		return
	}

	// OpenAI configuration
	if key := viper.GetString("providers.openai.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.GPT41)
		viper.SetDefault("agents.summarizer.model", models.GPT41)
		viper.SetDefault("agents.task.model", models.GPT41Mini)
		viper.SetDefault("agents.title.model", models.GPT41Mini)
		return
	}

	// Google Gemini configuration
	if key := viper.GetString("providers.gemini.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.Gemini25)
		viper.SetDefault("agents.summarizer.model", models.Gemini25)
		viper.SetDefault("agents.task.model", models.Gemini25Flash)
		viper.SetDefault("agents.title.model", models.Gemini25Flash)
		return
	}

	// Groq configuration
	if key := viper.GetString("providers.groq.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.QWENQwq)
		viper.SetDefault("agents.summarizer.model", models.QWENQwq)
		viper.SetDefault("agents.task.model", models.QWENQwq)
		viper.SetDefault("agents.title.model", models.QWENQwq)
		return
	}

	// OpenRouter configuration
	if key := viper.GetString("providers.openrouter.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.OpenRouterClaude35Haiku)
		return
	}

	// XAI configuration
	if key := viper.GetString("providers.xai.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.summarizer.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.task.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.title.model", models.XAiGrok3MiniFastBeta)
		return
	}

	// AWS Bedrock configuration
	if hasAWSCredentials() {
		viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
		return
	}

	// Azure OpenAI configuration
	if os.Getenv("AZURE_OPENAI_ENDPOINT") != "" {
		viper.SetDefault("agents.coder.model", models.AzureGPT41)
		viper.SetDefault("agents.summarizer.model", models.AzureGPT41)
		viper.SetDefault("agents.task.model", models.AzureGPT41Mini)
		viper.SetDefault("agents.title.model", models.AzureGPT41Mini)
		return
	}
}
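
// For example (illustrative; the key value is a placeholder), running with only
//
//	export ANTHROPIC_API_KEY=sk-ant-...
//
// in the environment makes Claude 3.7 Sonnet the default model for all four agents.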

// hasAWSCredentials checks if AWS credentials are available in the environment.
func hasAWSCredentials() bool {
	// Check for explicit AWS credentials
	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
		return true
	}

	// Check for AWS profile
	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
		return true
	}

	// Check for AWS region
	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
		return true
	}

	// Check for container credentials (e.g. ECS/EKS task roles)
	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
		return true
	}

	return false
}

// readConfig handles the result of reading a configuration file.
func readConfig(err error) error {
	if err == nil {
		return nil
	}

	// It's okay if the config file doesn't exist
	if _, ok := err.(viper.ConfigFileNotFoundError); ok {
		return nil
	}

	return fmt.Errorf("failed to read config: %w", err)
}

// mergeLocalConfig loads and merges configuration from the local directory.
func mergeLocalConfig(workingDir string) {
	local := viper.New()
	local.SetConfigName(fmt.Sprintf(".%s", appName))
	local.SetConfigType("json")
	local.AddConfigPath(workingDir)

	// Merge local config if it exists
	if err := local.ReadInConfig(); err == nil {
		viper.MergeConfigMap(local.AllSettings())
	}
}
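
// A project-local .opencode.json in the working directory is merged on top of the
// global configuration, so a repository can, for example, pin its own "agents" or
// "lsp" settings while inheriting provider API keys from the user-level file.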

// applyDefaultValues sets default values for configuration fields that need processing.
func applyDefaultValues() {
	// Set default MCP type if not specified
	for k, v := range cfg.MCPServers {
		if v.Type == "" {
			v.Type = MCPStdio
			cfg.MCPServers[k] = v
		}
	}
}

// validateAgent validates an agent's model, provider, and token settings,
// reverting to defaults where they are unsupported or unavailable.
func validateAgent(cfg *Config, name AgentName, agent Agent) error {
	// Check if model exists
	model, modelExists := models.SupportedModels[agent.Model]
	if !modelExists {
		logging.Warn("unsupported model configured, reverting to default",
			"agent", name,
			"configured_model", agent.Model)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
		return nil
	}

	// Check if provider for the model is configured
	provider := model.Provider
	providerCfg, providerExists := cfg.Providers[provider]

	if !providerExists {
		// Provider not configured, check if we have environment variables
		apiKey := getProviderAPIKey(provider)
		if apiKey == "" {
			logging.Warn("provider not configured for model, reverting to default",
				"agent", name,
				"model", agent.Model,
				"provider", provider)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
		} else {
			// Add provider with API key from environment
			cfg.Providers[provider] = Provider{
				APIKey: apiKey,
			}
			logging.Info("added provider from environment", "provider", provider)
		}
	} else if providerCfg.Disabled || providerCfg.APIKey == "" {
		// Provider is disabled or has no API key
		logging.Warn("provider is disabled or has no API key, reverting to default",
			"agent", name,
			"model", agent.Model,
			"provider", provider)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
	}

	// Validate max tokens
	if agent.MaxTokens <= 0 {
		logging.Warn("invalid max tokens, setting to default",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens)

		// Update the agent with default max tokens
		updatedAgent := cfg.Agents[name]
		if model.DefaultMaxTokens > 0 {
			updatedAgent.MaxTokens = model.DefaultMaxTokens
		} else {
			updatedAgent.MaxTokens = MaxTokensFallbackDefault
		}
		cfg.Agents[name] = updatedAgent
	} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
		// Ensure max tokens doesn't exceed half the context window (reasonable limit)
		logging.Warn("max tokens exceeds half the context window, adjusting",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens,
			"context_window", model.ContextWindow)

		// Update the agent with adjusted max tokens
		updatedAgent := cfg.Agents[name]
		updatedAgent.MaxTokens = model.ContextWindow / 2
		cfg.Agents[name] = updatedAgent
	}

	// Validate reasoning effort for models that support reasoning
	if model.CanReason && provider == models.ProviderOpenAI {
		if agent.ReasoningEffort == "" {
			// Set default reasoning effort for models that support it
			logging.Info("setting default reasoning effort for model that supports reasoning",
				"agent", name,
				"model", agent.Model)

			// Update the agent with default reasoning effort
			updatedAgent := cfg.Agents[name]
			updatedAgent.ReasoningEffort = "medium"
			cfg.Agents[name] = updatedAgent
		} else {
			// Check if reasoning effort is valid (low, medium, high)
			effort := strings.ToLower(agent.ReasoningEffort)
			if effort != "low" && effort != "medium" && effort != "high" {
				logging.Warn("invalid reasoning effort, setting to medium",
					"agent", name,
					"model", agent.Model,
					"reasoning_effort", agent.ReasoningEffort)

				// Update the agent with valid reasoning effort
				updatedAgent := cfg.Agents[name]
				updatedAgent.ReasoningEffort = "medium"
				cfg.Agents[name] = updatedAgent
			}
		}
	} else if !model.CanReason && agent.ReasoningEffort != "" {
		// Model doesn't support reasoning but reasoning effort is set
		logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
			"agent", name,
			"model", agent.Model,
			"reasoning_effort", agent.ReasoningEffort)

		// Update the agent to remove reasoning effort
		updatedAgent := cfg.Agents[name]
		updatedAgent.ReasoningEffort = ""
		cfg.Agents[name] = updatedAgent
	}

	return nil
}

// Validate checks if the configuration is valid and applies defaults where needed.
func Validate() error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Validate agent models
	for name, agent := range cfg.Agents {
		if err := validateAgent(cfg, name, agent); err != nil {
			return err
		}
	}

	// Validate providers
	for provider, providerCfg := range cfg.Providers {
		if providerCfg.APIKey == "" && !providerCfg.Disabled {
			logging.Warn("provider has no API key, marking as disabled", "provider", provider)
			providerCfg.Disabled = true
			cfg.Providers[provider] = providerCfg
		}
	}

	// Validate LSP configurations
	for language, lspConfig := range cfg.LSP {
		if lspConfig.Command == "" && !lspConfig.Disabled {
			logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
			lspConfig.Disabled = true
			cfg.LSP[language] = lspConfig
		}
	}

	return nil
}

// getProviderAPIKey gets the API key for a provider from environment variables.
func getProviderAPIKey(provider models.ModelProvider) string {
	switch provider {
	case models.ProviderAnthropic:
		return os.Getenv("ANTHROPIC_API_KEY")
	case models.ProviderOpenAI:
		return os.Getenv("OPENAI_API_KEY")
	case models.ProviderGemini:
		return os.Getenv("GEMINI_API_KEY")
	case models.ProviderGROQ:
		return os.Getenv("GROQ_API_KEY")
	case models.ProviderAzure:
		return os.Getenv("AZURE_OPENAI_API_KEY")
	case models.ProviderOpenRouter:
		return os.Getenv("OPENROUTER_API_KEY")
	case models.ProviderBedrock:
		if hasAWSCredentials() {
			return "aws-credentials-available"
		}
	}
	return ""
}

// setDefaultModelForAgent sets a default model for an agent based on available providers.
func setDefaultModelForAgent(agent AgentName) bool {
	// Check providers in order of preference
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:     models.Claude37Sonnet,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.GPT41Mini
			maxTokens = 80
		case AgentTask:
			model = models.GPT41Mini
		default:
			model = models.GPT41
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.OpenRouterClaude35Haiku
			maxTokens = 80
		case AgentTask:
			model = models.OpenRouterClaude37Sonnet
		default:
			model = models.OpenRouterClaude37Sonnet
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.Gemini25Flash
			maxTokens = 80
		} else {
			model = models.Gemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:     models.QWENQwq,
			MaxTokens: maxTokens,
		}
		return true
	}

	if hasAWSCredentials() {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:           models.BedrockClaude37Sonnet,
			MaxTokens:       maxTokens,
			ReasoningEffort: "medium", // Claude models support reasoning
		}
		return true
	}

	return false
}

// Get returns the current configuration.
// It's safe to call this function multiple times.
func Get() *Config {
	return cfg
}

// WorkingDirectory returns the current working directory from the configuration.
func WorkingDirectory() string {
	if cfg == nil {
		panic("config not loaded")
	}
	return cfg.WorkingDir
}

// UpdateAgentModel sets the model for the given agent, reverting to the previous
// configuration if validation fails.
func UpdateAgentModel(agentName AgentName, modelID models.ModelID) error {
	if cfg == nil {
		panic("config not loaded")
	}

	existingAgentCfg := cfg.Agents[agentName]

	model, ok := models.SupportedModels[modelID]
	if !ok {
		return fmt.Errorf("model %s not supported", modelID)
	}

	maxTokens := existingAgentCfg.MaxTokens
	if model.DefaultMaxTokens > 0 {
		maxTokens = model.DefaultMaxTokens
	}

	newAgentCfg := Agent{
		Model:           modelID,
		MaxTokens:       maxTokens,
		ReasoningEffort: existingAgentCfg.ReasoningEffort,
	}
	cfg.Agents[agentName] = newAgentCfg

	if err := validateAgent(cfg, agentName, newAgentCfg); err != nil {
		// revert config update on failure
		cfg.Agents[agentName] = existingAgentCfg
		return fmt.Errorf("failed to update agent model: %w", err)
	}

	return nil
}
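
// Illustrative usage (the model ID is a placeholder; any key of
// models.SupportedModels is valid):
//
//	if err := config.UpdateAgentModel(config.AgentCoder, someModelID); err != nil {
//		// the previous agent configuration has been restored
//	}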

// UpdateTheme updates the theme in the configuration and writes it to the config file.
func UpdateTheme(themeName string) error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Update the in-memory config
	cfg.TUI.Theme = themeName

	// Get the config file path
	configFile := viper.ConfigFileUsed()
	var configData []byte
	if configFile == "" {
		homeDir, err := os.UserHomeDir()
		if err != nil {
			return fmt.Errorf("failed to get home directory: %w", err)
		}
		configFile = filepath.Join(homeDir, fmt.Sprintf(".%s.json", appName))
		logging.Info("config file not found, creating new one", "path", configFile)
		configData = []byte(`{}`)
	} else {
		// Read the existing config file
		data, err := os.ReadFile(configFile)
		if err != nil {
			return fmt.Errorf("failed to read config file: %w", err)
		}
		configData = data
	}

	// Parse the JSON
	var configMap map[string]interface{}
	if err := json.Unmarshal(configData, &configMap); err != nil {
		return fmt.Errorf("failed to parse config file: %w", err)
	}

	// Update just the theme value
	tuiConfig, ok := configMap["tui"].(map[string]interface{})
	if !ok {
		// TUI config doesn't exist yet, create it
		configMap["tui"] = map[string]interface{}{"theme": themeName}
	} else {
		// Update existing TUI config
		tuiConfig["theme"] = themeName
		configMap["tui"] = tuiConfig
	}

	// Write the updated config back to file
	updatedData, err := json.MarshalIndent(configMap, "", "  ")
	if err != nil {
		return fmt.Errorf("failed to marshal config: %w", err)
	}

	if err := os.WriteFile(configFile, updatedData, 0o644); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}

	return nil
}