config.go

// Package config manages application configuration from various sources.
package config

import (
	"fmt"
	"log/slog"
	"os"
	"strings"

	"github.com/opencode-ai/opencode/internal/llm/models"
	"github.com/opencode-ai/opencode/internal/logging"
	"github.com/spf13/viper"
)

// MCPType defines the type of MCP (Model Context Protocol) server.
type MCPType string

// Supported MCP types
const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)

// MCPServer defines the configuration for a Model Context Protocol server.
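//
// An illustrative .opencode.json fragment (the server names, command, and URL
// below are placeholders, not values defined by this package):
//
//	"mcpServers": {
//	  "local-tools": {
//	    "type": "stdio",
//	    "command": "my-mcp-server",
//	    "args": ["--stdio"],
//	    "env": ["MY_TOKEN=..."]
//	  },
//	  "remote": {
//	    "type": "sse",
//	    "url": "https://example.com/mcp",
//	    "headers": {"Authorization": "Bearer ..."}
//	  }
//	}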
type MCPServer struct {
	Command string            `json:"command"`
	Env     []string          `json:"env"`
	Args    []string          `json:"args"`
	Type    MCPType           `json:"type"`
	URL     string            `json:"url"`
	Headers map[string]string `json:"headers"`
}

type AgentName string

const (
	AgentCoder AgentName = "coder"
	AgentTask  AgentName = "task"
	AgentTitle AgentName = "title"
)

// Agent defines the configuration for a single agent: its model, token limit, and reasoning effort.
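//
// An illustrative "agents" block (the model ID is a placeholder; it must be a
// key of models.SupportedModels):
//
//	"agents": {
//	  "coder": {
//	    "model": "<model-id>",
//	    "maxTokens": 5000,
//	    "reasoningEffort": "medium"
//	  }
//	}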
type Agent struct {
	Model           models.ModelID `json:"model"`
	MaxTokens       int64          `json:"maxTokens"`
	ReasoningEffort string         `json:"reasoningEffort"` // for OpenAI models: low, medium, high
}

// Provider defines configuration for an LLM provider.
type Provider struct {
	APIKey   string `json:"apiKey"`
	Disabled bool   `json:"disabled"`
}

// Data defines storage configuration.
type Data struct {
	Directory string `json:"directory"`
}

// LSPConfig defines configuration for Language Server Protocol integration.
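//
// An illustrative "lsp" block (the "go" key and gopls command are examples of
// how a server might be configured, not defaults of this package):
//
//	"lsp": {
//	  "go": {
//	    "command": "gopls",
//	    "args": ["serve"]
//	  }
//	}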
type LSPConfig struct {
	Disabled bool     `json:"disabled"`
	Command  string   `json:"command"`
	Args     []string `json:"args"`
	Options  any      `json:"options"`
}

// Config is the main configuration structure for the application.
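//
// A minimal illustrative .opencode.json (values are placeholders; see the
// MCPServer, Agent, and LSPConfig examples above for those sections):
//
//	{
//	  "data": {"directory": ".opencode"},
//	  "providers": {
//	    "anthropic": {"apiKey": "..."}
//	  },
//	  "agents": {
//	    "coder": {"model": "<model-id>", "maxTokens": 5000}
//	  },
//	  "debug": false
//	}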
type Config struct {
	Data         Data                              `json:"data"`
	WorkingDir   string                            `json:"wd,omitempty"`
	MCPServers   map[string]MCPServer              `json:"mcpServers,omitempty"`
	Providers    map[models.ModelProvider]Provider `json:"providers,omitempty"`
	LSP          map[string]LSPConfig              `json:"lsp,omitempty"`
	Agents       map[AgentName]Agent               `json:"agents"`
	Debug        bool                              `json:"debug,omitempty"`
	DebugLSP     bool                              `json:"debugLSP,omitempty"`
	ContextPaths []string                          `json:"contextPaths,omitempty"`
}

// Application constants
const (
	defaultDataDirectory = ".opencode"
	defaultLogLevel      = "info"
	appName              = "opencode"

	MaxTokensFallbackDefault = 4096
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"opencode.md",
	"opencode.local.md",
	"OpenCode.md",
	"OpenCode.local.md",
	"OPENCODE.md",
	"OPENCODE.local.md",
}

// Global configuration instance
var cfg *Config

// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and log level is set to debug.
// It returns an error if configuration loading fails.
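//
// Typical usage (illustrative):
//
//	cfg, err := config.Load(workingDir, false)
//	if err != nil {
//		// handle the error
//	}
//	_ = cfg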
func Load(workingDir string, debug bool) (*Config, error) {
	if cfg != nil {
		return cfg, nil
	}

	cfg = &Config{
		WorkingDir: workingDir,
		MCPServers: make(map[string]MCPServer),
		Providers:  make(map[models.ModelProvider]Provider),
		LSP:        make(map[string]LSPConfig),
	}

	configureViper()
	setDefaults(debug)
	setProviderDefaults()

	// Read global config
	if err := readConfig(viper.ReadInConfig()); err != nil {
		return cfg, err
	}

	// Load and merge local config
	mergeLocalConfig(workingDir)

	// Apply configuration to the struct
	if err := viper.Unmarshal(cfg); err != nil {
		return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
	}

	applyDefaultValues()
	defaultLevel := slog.LevelInfo
	if cfg.Debug {
		defaultLevel = slog.LevelDebug
	}
	if os.Getenv("OPENCODE_DEV_DEBUG") == "true" {
		loggingFile := fmt.Sprintf("%s/%s", cfg.Data.Directory, "debug.log")

		// Create the log file if it does not exist
		if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
			if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
				return cfg, fmt.Errorf("failed to create directory: %w", err)
			}
			if _, err := os.Create(loggingFile); err != nil {
				return cfg, fmt.Errorf("failed to create log file: %w", err)
			}
		}

		sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
		if err != nil {
			return cfg, fmt.Errorf("failed to open log file: %w", err)
		}
		// Configure logger
		logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	} else {
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	}

	// Validate configuration
	if err := Validate(); err != nil {
		return cfg, fmt.Errorf("config validation failed: %w", err)
	}

	if cfg.Agents == nil {
		cfg.Agents = make(map[AgentName]Agent)
	}

	// Override the max tokens for the title agent
	cfg.Agents[AgentTitle] = Agent{
		Model:     cfg.Agents[AgentTitle].Model,
		MaxTokens: 80,
	}
	return cfg, nil
}

// configureViper sets up viper's configuration paths and environment variables.
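//
// Because of the OPENCODE prefix and AutomaticEnv below, environment variables
// such as OPENCODE_DEBUG can override matching top-level keys (illustrative;
// the exact mapping follows viper's environment-variable rules):
//
//	OPENCODE_DEBUG=true opencode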
func configureViper() {
	viper.SetConfigName(fmt.Sprintf(".%s", appName))
	viper.SetConfigType("json")
	viper.AddConfigPath("$HOME")
	viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
	viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
	viper.SetEnvPrefix(strings.ToUpper(appName))
	viper.AutomaticEnv()
}

// setDefaults configures default values for configuration options.
func setDefaults(debug bool) {
	viper.SetDefault("data.directory", defaultDataDirectory)
	viper.SetDefault("contextPaths", defaultContextPaths)

	if debug {
		viper.SetDefault("debug", true)
		viper.Set("log.level", "debug")
	} else {
		viper.SetDefault("debug", false)
		viper.SetDefault("log.level", defaultLogLevel)
	}
}

// setProviderDefaults configures LLM provider defaults based on environment variables.
func setProviderDefaults() {
	// Set all API keys we can find in the environment
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.anthropic.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openai.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.gemini.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.groq.apiKey", apiKey)
	}

	// Use this order to set the default models:
	// 1. Anthropic
	// 2. OpenAI
	// 3. Google Gemini
	// 4. Groq
	// 5. AWS Bedrock
	// 6. Azure OpenAI

	// Anthropic configuration
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.Claude37Sonnet)
		viper.SetDefault("agents.task.model", models.Claude37Sonnet)
		viper.SetDefault("agents.title.model", models.Claude37Sonnet)
		return
	}

	// OpenAI configuration
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.GPT41)
		viper.SetDefault("agents.task.model", models.GPT41Mini)
		viper.SetDefault("agents.title.model", models.GPT41Mini)
		return
	}

	// Google Gemini configuration
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.Gemini25)
		viper.SetDefault("agents.task.model", models.Gemini25Flash)
		viper.SetDefault("agents.title.model", models.Gemini25Flash)
		return
	}

	// Groq configuration
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.QWENQwq)
		viper.SetDefault("agents.task.model", models.QWENQwq)
		viper.SetDefault("agents.title.model", models.QWENQwq)
		return
	}

	// AWS Bedrock configuration
	if hasAWSCredentials() {
		viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
		return
	}

	// Azure OpenAI configuration
	if os.Getenv("AZURE_OPENAI_ENDPOINT") != "" {
		// api-key may be empty when using Entra ID credentials – that's okay
		viper.SetDefault("providers.azure.apiKey", os.Getenv("AZURE_OPENAI_API_KEY"))
		viper.SetDefault("agents.coder.model", models.AzureGPT41)
		viper.SetDefault("agents.task.model", models.AzureGPT41Mini)
		viper.SetDefault("agents.title.model", models.AzureGPT41Mini)
		return
	}
}

// hasAWSCredentials checks if AWS credentials are available in the environment.
func hasAWSCredentials() bool {
	// Check for explicit AWS credentials
	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
		return true
	}

	// Check for AWS profile
	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
		return true
	}

	// Check for AWS region
	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
		return true
	}

	// Check for container credentials (e.g. ECS or EKS task roles)
	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
		return true
	}

	return false
}

// readConfig handles the result of reading a configuration file.
func readConfig(err error) error {
	if err == nil {
		return nil
	}

	// It's okay if the config file doesn't exist
	if _, ok := err.(viper.ConfigFileNotFoundError); ok {
		return nil
	}

	return fmt.Errorf("failed to read config: %w", err)
}

// mergeLocalConfig loads and merges configuration from the local directory.
func mergeLocalConfig(workingDir string) {
	local := viper.New()
	local.SetConfigName(fmt.Sprintf(".%s", appName))
	local.SetConfigType("json")
	local.AddConfigPath(workingDir)

	// Merge local config if it exists
	if err := local.ReadInConfig(); err == nil {
		viper.MergeConfigMap(local.AllSettings())
	}
}

// applyDefaultValues sets default values for configuration fields that need processing.
func applyDefaultValues() {
	// Set default MCP type if not specified
	for k, v := range cfg.MCPServers {
		if v.Type == "" {
			v.Type = MCPStdio
			cfg.MCPServers[k] = v
		}
	}
}

// validateAgent validates an agent's model, provider, and token settings, applying defaults where possible.
func validateAgent(cfg *Config, name AgentName, agent Agent) error {
	// Check if model exists
	model, modelExists := models.SupportedModels[agent.Model]
	if !modelExists {
		logging.Warn("unsupported model configured, reverting to default",
			"agent", name,
			"configured_model", agent.Model)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
		return nil
	}

	// Check if provider for the model is configured
	provider := model.Provider
	providerCfg, providerExists := cfg.Providers[provider]

	if !providerExists {
		// Provider not configured, check if we have environment variables
		apiKey := getProviderAPIKey(provider)
		if apiKey == "" {
			logging.Warn("provider not configured for model, reverting to default",
				"agent", name,
				"model", agent.Model,
				"provider", provider)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
		} else {
			// Add provider with API key from environment
			cfg.Providers[provider] = Provider{
				APIKey: apiKey,
			}
			logging.Info("added provider from environment", "provider", provider)
		}
	} else if providerCfg.Disabled || providerCfg.APIKey == "" {
		// Provider is disabled or has no API key
		logging.Warn("provider is disabled or has no API key, reverting to default",
			"agent", name,
			"model", agent.Model,
			"provider", provider)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
	}

	// Validate max tokens
	if agent.MaxTokens <= 0 {
		logging.Warn("invalid max tokens, setting to default",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens)

		// Update the agent with default max tokens
		updatedAgent := cfg.Agents[name]
		if model.DefaultMaxTokens > 0 {
			updatedAgent.MaxTokens = model.DefaultMaxTokens
		} else {
			updatedAgent.MaxTokens = MaxTokensFallbackDefault
		}
		cfg.Agents[name] = updatedAgent
	} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
		// Ensure max tokens doesn't exceed half the context window (reasonable limit)
		logging.Warn("max tokens exceeds half the context window, adjusting",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens,
			"context_window", model.ContextWindow)

		// Update the agent with adjusted max tokens
		updatedAgent := cfg.Agents[name]
		updatedAgent.MaxTokens = model.ContextWindow / 2
		cfg.Agents[name] = updatedAgent
	}

	// Validate reasoning effort for models that support reasoning
	if model.CanReason && provider == models.ProviderOpenAI {
		if agent.ReasoningEffort == "" {
			// Set default reasoning effort for models that support it
			logging.Info("setting default reasoning effort for model that supports reasoning",
				"agent", name,
				"model", agent.Model)

			// Update the agent with default reasoning effort
			updatedAgent := cfg.Agents[name]
			updatedAgent.ReasoningEffort = "medium"
			cfg.Agents[name] = updatedAgent
		} else {
			// Check if reasoning effort is valid (low, medium, high)
			effort := strings.ToLower(agent.ReasoningEffort)
			if effort != "low" && effort != "medium" && effort != "high" {
				logging.Warn("invalid reasoning effort, setting to medium",
					"agent", name,
					"model", agent.Model,
					"reasoning_effort", agent.ReasoningEffort)

				// Update the agent with valid reasoning effort
				updatedAgent := cfg.Agents[name]
				updatedAgent.ReasoningEffort = "medium"
				cfg.Agents[name] = updatedAgent
			}
		}
	} else if !model.CanReason && agent.ReasoningEffort != "" {
		// Model doesn't support reasoning but reasoning effort is set
		logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
			"agent", name,
			"model", agent.Model,
			"reasoning_effort", agent.ReasoningEffort)

		// Update the agent to remove reasoning effort
		updatedAgent := cfg.Agents[name]
		updatedAgent.ReasoningEffort = ""
		cfg.Agents[name] = updatedAgent
	}

	return nil
}

// Validate checks if the configuration is valid and applies defaults where needed.
func Validate() error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Validate agent models
	for name, agent := range cfg.Agents {
		if err := validateAgent(cfg, name, agent); err != nil {
			return err
		}
	}

	// Validate providers
	for provider, providerCfg := range cfg.Providers {
		if providerCfg.APIKey == "" && !providerCfg.Disabled {
			logging.Warn("provider has no API key, marking as disabled", "provider", provider)
			providerCfg.Disabled = true
			cfg.Providers[provider] = providerCfg
		}
	}

	// Validate LSP configurations
	for language, lspConfig := range cfg.LSP {
		if lspConfig.Command == "" && !lspConfig.Disabled {
			logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
			lspConfig.Disabled = true
			cfg.LSP[language] = lspConfig
		}
	}

	return nil
}

// getProviderAPIKey gets the API key for a provider from environment variables
func getProviderAPIKey(provider models.ModelProvider) string {
	switch provider {
	case models.ProviderAnthropic:
		return os.Getenv("ANTHROPIC_API_KEY")
	case models.ProviderOpenAI:
		return os.Getenv("OPENAI_API_KEY")
	case models.ProviderGemini:
		return os.Getenv("GEMINI_API_KEY")
	case models.ProviderGROQ:
		return os.Getenv("GROQ_API_KEY")
	case models.ProviderAzure:
		return os.Getenv("AZURE_OPENAI_API_KEY")
	case models.ProviderBedrock:
		if hasAWSCredentials() {
			return "aws-credentials-available"
		}
	}
	return ""
}

// setDefaultModelForAgent sets a default model for an agent based on available providers
func setDefaultModelForAgent(agent AgentName) bool {
	// Check providers in order of preference
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:     models.Claude37Sonnet,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.GPT41Mini
			maxTokens = 80
		case AgentTask:
			model = models.GPT41Mini
		default:
			model = models.GPT41
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.Gemini25Flash
			maxTokens = 80
		} else {
			model = models.Gemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:     models.QWENQwq,
			MaxTokens: maxTokens,
		}
		return true
	}

	if hasAWSCredentials() {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:           models.BedrockClaude37Sonnet,
			MaxTokens:       maxTokens,
			ReasoningEffort: "medium", // Claude models support reasoning
		}
		return true
	}

	return false
}

// Get returns the current configuration.
// It is safe to call multiple times and returns nil if Load has not been called yet.
func Get() *Config {
	return cfg
}

// WorkingDirectory returns the current working directory from the configuration.
func WorkingDirectory() string {
	if cfg == nil {
		panic("config not loaded")
	}
	return cfg.WorkingDir
}

// UpdateAgentModel updates the model for the given agent and revalidates the
// agent configuration, reverting the change if validation fails.
func UpdateAgentModel(agentName AgentName, modelID models.ModelID) error {
	if cfg == nil {
		panic("config not loaded")
	}

	existingAgentCfg := cfg.Agents[agentName]

	model, ok := models.SupportedModels[modelID]
	if !ok {
		return fmt.Errorf("model %s not supported", modelID)
	}

	maxTokens := existingAgentCfg.MaxTokens
	if model.DefaultMaxTokens > 0 {
		maxTokens = model.DefaultMaxTokens
	}

	newAgentCfg := Agent{
		Model:           modelID,
		MaxTokens:       maxTokens,
		ReasoningEffort: existingAgentCfg.ReasoningEffort,
	}
	cfg.Agents[agentName] = newAgentCfg

	if err := validateAgent(cfg, agentName, newAgentCfg); err != nil {
		// revert config update on failure
		cfg.Agents[agentName] = existingAgentCfg
		return fmt.Errorf("failed to update agent model: %w", err)
	}

	return nil
}