// Package config manages application configuration from various sources.
package config

import (
	"fmt"
	"log/slog"
	"os"
	"strings"

	"github.com/opencode-ai/opencode/internal/llm/models"
	"github.com/opencode-ai/opencode/internal/logging"
	"github.com/spf13/viper"
)

// MCPType defines the type of MCP (Model Context Protocol) server.
type MCPType string

// Supported MCP types
const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)

// MCPServer defines the configuration for a Model Context Protocol server.
type MCPServer struct {
	Command string            `json:"command"`
	Env     []string          `json:"env"`
	Args    []string          `json:"args"`
	Type    MCPType           `json:"type"`
	URL     string            `json:"url"`
	Headers map[string]string `json:"headers"`
}
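
// For illustration, a hypothetical "mcpServers" entry in the JSON config could
// look like the following. The server name and values are examples only; the
// keys follow the struct tags above.
//
//	"mcpServers": {
//	  "example": {
//	    "type": "stdio",
//	    "command": "example-mcp-server",
//	    "args": ["--stdio"],
//	    "env": ["EXAMPLE_TOKEN=..."]
//	  }
//	}
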

// AgentName identifies one of the application's built-in agents.
type AgentName string

// Supported agent names
const (
	AgentCoder AgentName = "coder"
	AgentTask  AgentName = "task"
	AgentTitle AgentName = "title"
)

// Agent defines the LLM model configuration and token limits for an agent.
type Agent struct {
	Model           models.ModelID `json:"model"`
	MaxTokens       int64          `json:"maxTokens"`
	ReasoningEffort string         `json:"reasoningEffort"` // For OpenAI models: low, medium, high
}

// Provider defines configuration for an LLM provider.
type Provider struct {
	APIKey   string `json:"apiKey"`
	Disabled bool   `json:"disabled"`
}

// Data defines storage configuration.
type Data struct {
	Directory string `json:"directory"`
}

// LSPConfig defines configuration for Language Server Protocol integration.
type LSPConfig struct {
	Disabled bool     `json:"disabled"`
	Command  string   `json:"command"`
	Args     []string `json:"args"`
	Options  any      `json:"options"`
}

// Config is the main configuration structure for the application.
type Config struct {
	Data         Data                              `json:"data"`
	WorkingDir   string                            `json:"wd,omitempty"`
	MCPServers   map[string]MCPServer              `json:"mcpServers,omitempty"`
	Providers    map[models.ModelProvider]Provider `json:"providers,omitempty"`
	LSP          map[string]LSPConfig              `json:"lsp,omitempty"`
	Agents       map[AgentName]Agent               `json:"agents"`
	Debug        bool                              `json:"debug,omitempty"`
	DebugLSP     bool                              `json:"debugLSP,omitempty"`
	ContextPaths []string                          `json:"contextPaths,omitempty"`
}
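
// A minimal JSON configuration illustrating the structure above might look
// like this. The API key and model ID are placeholders, not values taken from
// the models package; the keys follow the struct tags above.
//
//	{
//	  "data": { "directory": ".opencode" },
//	  "providers": { "openai": { "apiKey": "sk-..." } },
//	  "agents": { "coder": { "model": "some-model-id", "maxTokens": 5000 } }
//	}
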

// Application constants
const (
	defaultDataDirectory = ".opencode"
	defaultLogLevel      = "info"
	appName              = "opencode"

	MaxTokensFallbackDefault = 4096
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"opencode.md",
	"opencode.local.md",
	"OpenCode.md",
	"OpenCode.local.md",
	"OPENCODE.md",
	"OPENCODE.local.md",
}

// Global configuration instance
var cfg *Config

// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and the log level is set to debug.
// It returns an error if configuration loading fails.
func Load(workingDir string, debug bool) (*Config, error) {
	if cfg != nil {
		return cfg, nil
	}

	cfg = &Config{
		WorkingDir: workingDir,
		MCPServers: make(map[string]MCPServer),
		Providers:  make(map[models.ModelProvider]Provider),
		LSP:        make(map[string]LSPConfig),
	}

	configureViper()
	setDefaults(debug)
	setProviderDefaults()

	// Read global config
	if err := readConfig(viper.ReadInConfig()); err != nil {
		return cfg, err
	}

	// Load and merge local config
	mergeLocalConfig(workingDir)

	// Apply configuration to the struct
	if err := viper.Unmarshal(cfg); err != nil {
		return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
	}

	applyDefaultValues()
	defaultLevel := slog.LevelInfo
	if cfg.Debug {
		defaultLevel = slog.LevelDebug
	}
	if os.Getenv("OPENCODE_DEV_DEBUG") == "true" {
		loggingFile := fmt.Sprintf("%s/%s", cfg.Data.Directory, "debug.log")

		// Create the log file (and its directory) if it does not exist
		if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
			if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
				return cfg, fmt.Errorf("failed to create directory: %w", err)
			}
			if _, err := os.Create(loggingFile); err != nil {
				return cfg, fmt.Errorf("failed to create log file: %w", err)
			}
		}

		sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
		if err != nil {
			return cfg, fmt.Errorf("failed to open log file: %w", err)
		}
		// Configure logger
		logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	} else {
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	}

	// Validate configuration
	if err := Validate(); err != nil {
		return cfg, fmt.Errorf("config validation failed: %w", err)
	}

	if cfg.Agents == nil {
		cfg.Agents = make(map[AgentName]Agent)
	}

	// Override the max tokens for the title agent
	cfg.Agents[AgentTitle] = Agent{
		Model:     cfg.Agents[AgentTitle].Model,
		MaxTokens: 80,
	}
	return cfg, nil
}
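
// As a usage sketch (illustrative only, error handling abbreviated), Load is
// typically called once at startup and later reads go through Get:
//
//	cwd, _ := os.Getwd()
//	conf, err := config.Load(cwd, false)
//	if err != nil {
//		// handle the error
//	}
//	_ = conf.WorkingDir
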

// configureViper sets up viper's configuration paths and environment variables.
func configureViper() {
	viper.SetConfigName(fmt.Sprintf(".%s", appName))
	viper.SetConfigType("json")
	viper.AddConfigPath("$HOME")
	viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
	viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
	viper.SetEnvPrefix(strings.ToUpper(appName))
	viper.AutomaticEnv()
}

// setDefaults configures default values for configuration options.
func setDefaults(debug bool) {
	viper.SetDefault("data.directory", defaultDataDirectory)
	viper.SetDefault("contextPaths", defaultContextPaths)

	if debug {
		viper.SetDefault("debug", true)
		viper.Set("log.level", "debug")
	} else {
		viper.SetDefault("debug", false)
		viper.SetDefault("log.level", defaultLogLevel)
	}
}

// setProviderDefaults configures LLM provider defaults based on environment variables.
func setProviderDefaults() {
	// Set all API keys we can find in the environment
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.anthropic.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openai.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.gemini.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.groq.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openrouter.apiKey", apiKey)
	}

	// Use this order to set the default models:
	// 1. Anthropic
	// 2. OpenAI
	// 3. Google Gemini
	// 4. Groq
	// 5. OpenRouter
	// 6. AWS Bedrock
	// 7. Azure OpenAI

	// Anthropic configuration
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.Claude37Sonnet)
		viper.SetDefault("agents.task.model", models.Claude37Sonnet)
		viper.SetDefault("agents.title.model", models.Claude37Sonnet)
		return
	}

	// OpenAI configuration
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.GPT41)
		viper.SetDefault("agents.task.model", models.GPT41Mini)
		viper.SetDefault("agents.title.model", models.GPT41Mini)
		return
	}

	// Google Gemini configuration
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.Gemini25)
		viper.SetDefault("agents.task.model", models.Gemini25Flash)
		viper.SetDefault("agents.title.model", models.Gemini25Flash)
		return
	}

	// Groq configuration
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.QWENQwq)
		viper.SetDefault("agents.task.model", models.QWENQwq)
		viper.SetDefault("agents.title.model", models.QWENQwq)
		return
	}

	// OpenRouter configuration
	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openrouter.apiKey", apiKey)
		viper.SetDefault("agents.coder.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.OpenRouterClaude35Haiku)
		return
	}

	// AWS Bedrock configuration
	if hasAWSCredentials() {
		viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
		return
	}

	// Azure OpenAI configuration
	if os.Getenv("AZURE_OPENAI_ENDPOINT") != "" {
		// api-key may be empty when using Entra ID credentials, and that's okay
		viper.SetDefault("providers.azure.apiKey", os.Getenv("AZURE_OPENAI_API_KEY"))
		viper.SetDefault("agents.coder.model", models.AzureGPT41)
		viper.SetDefault("agents.task.model", models.AzureGPT41Mini)
		viper.SetDefault("agents.title.model", models.AzureGPT41Mini)
		return
	}
}

// hasAWSCredentials checks if AWS credentials are available in the environment.
func hasAWSCredentials() bool {
	// Check for explicit AWS credentials
	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
		return true
	}

	// Check for AWS profile
	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
		return true
	}

	// Check for AWS region
	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
		return true
	}

	// Check for container credentials (e.g. ECS task roles)
	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
		return true
	}

	return false
}

// readConfig handles the result of reading a configuration file.
func readConfig(err error) error {
	if err == nil {
		return nil
	}

	// It's okay if the config file doesn't exist
	if _, ok := err.(viper.ConfigFileNotFoundError); ok {
		return nil
	}

	return fmt.Errorf("failed to read config: %w", err)
}

// mergeLocalConfig loads and merges configuration from the local directory.
func mergeLocalConfig(workingDir string) {
	local := viper.New()
	local.SetConfigName(fmt.Sprintf(".%s", appName))
	local.SetConfigType("json")
	local.AddConfigPath(workingDir)

	// Merge local config if it exists
	if err := local.ReadInConfig(); err == nil {
		viper.MergeConfigMap(local.AllSettings())
	}
}

// applyDefaultValues sets default values for configuration fields that need processing.
func applyDefaultValues() {
	// Set default MCP type if not specified
	for k, v := range cfg.MCPServers {
		if v.Type == "" {
			v.Type = MCPStdio
			cfg.MCPServers[k] = v
		}
	}
}

// validateAgent checks an agent's model and provider configuration, ensuring
// they are supported, and falls back to defaults when they are not.
func validateAgent(cfg *Config, name AgentName, agent Agent) error {
	// Check if model exists
	model, modelExists := models.SupportedModels[agent.Model]
	if !modelExists {
		logging.Warn("unsupported model configured, reverting to default",
			"agent", name,
			"configured_model", agent.Model)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
		return nil
	}

	// Check if provider for the model is configured
	provider := model.Provider
	providerCfg, providerExists := cfg.Providers[provider]

	if !providerExists {
		// Provider not configured, check if we have environment variables
		apiKey := getProviderAPIKey(provider)
		if apiKey == "" {
			logging.Warn("provider not configured for model, reverting to default",
				"agent", name,
				"model", agent.Model,
				"provider", provider)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
		} else {
			// Add provider with API key from environment
			cfg.Providers[provider] = Provider{
				APIKey: apiKey,
			}
			logging.Info("added provider from environment", "provider", provider)
		}
	} else if providerCfg.Disabled || providerCfg.APIKey == "" {
		// Provider is disabled or has no API key
		logging.Warn("provider is disabled or has no API key, reverting to default",
			"agent", name,
			"model", agent.Model,
			"provider", provider)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
	}

	// Validate max tokens
	if agent.MaxTokens <= 0 {
		logging.Warn("invalid max tokens, setting to default",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens)

		// Update the agent with default max tokens
		updatedAgent := cfg.Agents[name]
		if model.DefaultMaxTokens > 0 {
			updatedAgent.MaxTokens = model.DefaultMaxTokens
		} else {
			updatedAgent.MaxTokens = MaxTokensFallbackDefault
		}
		cfg.Agents[name] = updatedAgent
	} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
		// Ensure max tokens doesn't exceed half the context window (reasonable limit)
		logging.Warn("max tokens exceeds half the context window, adjusting",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens,
			"context_window", model.ContextWindow)

		// Update the agent with adjusted max tokens
		updatedAgent := cfg.Agents[name]
		updatedAgent.MaxTokens = model.ContextWindow / 2
		cfg.Agents[name] = updatedAgent
	}

	// Validate reasoning effort for models that support reasoning
	if model.CanReason && provider == models.ProviderOpenAI {
		if agent.ReasoningEffort == "" {
			// Set default reasoning effort for models that support it
			logging.Info("setting default reasoning effort for model that supports reasoning",
				"agent", name,
				"model", agent.Model)

			// Update the agent with default reasoning effort
			updatedAgent := cfg.Agents[name]
			updatedAgent.ReasoningEffort = "medium"
			cfg.Agents[name] = updatedAgent
		} else {
			// Check if reasoning effort is valid (low, medium, high)
			effort := strings.ToLower(agent.ReasoningEffort)
			if effort != "low" && effort != "medium" && effort != "high" {
				logging.Warn("invalid reasoning effort, setting to medium",
					"agent", name,
					"model", agent.Model,
					"reasoning_effort", agent.ReasoningEffort)

				// Update the agent with valid reasoning effort
				updatedAgent := cfg.Agents[name]
				updatedAgent.ReasoningEffort = "medium"
				cfg.Agents[name] = updatedAgent
			}
		}
	} else if !model.CanReason && agent.ReasoningEffort != "" {
		// Model doesn't support reasoning but reasoning effort is set
		logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
			"agent", name,
			"model", agent.Model,
			"reasoning_effort", agent.ReasoningEffort)

		// Update the agent to remove reasoning effort
		updatedAgent := cfg.Agents[name]
		updatedAgent.ReasoningEffort = ""
		cfg.Agents[name] = updatedAgent
	}

	return nil
}

// Validate checks if the configuration is valid and applies defaults where needed.
func Validate() error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Validate agent models
	for name, agent := range cfg.Agents {
		if err := validateAgent(cfg, name, agent); err != nil {
			return err
		}
	}

	// Validate providers
	for provider, providerCfg := range cfg.Providers {
		if providerCfg.APIKey == "" && !providerCfg.Disabled {
			logging.Warn("provider has no API key, marking as disabled", "provider", provider)
			providerCfg.Disabled = true
			cfg.Providers[provider] = providerCfg
		}
	}

	// Validate LSP configurations
	for language, lspConfig := range cfg.LSP {
		if lspConfig.Command == "" && !lspConfig.Disabled {
			logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
			lspConfig.Disabled = true
			cfg.LSP[language] = lspConfig
		}
	}

	return nil
}

// getProviderAPIKey gets the API key for a provider from environment variables.
func getProviderAPIKey(provider models.ModelProvider) string {
	switch provider {
	case models.ProviderAnthropic:
		return os.Getenv("ANTHROPIC_API_KEY")
	case models.ProviderOpenAI:
		return os.Getenv("OPENAI_API_KEY")
	case models.ProviderGemini:
		return os.Getenv("GEMINI_API_KEY")
	case models.ProviderGROQ:
		return os.Getenv("GROQ_API_KEY")
	case models.ProviderAzure:
		return os.Getenv("AZURE_OPENAI_API_KEY")
	case models.ProviderOpenRouter:
		return os.Getenv("OPENROUTER_API_KEY")
	case models.ProviderBedrock:
		if hasAWSCredentials() {
			return "aws-credentials-available"
		}
	}
	return ""
}

// setDefaultModelForAgent sets a default model for an agent based on the available providers.
func setDefaultModelForAgent(agent AgentName) bool {
	// Check providers in order of preference
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:     models.Claude37Sonnet,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.GPT41Mini
			maxTokens = 80
		case AgentTask:
			model = models.GPT41Mini
		default:
			model = models.GPT41
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.OpenRouterClaude35Haiku
			maxTokens = 80
		case AgentTask:
			model = models.OpenRouterClaude37Sonnet
		default:
			model = models.OpenRouterClaude37Sonnet
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.Gemini25Flash
			maxTokens = 80
		} else {
			model = models.Gemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:     models.QWENQwq,
			MaxTokens: maxTokens,
		}
		return true
	}

	if hasAWSCredentials() {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:           models.BedrockClaude37Sonnet,
			MaxTokens:       maxTokens,
			ReasoningEffort: "medium", // Claude models support reasoning
		}
		return true
	}

	return false
}

// Get returns the current configuration.
// It's safe to call this function multiple times.
func Get() *Config {
	return cfg
}

// WorkingDirectory returns the current working directory from the configuration.
func WorkingDirectory() string {
	if cfg == nil {
		panic("config not loaded")
	}
	return cfg.WorkingDir
}

// UpdateAgentModel updates the model configured for the given agent and
// re-validates the agent, reverting the change if validation fails.
func UpdateAgentModel(agentName AgentName, modelID models.ModelID) error {
	if cfg == nil {
		panic("config not loaded")
	}

	existingAgentCfg := cfg.Agents[agentName]

	model, ok := models.SupportedModels[modelID]
	if !ok {
		return fmt.Errorf("model %s not supported", modelID)
	}

	maxTokens := existingAgentCfg.MaxTokens
	if model.DefaultMaxTokens > 0 {
		maxTokens = model.DefaultMaxTokens
	}

	newAgentCfg := Agent{
		Model:           modelID,
		MaxTokens:       maxTokens,
		ReasoningEffort: existingAgentCfg.ReasoningEffort,
	}
	cfg.Agents[agentName] = newAgentCfg

	if err := validateAgent(cfg, agentName, newAgentCfg); err != nil {
		// revert config update on failure
		cfg.Agents[agentName] = existingAgentCfg
		return fmt.Errorf("failed to update agent model: %w", err)
	}

	return nil
}
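
// A hypothetical usage sketch for switching an agent's model at runtime; the
// modelID below is a placeholder, not a value taken from the models package:
//
//	if err := config.UpdateAgentModel(config.AgentCoder, modelID); err != nil {
//		logging.Warn("could not switch model", "error", err)
//	}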