// Package config manages application configuration from various sources.
package config

import (
	"fmt"
	"log/slog"
	"os"
	"strings"

	"github.com/opencode-ai/opencode/internal/llm/models"
	"github.com/opencode-ai/opencode/internal/logging"
	"github.com/spf13/viper"
)

// MCPType defines the type of MCP (Model Context Protocol) server.
type MCPType string

// Supported MCP types
const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)

// MCPServer defines the configuration for a Model Context Protocol server.
type MCPServer struct {
	Command string            `json:"command"`
	Env     []string          `json:"env"`
	Args    []string          `json:"args"`
	Type    MCPType           `json:"type"`
	URL     string            `json:"url"`
	Headers map[string]string `json:"headers"`
}

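// AgentName identifies a built-in agent role.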
type AgentName string

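// Supported agent names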
const (
	AgentCoder AgentName = "coder"
	AgentTask  AgentName = "task"
	AgentTitle AgentName = "title"
)

// Agent defines the model, token limit, and reasoning settings for a single agent.
type Agent struct {
	Model           models.ModelID `json:"model"`
	MaxTokens       int64          `json:"maxTokens"`
	ReasoningEffort string         `json:"reasoningEffort"` // For OpenAI models: low, medium, high
}

// Provider defines configuration for an LLM provider.
type Provider struct {
	APIKey   string `json:"apiKey"`
	Disabled bool   `json:"disabled"`
}

// Data defines storage configuration.
type Data struct {
	Directory string `json:"directory"`
}

// LSPConfig defines configuration for Language Server Protocol integration.
type LSPConfig struct {
	Disabled bool     `json:"disabled"`
	Command  string   `json:"command"`
	Args     []string `json:"args"`
	Options  any      `json:"options"`
}

// Config is the main configuration structure for the application.
type Config struct {
	Data         Data                              `json:"data"`
	WorkingDir   string                            `json:"wd,omitempty"`
	MCPServers   map[string]MCPServer              `json:"mcpServers,omitempty"`
	Providers    map[models.ModelProvider]Provider `json:"providers,omitempty"`
	LSP          map[string]LSPConfig              `json:"lsp,omitempty"`
	Agents       map[AgentName]Agent               `json:"agents"`
	Debug        bool                              `json:"debug,omitempty"`
	DebugLSP     bool                              `json:"debugLSP,omitempty"`
	ContextPaths []string                          `json:"contextPaths,omitempty"`
}

// Application constants
const (
	defaultDataDirectory = ".opencode"
	defaultLogLevel      = "info"
	appName              = "opencode"
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"opencode.md",
	"opencode.local.md",
	"OpenCode.md",
	"OpenCode.local.md",
	"OPENCODE.md",
	"OPENCODE.local.md",
}

// Global configuration instance
var cfg *Config

// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and the log level is set to debug.
// Subsequent calls return the already-loaded configuration.
// It returns an error if configuration loading fails.
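//
// Typical usage (a minimal sketch):
//
//	cfg, err := config.Load(workingDir, false)
//	if err != nil {
//		// handle configuration errors
//	}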
func Load(workingDir string, debug bool) (*Config, error) {
	if cfg != nil {
		return cfg, nil
	}

	cfg = &Config{
		WorkingDir: workingDir,
		MCPServers: make(map[string]MCPServer),
		Providers:  make(map[models.ModelProvider]Provider),
		LSP:        make(map[string]LSPConfig),
	}

	configureViper()
	setDefaults(debug)
	setProviderDefaults()

	// Read global config
	if err := readConfig(viper.ReadInConfig()); err != nil {
		return cfg, err
	}

	// Load and merge local config
	mergeLocalConfig(workingDir)

	// Apply configuration to the struct
	if err := viper.Unmarshal(cfg); err != nil {
		return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
	}

	applyDefaultValues()
	defaultLevel := slog.LevelInfo
	if cfg.Debug {
		defaultLevel = slog.LevelDebug
	}
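	// When OPENCODE_DEV_DEBUG is set, logs go to a debug.log file inside the
	// data directory; otherwise they go to the in-app log writer.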
	if os.Getenv("OPENCODE_DEV_DEBUG") == "true" {
		loggingFile := fmt.Sprintf("%s/%s", cfg.Data.Directory, "debug.log")

		// Create the log file and its parent directory if they do not exist yet.
		if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
			if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
				return cfg, fmt.Errorf("failed to create directory: %w", err)
			}
			f, err := os.Create(loggingFile)
			if err != nil {
				return cfg, fmt.Errorf("failed to create log file: %w", err)
			}
			f.Close()
		}

		sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
		if err != nil {
			return cfg, fmt.Errorf("failed to open log file: %w", err)
		}
		// Configure logger
		logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	} else {
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	}

	// Validate configuration
	if err := Validate(); err != nil {
		return cfg, fmt.Errorf("config validation failed: %w", err)
	}

	if cfg.Agents == nil {
		cfg.Agents = make(map[AgentName]Agent)
	}

	// Override the max tokens for the title agent; session titles should stay short.
	cfg.Agents[AgentTitle] = Agent{
		Model:     cfg.Agents[AgentTitle].Model,
		MaxTokens: 80,
	}
	return cfg, nil
}

// configureViper sets up viper's configuration paths and environment variables.
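// The config file is named ".opencode" (JSON) and is looked up in $HOME,
// $XDG_CONFIG_HOME/opencode, and $HOME/.config/opencode; matching OPENCODE_*
// environment variables take precedence over file values.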
func configureViper() {
	viper.SetConfigName(fmt.Sprintf(".%s", appName))
	viper.SetConfigType("json")
	viper.AddConfigPath("$HOME")
	viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
	viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
	viper.SetEnvPrefix(strings.ToUpper(appName))
	viper.AutomaticEnv()
}

// setDefaults configures default values for configuration options.
func setDefaults(debug bool) {
	viper.SetDefault("data.directory", defaultDataDirectory)
	viper.SetDefault("contextPaths", defaultContextPaths)

	if debug {
		viper.SetDefault("debug", true)
		viper.Set("log.level", "debug")
	} else {
		viper.SetDefault("debug", false)
		viper.SetDefault("log.level", defaultLogLevel)
	}
}

// setProviderDefaults configures LLM provider defaults based on environment variables.
func setProviderDefaults() {
	// Set all API keys we can find in the environment
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.anthropic.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openai.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.gemini.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.groq.apiKey", apiKey)
	}

	// Use this order to set the default models
	// 1. Anthropic
	// 2. OpenAI
	// 3. Google Gemini
	// 4. Groq
	// 5. AWS Bedrock
	// 6. Azure OpenAI
	// Anthropic configuration
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.Claude37Sonnet)
		viper.SetDefault("agents.task.model", models.Claude37Sonnet)
		viper.SetDefault("agents.title.model", models.Claude37Sonnet)
		return
	}

	// OpenAI configuration
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.GPT41)
		viper.SetDefault("agents.task.model", models.GPT41Mini)
		viper.SetDefault("agents.title.model", models.GPT41Mini)
		return
	}

	// Google Gemini configuration
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.Gemini25)
		viper.SetDefault("agents.task.model", models.Gemini25Flash)
		viper.SetDefault("agents.title.model", models.Gemini25Flash)
		return
	}

	// Groq configuration
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.QWENQwq)
		viper.SetDefault("agents.task.model", models.QWENQwq)
		viper.SetDefault("agents.title.model", models.QWENQwq)
		return
	}

	// AWS Bedrock configuration
	if hasAWSCredentials() {
		viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
		return
	}

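	// Azure OpenAI configuration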
	if os.Getenv("AZURE_OPENAI_ENDPOINT") != "" {
		// api-key may be empty when using Entra ID credentials – that's okay
		viper.SetDefault("providers.azure.apiKey", os.Getenv("AZURE_OPENAI_API_KEY"))
		viper.SetDefault("agents.coder.model", models.AzureGPT41)
		viper.SetDefault("agents.task.model", models.AzureGPT41Mini)
		viper.SetDefault("agents.title.model", models.AzureGPT41Mini)
		return
	}
}

// hasAWSCredentials checks if AWS credentials are available in the environment.
func hasAWSCredentials() bool {
	// Check for explicit AWS credentials
	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
		return true
	}

	// Check for AWS profile
	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
		return true
	}

	// Check for AWS region
	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
		return true
	}

	// Check for container credential endpoints (e.g. ECS task roles)
	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
		return true
	}

	return false
}

// readConfig handles the result of reading a configuration file.
func readConfig(err error) error {
	if err == nil {
		return nil
	}

	// It's okay if the config file doesn't exist
	if _, ok := err.(viper.ConfigFileNotFoundError); ok {
		return nil
	}

	return fmt.Errorf("failed to read config: %w", err)
}

// mergeLocalConfig loads and merges configuration from the local directory.
func mergeLocalConfig(workingDir string) {
	local := viper.New()
	local.SetConfigName(fmt.Sprintf(".%s", appName))
	local.SetConfigType("json")
	local.AddConfigPath(workingDir)

	// Merge local config if it exists
	if err := local.ReadInConfig(); err == nil {
		viper.MergeConfigMap(local.AllSettings())
	}
}

// applyDefaultValues sets default values for configuration fields that need processing.
func applyDefaultValues() {
	// Set default MCP type if not specified
	for k, v := range cfg.MCPServers {
		if v.Type == "" {
			v.Type = MCPStdio
			cfg.MCPServers[k] = v
		}
	}
}

// Validate checks if the configuration is valid and applies defaults where needed.
// It validates model IDs and providers, ensuring they are supported.
func Validate() error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Validate agent models
	for name, agent := range cfg.Agents {
		// Check if model exists
		model, modelExists := models.SupportedModels[agent.Model]
		if !modelExists {
			logging.Warn("unsupported model configured, reverting to default",
				"agent", name,
				"configured_model", agent.Model)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
			continue
		}

		// Check if provider for the model is configured
		provider := model.Provider
		providerCfg, providerExists := cfg.Providers[provider]

		if !providerExists {
			// Provider not configured, check if we have environment variables
			apiKey := getProviderAPIKey(provider)
			if apiKey == "" {
				logging.Warn("provider not configured for model, reverting to default",
					"agent", name,
					"model", agent.Model,
					"provider", provider)

				// Set default model based on available providers
				if setDefaultModelForAgent(name) {
					logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
				} else {
					return fmt.Errorf("no valid provider available for agent %s", name)
				}
			} else {
				// Add provider with API key from environment
				cfg.Providers[provider] = Provider{
					APIKey: apiKey,
				}
				logging.Info("added provider from environment", "provider", provider)
			}
		} else if providerCfg.Disabled || providerCfg.APIKey == "" {
			// Provider is disabled or has no API key
			logging.Warn("provider is disabled or has no API key, reverting to default",
				"agent", name,
				"model", agent.Model,
				"provider", provider)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
		}

		// Validate max tokens
		if agent.MaxTokens <= 0 {
			logging.Warn("invalid max tokens, setting to default",
				"agent", name,
				"model", agent.Model,
				"max_tokens", agent.MaxTokens)

			// Update the agent with default max tokens
			updatedAgent := cfg.Agents[name]
			if model.DefaultMaxTokens > 0 {
				updatedAgent.MaxTokens = model.DefaultMaxTokens
			} else {
				updatedAgent.MaxTokens = 4096 // Fallback default
			}
			cfg.Agents[name] = updatedAgent
		} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
			// Ensure max tokens doesn't exceed half the context window (reasonable limit)
			logging.Warn("max tokens exceeds half the context window, adjusting",
				"agent", name,
				"model", agent.Model,
				"max_tokens", agent.MaxTokens,
				"context_window", model.ContextWindow)

			// Update the agent with adjusted max tokens
			updatedAgent := cfg.Agents[name]
			updatedAgent.MaxTokens = model.ContextWindow / 2
			cfg.Agents[name] = updatedAgent
		}

		// Validate reasoning effort for models that support reasoning
		if model.CanReason && provider == models.ProviderOpenAI {
			if agent.ReasoningEffort == "" {
				// Set default reasoning effort for models that support it
				logging.Info("setting default reasoning effort for model that supports reasoning",
					"agent", name,
					"model", agent.Model)

				// Update the agent with default reasoning effort
				updatedAgent := cfg.Agents[name]
				updatedAgent.ReasoningEffort = "medium"
				cfg.Agents[name] = updatedAgent
			} else {
				// Check if reasoning effort is valid (low, medium, high)
				effort := strings.ToLower(agent.ReasoningEffort)
				if effort != "low" && effort != "medium" && effort != "high" {
					logging.Warn("invalid reasoning effort, setting to medium",
						"agent", name,
						"model", agent.Model,
						"reasoning_effort", agent.ReasoningEffort)

					// Update the agent with valid reasoning effort
					updatedAgent := cfg.Agents[name]
					updatedAgent.ReasoningEffort = "medium"
					cfg.Agents[name] = updatedAgent
				}
			}
		} else if !model.CanReason && agent.ReasoningEffort != "" {
			// Model doesn't support reasoning but reasoning effort is set
			logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
				"agent", name,
				"model", agent.Model,
				"reasoning_effort", agent.ReasoningEffort)

			// Update the agent to remove reasoning effort
			updatedAgent := cfg.Agents[name]
			updatedAgent.ReasoningEffort = ""
			cfg.Agents[name] = updatedAgent
		}
	}

	// Validate providers
	for provider, providerCfg := range cfg.Providers {
		if providerCfg.APIKey == "" && !providerCfg.Disabled {
			logging.Warn("provider has no API key, marking as disabled", "provider", provider)
			providerCfg.Disabled = true
			cfg.Providers[provider] = providerCfg
		}
	}

	// Validate LSP configurations
	for language, lspConfig := range cfg.LSP {
		if lspConfig.Command == "" && !lspConfig.Disabled {
			logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
			lspConfig.Disabled = true
			cfg.LSP[language] = lspConfig
		}
	}

	return nil
}

// getProviderAPIKey gets the API key for a provider from environment variables
func getProviderAPIKey(provider models.ModelProvider) string {
	switch provider {
	case models.ProviderAnthropic:
		return os.Getenv("ANTHROPIC_API_KEY")
	case models.ProviderOpenAI:
		return os.Getenv("OPENAI_API_KEY")
	case models.ProviderGemini:
		return os.Getenv("GEMINI_API_KEY")
	case models.ProviderGROQ:
		return os.Getenv("GROQ_API_KEY")
	case models.ProviderAzure:
		return os.Getenv("AZURE_OPENAI_API_KEY")
	case models.ProviderBedrock:
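		// Bedrock uses AWS credentials from the environment rather than an API key;
		// return a non-empty placeholder so the provider counts as configured.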
		if hasAWSCredentials() {
			return "aws-credentials-available"
		}
	}
	return ""
}

// setDefaultModelForAgent sets a default model for an agent based on available providers
func setDefaultModelForAgent(agent AgentName) bool {
	// Check providers in order of preference
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:     models.Claude37Sonnet,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.GPT41Mini
			maxTokens = 80
		case AgentTask:
			model = models.GPT41Mini
		default:
			model = models.GPT41
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.Gemini25Flash
			maxTokens = 80
		} else {
			model = models.Gemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:     models.QWENQwq,
			MaxTokens: maxTokens,
		}
		return true
	}

	if hasAWSCredentials() {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:           models.BedrockClaude37Sonnet,
			MaxTokens:       maxTokens,
			ReasoningEffort: "medium", // Claude models support reasoning
		}
		return true
	}

	return false
}

// Get returns the current configuration.
// It is safe to call multiple times; it returns nil if Load has not been called.
func Get() *Config {
	return cfg
}

// WorkingDirectory returns the current working directory from the configuration.
func WorkingDirectory() string {
	if cfg == nil {
		panic("config not loaded")
	}
	return cfg.WorkingDir
}