// Package config manages application configuration from various sources.
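//
// Typical usage (a minimal sketch; workingDir is the caller's project directory):
//
//	cfg, err := config.Load(workingDir, false)
//	if err != nil {
//		// handle the error
//	}
//	_ = cfg // the same value is available later via config.Get()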
package config

import (
	"fmt"
	"log/slog"
	"os"
	"strings"

	"github.com/opencode-ai/opencode/internal/llm/models"
	"github.com/opencode-ai/opencode/internal/logging"
	"github.com/spf13/viper"
)

// MCPType defines the type of MCP (Model Context Protocol) server.
type MCPType string

// Supported MCP types.
const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)

// MCPServer defines the configuration for a Model Context Protocol server.
type MCPServer struct {
	Command string            `json:"command"`
	Env     []string          `json:"env"`
	Args    []string          `json:"args"`
	Type    MCPType           `json:"type"`
	URL     string            `json:"url"`
	Headers map[string]string `json:"headers"`
}

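// AgentName identifies one of the application's named agents.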
type AgentName string

const (
	AgentCoder AgentName = "coder"
	AgentTask  AgentName = "task"
	AgentTitle AgentName = "title"
)

// Agent defines the model, token limit, and reasoning settings for a single agent.
type Agent struct {
	Model           models.ModelID `json:"model"`
	MaxTokens       int64          `json:"maxTokens"`
	ReasoningEffort string         `json:"reasoningEffort"` // For OpenAI models: low, medium, high
}

// Provider defines configuration for an LLM provider.
type Provider struct {
	APIKey   string `json:"apiKey"`
	Disabled bool   `json:"disabled"`
}

// Data defines storage configuration.
type Data struct {
	Directory string `json:"directory"`
}

// LSPConfig defines configuration for Language Server Protocol integration.
type LSPConfig struct {
	Disabled bool     `json:"disabled"`
	Command  string   `json:"command"`
	Args     []string `json:"args"`
	Options  any      `json:"options"`
}

// Config is the main configuration structure for the application.
type Config struct {
	Data         Data                              `json:"data"`
	WorkingDir   string                            `json:"wd,omitempty"`
	MCPServers   map[string]MCPServer              `json:"mcpServers,omitempty"`
	Providers    map[models.ModelProvider]Provider `json:"providers,omitempty"`
	LSP          map[string]LSPConfig              `json:"lsp,omitempty"`
	Agents       map[AgentName]Agent               `json:"agents"`
	Debug        bool                              `json:"debug,omitempty"`
	DebugLSP     bool                              `json:"debugLSP,omitempty"`
	ContextPaths []string                          `json:"contextPaths,omitempty"`
}

// Application constants
const (
	defaultDataDirectory = ".opencode"
	defaultLogLevel      = "info"
	appName              = "opencode"
)

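// defaultContextPaths are the default locations checked for project context files.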
var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"opencode.md",
	"opencode.local.md",
	"OpenCode.md",
	"OpenCode.local.md",
	"OPENCODE.md",
	"OPENCODE.local.md",
}

// Global configuration instance
var cfg *Config

// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and the log level is set to debug.
// It returns an error if configuration loading fails.
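// Subsequent calls return the already-loaded configuration.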
func Load(workingDir string, debug bool) (*Config, error) {
	if cfg != nil {
		return cfg, nil
	}

	cfg = &Config{
		WorkingDir: workingDir,
		MCPServers: make(map[string]MCPServer),
		Providers:  make(map[models.ModelProvider]Provider),
		LSP:        make(map[string]LSPConfig),
	}

	configureViper()
	setDefaults(debug)
	setProviderDefaults()

	// Read global config
	if err := readConfig(viper.ReadInConfig()); err != nil {
		return cfg, err
	}

	// Load and merge local config
	mergeLocalConfig(workingDir)

	// Apply configuration to the struct
	if err := viper.Unmarshal(cfg); err != nil {
		return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
	}

	applyDefaultValues()
	defaultLevel := slog.LevelInfo
	if cfg.Debug {
		defaultLevel = slog.LevelDebug
	}
	if os.Getenv("OPENCODE_DEV_DEBUG") == "true" {
		loggingFile := fmt.Sprintf("%s/%s", cfg.Data.Directory, "debug.log")

		// Make sure the data directory exists; O_CREATE below creates the log file itself.
		if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
			return cfg, fmt.Errorf("failed to create directory: %w", err)
		}

		sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
		if err != nil {
			return cfg, fmt.Errorf("failed to open log file: %w", err)
		}
		// Configure logger
		logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	} else {
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	}

	// Validate configuration
	if err := Validate(); err != nil {
		return cfg, fmt.Errorf("config validation failed: %w", err)
	}

	if cfg.Agents == nil {
		cfg.Agents = make(map[AgentName]Agent)
	}

	// Override the max tokens for the title agent, keeping its other settings.
	titleAgent := cfg.Agents[AgentTitle]
	titleAgent.MaxTokens = 80
	cfg.Agents[AgentTitle] = titleAgent
	return cfg, nil
}

// configureViper sets up viper's configuration paths and environment variables.
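// The global config file is .opencode.json, looked up in $HOME,
// $XDG_CONFIG_HOME/opencode, and $HOME/.config/opencode; environment
// variables prefixed with OPENCODE_ are also read.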
func configureViper() {
	viper.SetConfigName(fmt.Sprintf(".%s", appName))
	viper.SetConfigType("json")
	viper.AddConfigPath("$HOME")
	viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
	viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
	viper.SetEnvPrefix(strings.ToUpper(appName))
	viper.AutomaticEnv()
}

// setDefaults configures default values for configuration options.
func setDefaults(debug bool) {
	viper.SetDefault("data.directory", defaultDataDirectory)
	viper.SetDefault("contextPaths", defaultContextPaths)

	if debug {
		viper.SetDefault("debug", true)
		viper.Set("log.level", "debug")
	} else {
		viper.SetDefault("debug", false)
		viper.SetDefault("log.level", defaultLogLevel)
	}
}

// setProviderDefaults configures LLM provider defaults based on environment variables.
func setProviderDefaults() {
	// Set all API keys we can find in the environment
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.anthropic.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openai.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.gemini.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.groq.apiKey", apiKey)
	}

	// Use this order to set the default models:
	// 1. Anthropic
	// 2. OpenAI
	// 3. Google Gemini
	// 4. Groq
	// 5. AWS Bedrock

	// Anthropic configuration
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.Claude37Sonnet)
		viper.SetDefault("agents.task.model", models.Claude37Sonnet)
		viper.SetDefault("agents.title.model", models.Claude37Sonnet)
		return
	}

	// OpenAI configuration
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.GPT41)
		viper.SetDefault("agents.task.model", models.GPT41Mini)
		viper.SetDefault("agents.title.model", models.GPT41Mini)
		return
	}

	// Google Gemini configuration
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.Gemini25)
		viper.SetDefault("agents.task.model", models.Gemini25Flash)
		viper.SetDefault("agents.title.model", models.Gemini25Flash)
		return
	}

	// Groq configuration
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("agents.coder.model", models.QWENQwq)
		viper.SetDefault("agents.task.model", models.QWENQwq)
		viper.SetDefault("agents.title.model", models.QWENQwq)
		return
	}

	// AWS Bedrock configuration
	if hasAWSCredentials() {
		viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
		return
	}
}

// hasAWSCredentials checks if AWS credentials are available in the environment.
func hasAWSCredentials() bool {
	// Check for explicit AWS credentials
	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
		return true
	}

	// Check for an AWS profile
	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
		return true
	}

	// Check for an AWS region
	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
		return true
	}

	// Check for container credentials (e.g. ECS task roles)
	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
		return true
	}

	return false
}

// readConfig handles the result of reading a configuration file.
func readConfig(err error) error {
	if err == nil {
		return nil
	}

	// It's okay if the config file doesn't exist
	if _, ok := err.(viper.ConfigFileNotFoundError); ok {
		return nil
	}

	return fmt.Errorf("failed to read config: %w", err)
}

// mergeLocalConfig loads and merges configuration from the local directory.
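// The local file is .opencode.json in the given working directory.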
func mergeLocalConfig(workingDir string) {
	local := viper.New()
	local.SetConfigName(fmt.Sprintf(".%s", appName))
	local.SetConfigType("json")
	local.AddConfigPath(workingDir)

	// Merge local config if it exists
	if err := local.ReadInConfig(); err == nil {
		viper.MergeConfigMap(local.AllSettings())
	}
}

// applyDefaultValues sets default values for configuration fields that need processing.
func applyDefaultValues() {
	// Set default MCP type if not specified
	for k, v := range cfg.MCPServers {
		if v.Type == "" {
			v.Type = MCPStdio
			cfg.MCPServers[k] = v
		}
	}
}

// Validate checks if the configuration is valid and applies defaults where needed.
// It validates model IDs and providers, ensuring they are supported.
func Validate() error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Validate agent models
	for name, agent := range cfg.Agents {
		// Check if model exists
		model, modelExists := models.SupportedModels[agent.Model]
		if !modelExists {
			logging.Warn("unsupported model configured, reverting to default",
				"agent", name,
				"configured_model", agent.Model)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
			continue
		}

		// Check if provider for the model is configured
		provider := model.Provider
		providerCfg, providerExists := cfg.Providers[provider]

		if !providerExists {
			// Provider not configured, check if we have environment variables
			apiKey := getProviderAPIKey(provider)
			if apiKey == "" {
				logging.Warn("provider not configured for model, reverting to default",
					"agent", name,
					"model", agent.Model,
					"provider", provider)

				// Set default model based on available providers
				if setDefaultModelForAgent(name) {
					logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
				} else {
					return fmt.Errorf("no valid provider available for agent %s", name)
				}
			} else {
				// Add provider with API key from environment
				cfg.Providers[provider] = Provider{
					APIKey: apiKey,
				}
				logging.Info("added provider from environment", "provider", provider)
			}
		} else if providerCfg.Disabled || providerCfg.APIKey == "" {
			// Provider is disabled or has no API key
			logging.Warn("provider is disabled or has no API key, reverting to default",
				"agent", name,
				"model", agent.Model,
				"provider", provider)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
		}

		// Validate max tokens
		if agent.MaxTokens <= 0 {
			logging.Warn("invalid max tokens, setting to default",
				"agent", name,
				"model", agent.Model,
				"max_tokens", agent.MaxTokens)

			// Update the agent with default max tokens
			updatedAgent := cfg.Agents[name]
			if model.DefaultMaxTokens > 0 {
				updatedAgent.MaxTokens = model.DefaultMaxTokens
			} else {
				updatedAgent.MaxTokens = 4096 // Fallback default
			}
			cfg.Agents[name] = updatedAgent
		} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
			// Ensure max tokens doesn't exceed half the context window (reasonable limit)
			logging.Warn("max tokens exceeds half the context window, adjusting",
				"agent", name,
				"model", agent.Model,
				"max_tokens", agent.MaxTokens,
				"context_window", model.ContextWindow)

			// Update the agent with adjusted max tokens
			updatedAgent := cfg.Agents[name]
			updatedAgent.MaxTokens = model.ContextWindow / 2
			cfg.Agents[name] = updatedAgent
		}

		// Validate reasoning effort for models that support reasoning
		if model.CanReason && provider == models.ProviderOpenAI {
			if agent.ReasoningEffort == "" {
				// Set default reasoning effort for models that support it
				logging.Info("setting default reasoning effort for model that supports reasoning",
					"agent", name,
					"model", agent.Model)

				// Update the agent with default reasoning effort
				updatedAgent := cfg.Agents[name]
				updatedAgent.ReasoningEffort = "medium"
				cfg.Agents[name] = updatedAgent
			} else {
				// Check if reasoning effort is valid (low, medium, high)
				effort := strings.ToLower(agent.ReasoningEffort)
				if effort != "low" && effort != "medium" && effort != "high" {
					logging.Warn("invalid reasoning effort, setting to medium",
						"agent", name,
						"model", agent.Model,
						"reasoning_effort", agent.ReasoningEffort)

					// Update the agent with valid reasoning effort
					updatedAgent := cfg.Agents[name]
					updatedAgent.ReasoningEffort = "medium"
					cfg.Agents[name] = updatedAgent
				}
			}
		} else if !model.CanReason && agent.ReasoningEffort != "" {
			// Model doesn't support reasoning but reasoning effort is set
			logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
				"agent", name,
				"model", agent.Model,
				"reasoning_effort", agent.ReasoningEffort)

			// Update the agent to remove reasoning effort
			updatedAgent := cfg.Agents[name]
			updatedAgent.ReasoningEffort = ""
			cfg.Agents[name] = updatedAgent
		}
	}

	// Validate providers
	for provider, providerCfg := range cfg.Providers {
		if providerCfg.APIKey == "" && !providerCfg.Disabled {
			logging.Warn("provider has no API key, marking as disabled", "provider", provider)
			providerCfg.Disabled = true
			cfg.Providers[provider] = providerCfg
		}
	}

	// Validate LSP configurations
	for language, lspConfig := range cfg.LSP {
		if lspConfig.Command == "" && !lspConfig.Disabled {
			logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
			lspConfig.Disabled = true
			cfg.LSP[language] = lspConfig
		}
	}

	return nil
}

// getProviderAPIKey gets the API key for a provider from environment variables.
func getProviderAPIKey(provider models.ModelProvider) string {
	switch provider {
	case models.ProviderAnthropic:
		return os.Getenv("ANTHROPIC_API_KEY")
	case models.ProviderOpenAI:
		return os.Getenv("OPENAI_API_KEY")
	case models.ProviderGemini:
		return os.Getenv("GEMINI_API_KEY")
	case models.ProviderGROQ:
		return os.Getenv("GROQ_API_KEY")
	case models.ProviderBedrock:
		if hasAWSCredentials() {
			return "aws-credentials-available"
		}
	}
	return ""
}

// setDefaultModelForAgent sets a default model for an agent based on available providers.
func setDefaultModelForAgent(agent AgentName) bool {
	// Check providers in order of preference
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:     models.Claude37Sonnet,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.GPT41Mini
			maxTokens = 80
		case AgentTask:
			model = models.GPT41Mini
		default:
			model = models.GPT41
		}

		// Check if the model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.Gemini25Flash
			maxTokens = 80
		} else {
			model = models.Gemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:     models.QWENQwq,
			MaxTokens: maxTokens,
		}
		return true
	}

	if hasAWSCredentials() {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:           models.BedrockClaude37Sonnet,
			MaxTokens:       maxTokens,
			ReasoningEffort: "medium", // Claude models support reasoning
		}
		return true
	}

	return false
}

// Get returns the current configuration.
// It is safe to call multiple times and returns nil if Load has not been called.
func Get() *Config {
	return cfg
}

// WorkingDirectory returns the current working directory from the configuration.
func WorkingDirectory() string {
	if cfg == nil {
		panic("config not loaded")
	}
	return cfg.WorkingDir
}