// Package config manages application configuration from various sources.
package config

import (
	"fmt"
	"log/slog"
	"os"
	"strings"

	"github.com/kujtimiihoxha/opencode/internal/llm/models"
	"github.com/kujtimiihoxha/opencode/internal/logging"
	"github.com/spf13/viper"
)

// MCPType defines the type of MCP (Model Context Protocol) server.
type MCPType string

// Supported MCP types
const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)

// MCPServer defines the configuration for a Model Context Protocol server.
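//
// An illustrative entry in the mcpServers map (keys and values are placeholders):
//
//	"example": {"type": "stdio", "command": "example-mcp-server", "args": ["--verbose"]}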
type MCPServer struct {
	Command string            `json:"command"`
	Env     []string          `json:"env"`
	Args    []string          `json:"args"`
	Type    MCPType           `json:"type"`
	URL     string            `json:"url"`
	Headers map[string]string `json:"headers"`
}

type AgentName string

const (
	AgentCoder AgentName = "coder"
	AgentTask  AgentName = "task"
	AgentTitle AgentName = "title"
)

// Agent defines configuration for different LLM models and their token limits.
type Agent struct {
	Model           models.ModelID `json:"model"`
	MaxTokens       int64          `json:"maxTokens"`
	ReasoningEffort string         `json:"reasoningEffort"` // For OpenAI models: low, medium, or high
}

// Provider defines configuration for an LLM provider.
type Provider struct {
	APIKey   string `json:"apiKey"`
	Disabled bool   `json:"disabled"`
}

// Data defines storage configuration.
type Data struct {
	Directory string `json:"directory"`
}

// LSPConfig defines configuration for Language Server Protocol integration.
type LSPConfig struct {
	Disabled bool     `json:"disabled"`
	Command  string   `json:"command"`
	Args     []string `json:"args"`
	Options  any      `json:"options"`
}

// Config is the main configuration structure for the application.
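//
// An illustrative .opencode.json (keys follow the json tags below; the model IDs
// and API key are placeholders, not an exhaustive reference):
//
//	{
//	  "data": {"directory": ".opencode"},
//	  "providers": {"openai": {"apiKey": "sk-..."}},
//	  "agents": {"coder": {"model": "gpt-4.1", "maxTokens": 5000}},
//	  "lsp": {"go": {"command": "gopls"}}
//	}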
type Config struct {
	Data       Data                              `json:"data"`
	WorkingDir string                            `json:"wd,omitempty"`
	MCPServers map[string]MCPServer              `json:"mcpServers,omitempty"`
	Providers  map[models.ModelProvider]Provider `json:"providers,omitempty"`
	LSP        map[string]LSPConfig              `json:"lsp,omitempty"`
	Agents     map[AgentName]Agent               `json:"agents"`
	Debug      bool                              `json:"debug,omitempty"`
	DebugLSP   bool                              `json:"debugLSP,omitempty"`
}

// Application constants
const (
	defaultDataDirectory = ".opencode"
	defaultLogLevel      = "info"
	appName              = "opencode"
)

// Global configuration instance
var cfg *Config

// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and the log level is set to debug.
// It returns an error if configuration loading fails.
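//
// Typical usage (illustrative):
//
//	cfg, err := config.Load(workingDir, false)
//	if err != nil {
//		// handle the error
//	}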
func Load(workingDir string, debug bool) (*Config, error) {
	if cfg != nil {
		return cfg, nil
	}

	cfg = &Config{
		WorkingDir: workingDir,
		MCPServers: make(map[string]MCPServer),
		Providers:  make(map[models.ModelProvider]Provider),
		LSP:        make(map[string]LSPConfig),
	}

	configureViper()
	setDefaults(debug)
	setProviderDefaults()

	// Read global config
	if err := readConfig(viper.ReadInConfig()); err != nil {
		return cfg, err
	}

	// Load and merge local config
	mergeLocalConfig(workingDir)

	// Apply configuration to the struct
	if err := viper.Unmarshal(cfg); err != nil {
		return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
	}

	applyDefaultValues()
	defaultLevel := slog.LevelInfo
	if cfg.Debug {
		defaultLevel = slog.LevelDebug
	}
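	// When OPENCODE_DEV_DEBUG is set to "true", logs are written to a debug.log
	// file inside the data directory; otherwise they go to the in-app log writer.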
	if os.Getenv("OPENCODE_DEV_DEBUG") == "true" {
		loggingFile := fmt.Sprintf("%s/%s", cfg.Data.Directory, "debug.log")

		// Ensure the data directory exists; the log file itself is created by
		// OpenFile below via O_CREATE.
		if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
			if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
				return cfg, fmt.Errorf("failed to create directory: %w", err)
			}
		}

		sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
		if err != nil {
			return cfg, fmt.Errorf("failed to open log file: %w", err)
		}
		// Configure logger
		logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	} else {
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	}

	// Validate configuration
	if err := Validate(); err != nil {
		return cfg, fmt.Errorf("config validation failed: %w", err)
	}

	if cfg.Agents == nil {
		cfg.Agents = make(map[AgentName]Agent)
	}

	// Force a small MaxTokens for the title agent. Note that this replaces the
	// whole Agent value, keeping only the configured model.
	cfg.Agents[AgentTitle] = Agent{
		Model:     cfg.Agents[AgentTitle].Model,
		MaxTokens: 80,
	}
	return cfg, nil
}

// configureViper sets up viper's configuration paths and environment variables.
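// The global config file is named .opencode.json and is searched for in $HOME,
// $XDG_CONFIG_HOME/opencode, and $HOME/.config/opencode. Environment variables
// prefixed with OPENCODE_ are also read automatically.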
func configureViper() {
	viper.SetConfigName(fmt.Sprintf(".%s", appName))
	viper.SetConfigType("json")
	viper.AddConfigPath("$HOME")
	viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
	viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
	viper.SetEnvPrefix(strings.ToUpper(appName))
	viper.AutomaticEnv()
}

// setDefaults configures default values for configuration options.
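// Note that viper.Set (used for log.level in debug mode) takes precedence over
// values from config files, whereas SetDefault applies only when the key is not
// set elsewhere.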
func setDefaults(debug bool) {
	viper.SetDefault("data.directory", defaultDataDirectory)

	if debug {
		viper.SetDefault("debug", true)
		viper.Set("log.level", "debug")
	} else {
		viper.SetDefault("debug", false)
		viper.SetDefault("log.level", defaultLogLevel)
	}
}

// setProviderDefaults configures LLM provider defaults based on environment variables.
// Because a later viper.SetDefault call for the same key overrides an earlier one,
// the effective default-model priority is (highest first):
// 1. AWS Bedrock
// 2. Anthropic
// 3. OpenAI
// 4. Google Gemini
// 5. Groq
func setProviderDefaults() {
	// Groq configuration
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.groq.apiKey", apiKey)
		viper.SetDefault("agents.coder.model", models.QWENQwq)
		viper.SetDefault("agents.task.model", models.QWENQwq)
		viper.SetDefault("agents.title.model", models.QWENQwq)
	}

	// Google Gemini configuration
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.gemini.apiKey", apiKey)
		viper.SetDefault("agents.coder.model", models.Gemini25)
		viper.SetDefault("agents.task.model", models.Gemini25Flash)
		viper.SetDefault("agents.title.model", models.Gemini25Flash)
	}

	// OpenAI configuration
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openai.apiKey", apiKey)
		viper.SetDefault("agents.coder.model", models.GPT41)
		viper.SetDefault("agents.task.model", models.GPT41Mini)
		viper.SetDefault("agents.title.model", models.GPT41Mini)
	}

	// Anthropic configuration
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.anthropic.apiKey", apiKey)
		viper.SetDefault("agents.coder.model", models.Claude37Sonnet)
		viper.SetDefault("agents.task.model", models.Claude37Sonnet)
		viper.SetDefault("agents.title.model", models.Claude37Sonnet)
	}

	if hasAWSCredentials() {
		viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
	}
}

// hasAWSCredentials checks if AWS credentials are available in the environment.
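// Only environment variables are inspected; the shared AWS credentials file
// (~/.aws/credentials) is not consulted here.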
func hasAWSCredentials() bool {
	// Check for explicit AWS credentials
	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
		return true
	}

	// Check for an AWS profile
	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
		return true
	}

	// Check for an AWS region
	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
		return true
	}

	// Check for container credential endpoints (e.g. ECS task roles)
	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
		return true
	}

	return false
}

// readConfig handles the result of reading a configuration file.
func readConfig(err error) error {
	if err == nil {
		return nil
	}

	// It's okay if the config file doesn't exist
	if _, ok := err.(viper.ConfigFileNotFoundError); ok {
		return nil
	}

	return fmt.Errorf("failed to read config: %w", err)
}

// mergeLocalConfig loads and merges configuration from the local directory.
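// The local file is .opencode.json in workingDir; when present, its settings are
// merged into the global configuration.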
func mergeLocalConfig(workingDir string) {
	local := viper.New()
	local.SetConfigName(fmt.Sprintf(".%s", appName))
	local.SetConfigType("json")
	local.AddConfigPath(workingDir)

	// Merge local config if it exists
	if err := local.ReadInConfig(); err == nil {
		viper.MergeConfigMap(local.AllSettings())
	}
}

// applyDefaultValues sets default values for configuration fields that need processing.
func applyDefaultValues() {
	// Set default MCP type if not specified
	for k, v := range cfg.MCPServers {
		if v.Type == "" {
			v.Type = MCPStdio
			cfg.MCPServers[k] = v
		}
	}
}

// Validate checks if the configuration is valid and applies defaults where needed.
// It validates model IDs and providers, ensuring they are supported.
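// Checks include agent model and provider availability, MaxTokens bounds, and
// reasoning-effort values for OpenAI models that support reasoning.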
func Validate() error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Validate agent models
	for name, agent := range cfg.Agents {
		// Check if model exists
		model, modelExists := models.SupportedModels[agent.Model]
		if !modelExists {
			logging.Warn("unsupported model configured, reverting to default",
				"agent", name,
				"configured_model", agent.Model)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
			continue
		}

		// Check if provider for the model is configured
		provider := model.Provider
		providerCfg, providerExists := cfg.Providers[provider]

		if !providerExists {
			// Provider not configured, check if we have environment variables
			apiKey := getProviderAPIKey(provider)
			if apiKey == "" {
				logging.Warn("provider not configured for model, reverting to default",
					"agent", name,
					"model", agent.Model,
					"provider", provider)

				// Set default model based on available providers
				if setDefaultModelForAgent(name) {
					logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
				} else {
					return fmt.Errorf("no valid provider available for agent %s", name)
				}
			} else {
				// Add provider with API key from environment
				cfg.Providers[provider] = Provider{
					APIKey: apiKey,
				}
				logging.Info("added provider from environment", "provider", provider)
			}
		} else if providerCfg.Disabled || providerCfg.APIKey == "" {
			// Provider is disabled or has no API key
			logging.Warn("provider is disabled or has no API key, reverting to default",
				"agent", name,
				"model", agent.Model,
				"provider", provider)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
		}

		// Validate max tokens
		if agent.MaxTokens <= 0 {
			logging.Warn("invalid max tokens, setting to default",
				"agent", name,
				"model", agent.Model,
				"max_tokens", agent.MaxTokens)

			// Update the agent with default max tokens
			updatedAgent := cfg.Agents[name]
			if model.DefaultMaxTokens > 0 {
				updatedAgent.MaxTokens = model.DefaultMaxTokens
			} else {
				updatedAgent.MaxTokens = 4096 // Fallback default
			}
			cfg.Agents[name] = updatedAgent
		} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
			// Ensure max tokens doesn't exceed half the context window (reasonable limit)
			logging.Warn("max tokens exceeds half the context window, adjusting",
				"agent", name,
				"model", agent.Model,
				"max_tokens", agent.MaxTokens,
				"context_window", model.ContextWindow)

			// Update the agent with adjusted max tokens
			updatedAgent := cfg.Agents[name]
			updatedAgent.MaxTokens = model.ContextWindow / 2
			cfg.Agents[name] = updatedAgent
		}

		// Validate reasoning effort for models that support reasoning
		if model.CanReason && provider == models.ProviderOpenAI {
			if agent.ReasoningEffort == "" {
				// Set default reasoning effort for models that support it
				logging.Info("setting default reasoning effort for model that supports reasoning",
					"agent", name,
					"model", agent.Model)

				// Update the agent with default reasoning effort
				updatedAgent := cfg.Agents[name]
				updatedAgent.ReasoningEffort = "medium"
				cfg.Agents[name] = updatedAgent
			} else {
				// Check if reasoning effort is valid (low, medium, high)
				effort := strings.ToLower(agent.ReasoningEffort)
				if effort != "low" && effort != "medium" && effort != "high" {
					logging.Warn("invalid reasoning effort, setting to medium",
						"agent", name,
						"model", agent.Model,
						"reasoning_effort", agent.ReasoningEffort)

					// Update the agent with valid reasoning effort
					updatedAgent := cfg.Agents[name]
					updatedAgent.ReasoningEffort = "medium"
					cfg.Agents[name] = updatedAgent
				}
			}
		} else if !model.CanReason && agent.ReasoningEffort != "" {
			// Model doesn't support reasoning but reasoning effort is set
			logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
				"agent", name,
				"model", agent.Model,
				"reasoning_effort", agent.ReasoningEffort)

			// Update the agent to remove reasoning effort
			updatedAgent := cfg.Agents[name]
			updatedAgent.ReasoningEffort = ""
			cfg.Agents[name] = updatedAgent
		}
	}

	// Validate providers
	for provider, providerCfg := range cfg.Providers {
		if providerCfg.APIKey == "" && !providerCfg.Disabled {
			logging.Warn("provider has no API key, marking as disabled", "provider", provider)
			providerCfg.Disabled = true
			cfg.Providers[provider] = providerCfg
		}
	}

	// Validate LSP configurations
	for language, lspConfig := range cfg.LSP {
		if lspConfig.Command == "" && !lspConfig.Disabled {
			logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
			lspConfig.Disabled = true
			cfg.LSP[language] = lspConfig
		}
	}

	return nil
}

// getProviderAPIKey gets the API key for a provider from environment variables.
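// For Bedrock there is no single API key; a non-empty sentinel value is returned
// when AWS credentials are detected so that callers treat the provider as configured.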
func getProviderAPIKey(provider models.ModelProvider) string {
	switch provider {
	case models.ProviderAnthropic:
		return os.Getenv("ANTHROPIC_API_KEY")
	case models.ProviderOpenAI:
		return os.Getenv("OPENAI_API_KEY")
	case models.ProviderGemini:
		return os.Getenv("GEMINI_API_KEY")
	case models.ProviderGROQ:
		return os.Getenv("GROQ_API_KEY")
	case models.ProviderBedrock:
		if hasAWSCredentials() {
			return "aws-credentials-available"
		}
	}
	return ""
}

// setDefaultModelForAgent sets a default model for an agent based on available providers.
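// Providers are tried in order: Anthropic, OpenAI, Gemini, Groq, and finally AWS Bedrock.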
func setDefaultModelForAgent(agent AgentName) bool {
	// Check providers in order of preference
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:     models.Claude37Sonnet,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.GPT41Mini
			maxTokens = 80
		case AgentTask:
			model = models.GPT41Mini
		default:
			model = models.GPT41
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.Gemini25Flash
			maxTokens = 80
		} else {
			model = models.Gemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:     models.QWENQwq,
			MaxTokens: maxTokens,
		}
		return true
	}

	if hasAWSCredentials() {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:           models.BedrockClaude37Sonnet,
			MaxTokens:       maxTokens,
			ReasoningEffort: "medium", // Claude models support reasoning
		}
		return true
	}

	return false
}

// Get returns the current configuration.
// It is safe to call multiple times and returns nil if Load has not been called yet.
func Get() *Config {
	return cfg
}

// WorkingDirectory returns the current working directory from the configuration.
// It panics if the configuration has not been loaded.
func WorkingDirectory() string {
	if cfg == nil {
		panic("config not loaded")
	}
	return cfg.WorkingDir
}