// Package config manages application configuration from various sources.
package config

import (
	"fmt"
	"log/slog"
	"os"
	"strings"

	"github.com/kujtimiihoxha/opencode/internal/llm/models"
	"github.com/kujtimiihoxha/opencode/internal/logging"
	"github.com/spf13/viper"
)

// MCPType defines the type of MCP (Model Context Protocol) server.
type MCPType string

// Supported MCP types
const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)

// MCPServer defines the configuration for a Model Context Protocol server.
type MCPServer struct {
	Command string            `json:"command"`
	Env     []string          `json:"env"`
	Args    []string          `json:"args"`
	Type    MCPType           `json:"type"`
	URL     string            `json:"url"`
	Headers map[string]string `json:"headers"`
}
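
// An illustrative (hypothetical) MCP server entry in a .opencode.json file;
// the keys mirror the struct fields above and all values are placeholders:
//
//	"mcpServers": {
//	  "filesystem": {
//	    "type": "stdio",
//	    "command": "mcp-filesystem",
//	    "args": ["--root", "."],
//	    "env": ["LOG_LEVEL=debug"]
//	  }
//	}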

// AgentName identifies a configurable agent role.
type AgentName string

const (
	AgentCoder AgentName = "coder"
	AgentTask  AgentName = "task"
	AgentTitle AgentName = "title"
)

// Agent defines configuration for different LLM models and their token limits.
type Agent struct {
	Model           models.ModelID `json:"model"`
	MaxTokens       int64          `json:"maxTokens"`
	ReasoningEffort string         `json:"reasoningEffort"` // For OpenAI models: low, medium, high
}

// Provider defines configuration for an LLM provider.
type Provider struct {
	APIKey   string `json:"apiKey"`
	Disabled bool   `json:"disabled"`
}

// Data defines storage configuration.
type Data struct {
	Directory string `json:"directory"`
}

// LSPConfig defines configuration for Language Server Protocol integration.
type LSPConfig struct {
	Disabled bool     `json:"disabled"`
	Command  string   `json:"command"`
	Args     []string `json:"args"`
	Options  any      `json:"options"`
}
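
// An illustrative (hypothetical) LSP entry, keyed by language name; the
// command and args are placeholders for whatever server is installed locally:
//
//	"lsp": {
//	  "go": { "command": "gopls", "args": ["serve"] }
//	}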

// Config is the main configuration structure for the application.
type Config struct {
	Data       Data                              `json:"data"`
	WorkingDir string                            `json:"wd,omitempty"`
	MCPServers map[string]MCPServer              `json:"mcpServers,omitempty"`
	Providers  map[models.ModelProvider]Provider `json:"providers,omitempty"`
	LSP        map[string]LSPConfig              `json:"lsp,omitempty"`
	Agents     map[AgentName]Agent               `json:"agents"`
	Debug      bool                              `json:"debug,omitempty"`
	DebugLSP   bool                              `json:"debugLSP,omitempty"`
}
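
// A minimal illustrative .opencode.json (all values are placeholders; the
// model ID must match an entry in models.SupportedModels):
//
//	{
//	  "data": { "directory": ".opencode" },
//	  "providers": { "openai": { "apiKey": "sk-..." } },
//	  "agents": {
//	    "coder": { "model": "<model-id>", "maxTokens": 5000 }
//	  },
//	  "debug": false
//	}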

// Application constants
const (
	defaultDataDirectory = ".opencode"
	defaultLogLevel      = "info"
	appName              = "opencode"
)

// Global configuration instance
var cfg *Config

// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and log level is set to debug.
// It returns an error if configuration loading fails.
func Load(workingDir string, debug bool) (*Config, error) {
	if cfg != nil {
		return cfg, nil
	}

	cfg = &Config{
		WorkingDir: workingDir,
		MCPServers: make(map[string]MCPServer),
		Providers:  make(map[models.ModelProvider]Provider),
		LSP:        make(map[string]LSPConfig),
	}

	configureViper()
	setDefaults(debug)
	setProviderDefaults()

	// Read global config
	if err := readConfig(viper.ReadInConfig()); err != nil {
		return cfg, err
	}

	// Load and merge local config
	mergeLocalConfig(workingDir)

	// Apply configuration to the struct
	if err := viper.Unmarshal(cfg); err != nil {
		return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
	}

	applyDefaultValues()
	defaultLevel := slog.LevelInfo
	if cfg.Debug {
		defaultLevel = slog.LevelDebug
	}
	if os.Getenv("OPENCODE_DEV_DEBUG") == "true" {
		loggingFile := fmt.Sprintf("%s/%s", cfg.Data.Directory, "debug.log")

		// Ensure the data directory exists; O_CREATE below creates the
		// log file itself if it does not exist yet.
		if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
			return cfg, fmt.Errorf("failed to create directory: %w", err)
		}

		loggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
		if err != nil {
			return cfg, fmt.Errorf("failed to open log file: %w", err)
		}
		// Configure logger
		logger := slog.New(slog.NewTextHandler(loggingFileWriter, &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	} else {
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	}

	// Validate configuration
	if err := Validate(); err != nil {
		return cfg, fmt.Errorf("config validation failed: %w", err)
	}

	if cfg.Agents == nil {
		cfg.Agents = make(map[AgentName]Agent)
	}

	// Override the max tokens for the title agent
	cfg.Agents[AgentTitle] = Agent{
		Model:     cfg.Agents[AgentTitle].Model,
		MaxTokens: 80,
	}
	return cfg, nil
}
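
// A minimal usage sketch from a hypothetical caller outside this package;
// error handling is elided for brevity:
//
//	cwd, _ := os.Getwd()
//	cfg, err := config.Load(cwd, false)
//	if err != nil {
//		// handle error
//	}
//	_ = cfg.WorkingDir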

// configureViper sets up viper's configuration paths and environment variables.
func configureViper() {
	viper.SetConfigName(fmt.Sprintf(".%s", appName))
	viper.SetConfigType("json")
	viper.AddConfigPath("$HOME")
	viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
	viper.SetEnvPrefix(strings.ToUpper(appName))
	viper.AutomaticEnv()
}
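
// With the OPENCODE env prefix and AutomaticEnv, top-level keys can also be
// supplied from the environment, e.g. (subject to viper's env-binding rules):
//
//	OPENCODE_DEBUG=true   // maps to the "debug" key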

// setDefaults configures default values for configuration options.
func setDefaults(debug bool) {
	viper.SetDefault("data.directory", defaultDataDirectory)

	if debug {
		viper.SetDefault("debug", true)
		viper.Set("log.level", "debug")
	} else {
		viper.SetDefault("debug", false)
		viper.SetDefault("log.level", defaultLogLevel)
	}
}

// setProviderDefaults configures LLM provider defaults based on environment variables.
// Because later viper.SetDefault calls override earlier ones for the same key,
// the effective default model priority (highest first) is:
// 1. AWS Bedrock
// 2. Anthropic
// 3. OpenAI
// 4. Google Gemini
// 5. Groq
func setProviderDefaults() {
	// Groq configuration
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.groq.apiKey", apiKey)
		viper.SetDefault("agents.coder.model", models.QWENQwq)
		viper.SetDefault("agents.task.model", models.QWENQwq)
		viper.SetDefault("agents.title.model", models.QWENQwq)
	}

	// Google Gemini configuration
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.gemini.apiKey", apiKey)
		viper.SetDefault("agents.coder.model", models.Gemini25)
		viper.SetDefault("agents.task.model", models.Gemini25Flash)
		viper.SetDefault("agents.title.model", models.Gemini25Flash)
	}

	// OpenAI configuration
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openai.apiKey", apiKey)
		viper.SetDefault("agents.coder.model", models.GPT41)
		viper.SetDefault("agents.task.model", models.GPT41Mini)
		viper.SetDefault("agents.title.model", models.GPT41Mini)
	}

	// Anthropic configuration
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.anthropic.apiKey", apiKey)
		viper.SetDefault("agents.coder.model", models.Claude37Sonnet)
		viper.SetDefault("agents.task.model", models.Claude37Sonnet)
		viper.SetDefault("agents.title.model", models.Claude37Sonnet)
	}

	// AWS Bedrock configuration
	if hasAWSCredentials() {
		viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
	}
}
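
// For example (hypothetical environment): with both OPENAI_API_KEY and
// ANTHROPIC_API_KEY set and no AWS credentials, the Anthropic defaults win
// because they are applied later:
//
//	agents.coder.model -> models.Claude37Sonnet
//	agents.task.model  -> models.Claude37Sonnet
//	agents.title.model -> models.Claude37Sonnet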

// hasAWSCredentials checks if AWS credentials are available in the environment.
func hasAWSCredentials() bool {
	// Check for explicit AWS credentials
	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
		return true
	}

	// Check for AWS profile
	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
		return true
	}

	// Check for AWS region
	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
		return true
	}

	// Check for container credentials (e.g. ECS task roles)
	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
		return true
	}

	return false
}

// readConfig handles the result of reading a configuration file.
func readConfig(err error) error {
	if err == nil {
		return nil
	}

	// It's okay if the config file doesn't exist
	if _, ok := err.(viper.ConfigFileNotFoundError); ok {
		return nil
	}

	return fmt.Errorf("failed to read config: %w", err)
}

// mergeLocalConfig loads and merges configuration from the local directory.
func mergeLocalConfig(workingDir string) {
	local := viper.New()
	local.SetConfigName(fmt.Sprintf(".%s", appName))
	local.SetConfigType("json")
	local.AddConfigPath(workingDir)

	// Merge local config if it exists
	if err := local.ReadInConfig(); err == nil {
		viper.MergeConfigMap(local.AllSettings())
	}
}
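
// For example (hypothetical files): a global ~/.opencode.json containing
// {"debug": false} and a project-local .opencode.json containing
// {"debug": true} yield debug=true for that working directory, since local
// settings are merged over the global ones.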

// applyDefaultValues sets default values for configuration fields that need processing.
func applyDefaultValues() {
	// Set default MCP type if not specified
	for k, v := range cfg.MCPServers {
		if v.Type == "" {
			v.Type = MCPStdio
			cfg.MCPServers[k] = v
		}
	}
}

// Validate checks if the configuration is valid and applies defaults where needed.
// It validates model IDs and providers, ensuring they are supported.
func Validate() error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Validate agent models
	for name, agent := range cfg.Agents {
		// Check if model exists
		model, modelExists := models.SupportedModels[agent.Model]
		if !modelExists {
			logging.Warn("unsupported model configured, reverting to default",
				"agent", name,
				"configured_model", agent.Model)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
			continue
		}

		// Check if provider for the model is configured
		provider := model.Provider
		providerCfg, providerExists := cfg.Providers[provider]

		if !providerExists {
			// Provider not configured, check if we have environment variables
			apiKey := getProviderAPIKey(provider)
			if apiKey == "" {
				logging.Warn("provider not configured for model, reverting to default",
					"agent", name,
					"model", agent.Model,
					"provider", provider)

				// Set default model based on available providers
				if setDefaultModelForAgent(name) {
					logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
				} else {
					return fmt.Errorf("no valid provider available for agent %s", name)
				}
			} else {
				// Add provider with API key from environment
				cfg.Providers[provider] = Provider{
					APIKey: apiKey,
				}
				logging.Info("added provider from environment", "provider", provider)
			}
		} else if providerCfg.Disabled || providerCfg.APIKey == "" {
			// Provider is disabled or has no API key
			logging.Warn("provider is disabled or has no API key, reverting to default",
				"agent", name,
				"model", agent.Model,
				"provider", provider)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
		}

		// Validate max tokens
		if agent.MaxTokens <= 0 {
			logging.Warn("invalid max tokens, setting to default",
				"agent", name,
				"model", agent.Model,
				"max_tokens", agent.MaxTokens)

			// Update the agent with default max tokens
			updatedAgent := cfg.Agents[name]
			if model.DefaultMaxTokens > 0 {
				updatedAgent.MaxTokens = model.DefaultMaxTokens
			} else {
				updatedAgent.MaxTokens = 4096 // Fallback default
			}
			cfg.Agents[name] = updatedAgent
		} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
			// Ensure max tokens doesn't exceed half the context window (reasonable limit)
			logging.Warn("max tokens exceeds half the context window, adjusting",
				"agent", name,
				"model", agent.Model,
				"max_tokens", agent.MaxTokens,
				"context_window", model.ContextWindow)

			// Update the agent with adjusted max tokens
			updatedAgent := cfg.Agents[name]
			updatedAgent.MaxTokens = model.ContextWindow / 2
			cfg.Agents[name] = updatedAgent
		}

		// Validate reasoning effort for models that support reasoning
		if model.CanReason && provider == models.ProviderOpenAI {
			if agent.ReasoningEffort == "" {
				// Set default reasoning effort for models that support it
				logging.Info("setting default reasoning effort for model that supports reasoning",
					"agent", name,
					"model", agent.Model)

				// Update the agent with default reasoning effort
				updatedAgent := cfg.Agents[name]
				updatedAgent.ReasoningEffort = "medium"
				cfg.Agents[name] = updatedAgent
			} else {
				// Check if reasoning effort is valid (low, medium, high)
				effort := strings.ToLower(agent.ReasoningEffort)
				if effort != "low" && effort != "medium" && effort != "high" {
					logging.Warn("invalid reasoning effort, setting to medium",
						"agent", name,
						"model", agent.Model,
						"reasoning_effort", agent.ReasoningEffort)

					// Update the agent with valid reasoning effort
					updatedAgent := cfg.Agents[name]
					updatedAgent.ReasoningEffort = "medium"
					cfg.Agents[name] = updatedAgent
				}
			}
		} else if !model.CanReason && agent.ReasoningEffort != "" {
			// Model doesn't support reasoning but reasoning effort is set
			logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
				"agent", name,
				"model", agent.Model,
				"reasoning_effort", agent.ReasoningEffort)

			// Update the agent to remove reasoning effort
			updatedAgent := cfg.Agents[name]
			updatedAgent.ReasoningEffort = ""
			cfg.Agents[name] = updatedAgent
		}
	}

	// Validate providers
	for provider, providerCfg := range cfg.Providers {
		if providerCfg.APIKey == "" && !providerCfg.Disabled {
			logging.Warn("provider has no API key, marking as disabled", "provider", provider)
			providerCfg.Disabled = true
			cfg.Providers[provider] = providerCfg
		}
	}

	// Validate LSP configurations
	for language, lspConfig := range cfg.LSP {
		if lspConfig.Command == "" && !lspConfig.Disabled {
			logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
			lspConfig.Disabled = true
			cfg.LSP[language] = lspConfig
		}
	}

	return nil
}

// getProviderAPIKey gets the API key for a provider from environment variables.
func getProviderAPIKey(provider models.ModelProvider) string {
	switch provider {
	case models.ProviderAnthropic:
		return os.Getenv("ANTHROPIC_API_KEY")
	case models.ProviderOpenAI:
		return os.Getenv("OPENAI_API_KEY")
	case models.ProviderGemini:
		return os.Getenv("GEMINI_API_KEY")
	case models.ProviderGROQ:
		return os.Getenv("GROQ_API_KEY")
	case models.ProviderBedrock:
		if hasAWSCredentials() {
			return "aws-credentials-available"
		}
	}
	return ""
}

// setDefaultModelForAgent sets a default model for an agent based on available providers.
func setDefaultModelForAgent(agent AgentName) bool {
	// Check providers in order of preference
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:     models.Claude37Sonnet,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.GPT41Mini
			maxTokens = 80
		case AgentTask:
			model = models.GPT41Mini
		default:
			model = models.GPT41
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.Gemini25Flash
			maxTokens = 80
		} else {
			model = models.Gemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:     models.QWENQwq,
			MaxTokens: maxTokens,
		}
		return true
	}

	if hasAWSCredentials() {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:           models.BedrockClaude37Sonnet,
			MaxTokens:       maxTokens,
			ReasoningEffort: "medium", // Claude models support reasoning
		}
		return true
	}

	return false
}

// Get returns the current configuration, or nil if Load has not been called.
// It is safe to call multiple times; subsequent calls return the same instance.
func Get() *Config {
	return cfg
}

// WorkingDirectory returns the current working directory from the configuration.
// It panics if the configuration has not been loaded.
func WorkingDirectory() string {
	if cfg == nil {
		panic("config not loaded")
	}
	return cfg.WorkingDir
}