// Package config manages application configuration from various sources.
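//
// Typical usage (illustrative sketch; error handling elided):
//
//	cfg, err := config.Load(workingDir, false)
//	if err != nil { /* handle error */ }
//	theme := cfg.TUI.Theme // Get() returns the same instance on later calls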
package config

import (
	"encoding/json"
	"fmt"
	"log/slog"
	"os"
	"path/filepath"
	"strings"

	"github.com/opencode-ai/opencode/internal/llm/models"
	"github.com/opencode-ai/opencode/internal/logging"
	"github.com/spf13/viper"
)

// MCPType defines the type of MCP (Model Context Protocol) server.
type MCPType string

// Supported MCP types
const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)

// MCPServer defines the configuration for a Model Context Protocol server.
type MCPServer struct {
	Command string            `json:"command"`
	Env     []string          `json:"env"`
	Args    []string          `json:"args"`
	Type    MCPType           `json:"type"`
	URL     string            `json:"url"`
	Headers map[string]string `json:"headers"`
}

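// AgentName identifies one of the built-in agent roles.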
type AgentName string

const (
	AgentCoder      AgentName = "coder"
	AgentSummarizer AgentName = "summarizer"
	AgentTask       AgentName = "task"
	AgentTitle      AgentName = "title"
)

// Agent defines configuration for different LLM models and their token limits.
type Agent struct {
	Model           models.ModelID `json:"model"`
	MaxTokens       int64          `json:"maxTokens"`
	ReasoningEffort string         `json:"reasoningEffort"` // For OpenAI models: low, medium, or high
}

// Provider defines configuration for an LLM provider.
type Provider struct {
	APIKey   string `json:"apiKey"`
	Disabled bool   `json:"disabled"`
}

// Data defines storage configuration.
type Data struct {
	Directory string `json:"directory,omitempty"`
}

// LSPConfig defines configuration for Language Server Protocol integration.
type LSPConfig struct {
	Disabled bool     `json:"disabled"`
	Command  string   `json:"command"`
	Args     []string `json:"args"`
	Options  any      `json:"options"`
}

// TUIConfig defines the configuration for the Terminal User Interface.
type TUIConfig struct {
	Theme string `json:"theme,omitempty"`
}

// ShellConfig defines the configuration for the shell used by the bash tool.
type ShellConfig struct {
	Path string   `json:"path,omitempty"`
	Args []string `json:"args,omitempty"`
}

// Config is the main configuration structure for the application.
type Config struct {
	Data         Data                              `json:"data"`
	WorkingDir   string                            `json:"wd,omitempty"`
	MCPServers   map[string]MCPServer              `json:"mcpServers,omitempty"`
	Providers    map[models.ModelProvider]Provider `json:"providers,omitempty"`
	LSP          map[string]LSPConfig              `json:"lsp,omitempty"`
	Agents       map[AgentName]Agent               `json:"agents,omitempty"`
	Debug        bool                              `json:"debug,omitempty"`
	DebugLSP     bool                              `json:"debugLSP,omitempty"`
	ContextPaths []string                          `json:"contextPaths,omitempty"`
	TUI          TUIConfig                         `json:"tui"`
	Shell        ShellConfig                       `json:"shell,omitempty"`
	AutoCompact  bool                              `json:"autoCompact,omitempty"`
}

// Application constants
const (
	defaultDataDirectory = ".opencode"
	defaultLogLevel      = "info"
	appName              = "opencode"

	MaxTokensFallbackDefault = 4096
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"opencode.md",
	"opencode.local.md",
	"OpenCode.md",
	"OpenCode.local.md",
	"OPENCODE.md",
	"OPENCODE.local.md",
}

// Global configuration instance
var cfg *Config

// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and log level is set to debug.
// It returns an error if configuration loading fails.
func Load(workingDir string, debug bool) (*Config, error) {
	if cfg != nil {
		return cfg, nil
	}

	cfg = &Config{
		WorkingDir: workingDir,
		MCPServers: make(map[string]MCPServer),
		Providers:  make(map[models.ModelProvider]Provider),
		LSP:        make(map[string]LSPConfig),
	}

	configureViper()
	setDefaults(debug)

	// Read global config
	if err := readConfig(viper.ReadInConfig()); err != nil {
		return cfg, err
	}

	// Load and merge local config
	mergeLocalConfig(workingDir)

	setProviderDefaults()

	// Apply configuration to the struct
	if err := viper.Unmarshal(cfg); err != nil {
		return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
	}

	applyDefaultValues()
	defaultLevel := slog.LevelInfo
	if cfg.Debug {
		defaultLevel = slog.LevelDebug
	}
	if os.Getenv("OPENCODE_DEV_DEBUG") == "true" {
		loggingFile := filepath.Join(cfg.Data.Directory, "debug.log")

		// Create the log file if it does not exist yet
		if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
			if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
				return cfg, fmt.Errorf("failed to create directory: %w", err)
			}
			if _, err := os.Create(loggingFile); err != nil {
				return cfg, fmt.Errorf("failed to create log file: %w", err)
			}
		}

		sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
		if err != nil {
			return cfg, fmt.Errorf("failed to open log file: %w", err)
		}
		// Configure logger
		logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	} else {
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	}

	// Validate configuration
	if err := Validate(); err != nil {
		return cfg, fmt.Errorf("config validation failed: %w", err)
	}

	if cfg.Agents == nil {
		cfg.Agents = make(map[AgentName]Agent)
	}

	// Override the max tokens for the title agent
	cfg.Agents[AgentTitle] = Agent{
		Model:     cfg.Agents[AgentTitle].Model,
		MaxTokens: 80,
	}
	return cfg, nil
}

// configureViper sets up viper's configuration paths and environment variables.
func configureViper() {
	viper.SetConfigName(fmt.Sprintf(".%s", appName))
	viper.SetConfigType("json")
	viper.AddConfigPath("$HOME")
	viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
	viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
	viper.SetEnvPrefix(strings.ToUpper(appName))
	viper.AutomaticEnv()
}

// setDefaults configures default values for configuration options.
func setDefaults(debug bool) {
	viper.SetDefault("data.directory", defaultDataDirectory)
	viper.SetDefault("contextPaths", defaultContextPaths)
	viper.SetDefault("tui.theme", "opencode")
	viper.SetDefault("autoCompact", true)

	// Set default shell from environment or fallback to /bin/bash
	shellPath := os.Getenv("SHELL")
	if shellPath == "" {
		shellPath = "/bin/bash"
	}
	viper.SetDefault("shell.path", shellPath)
	viper.SetDefault("shell.args", []string{"-l"})

	if debug {
		viper.SetDefault("debug", true)
		viper.Set("log.level", "debug")
	} else {
		viper.SetDefault("debug", false)
		viper.SetDefault("log.level", defaultLogLevel)
	}
}

// setProviderDefaults configures default providers and agent models based on
// the API keys found in the environment and the configuration file.
func setProviderDefaults() {
	// Set all API keys we can find in the environment
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.anthropic.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openai.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.gemini.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.groq.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openrouter.apiKey", apiKey)
	}
	if apiKey := os.Getenv("XAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.xai.apiKey", apiKey)
	}
	if endpoint := os.Getenv("AZURE_OPENAI_ENDPOINT"); endpoint != "" {
		// api-key may be empty when using Entra ID credentials – that's okay
		viper.SetDefault("providers.azure.apiKey", os.Getenv("AZURE_OPENAI_API_KEY"))
	}

	// Use this order to set the default models
	// 1. Anthropic
	// 2. OpenAI
	// 3. Google Gemini
	// 4. Groq
	// 5. OpenRouter
	// 6. AWS Bedrock
	// 7. Azure

	// Anthropic configuration
	if key := viper.GetString("providers.anthropic.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.Claude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.Claude37Sonnet)
		viper.SetDefault("agents.task.model", models.Claude37Sonnet)
		viper.SetDefault("agents.title.model", models.Claude37Sonnet)
		return
	}

	// OpenAI configuration
	if key := viper.GetString("providers.openai.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.GPT41)
		viper.SetDefault("agents.summarizer.model", models.GPT41)
		viper.SetDefault("agents.task.model", models.GPT41Mini)
		viper.SetDefault("agents.title.model", models.GPT41Mini)
		return
	}

	// Google Gemini configuration
	if key := viper.GetString("providers.gemini.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.Gemini25)
		viper.SetDefault("agents.summarizer.model", models.Gemini25)
		viper.SetDefault("agents.task.model", models.Gemini25Flash)
		viper.SetDefault("agents.title.model", models.Gemini25Flash)
		return
	}

	// Groq configuration
	if key := viper.GetString("providers.groq.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.QWENQwq)
		viper.SetDefault("agents.summarizer.model", models.QWENQwq)
		viper.SetDefault("agents.task.model", models.QWENQwq)
		viper.SetDefault("agents.title.model", models.QWENQwq)
		return
	}

	// OpenRouter configuration
	if key := viper.GetString("providers.openrouter.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.OpenRouterClaude35Haiku)
		return
	}

	// XAI configuration
	if key := viper.GetString("providers.xai.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.summarizer.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.task.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.title.model", models.XAiGrok3MiniFastBeta)
		return
	}

	// AWS Bedrock configuration
	if hasAWSCredentials() {
		viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
		return
	}

	// Azure OpenAI configuration
	if os.Getenv("AZURE_OPENAI_ENDPOINT") != "" {
		viper.SetDefault("agents.coder.model", models.AzureGPT41)
		viper.SetDefault("agents.summarizer.model", models.AzureGPT41)
		viper.SetDefault("agents.task.model", models.AzureGPT41Mini)
		viper.SetDefault("agents.title.model", models.AzureGPT41Mini)
		return
	}
}

// hasAWSCredentials checks if AWS credentials are available in the environment.
func hasAWSCredentials() bool {
	// Check for explicit AWS credentials
	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
		return true
	}

	// Check for AWS profile
	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
		return true
	}

	// Check for AWS region
	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
		return true
	}

	// Check for container credentials (e.g. ECS task roles)
	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
		return true
	}

	return false
}

// readConfig handles the result of reading a configuration file.
func readConfig(err error) error {
	if err == nil {
		return nil
	}

	// It's okay if the config file doesn't exist
	if _, ok := err.(viper.ConfigFileNotFoundError); ok {
		return nil
	}

	return fmt.Errorf("failed to read config: %w", err)
}

// mergeLocalConfig loads and merges configuration from the local directory.
func mergeLocalConfig(workingDir string) {
	local := viper.New()
	local.SetConfigName(fmt.Sprintf(".%s", appName))
	local.SetConfigType("json")
	local.AddConfigPath(workingDir)

	// Merge local config if it exists
	if err := local.ReadInConfig(); err == nil {
		viper.MergeConfigMap(local.AllSettings())
	}
}

// applyDefaultValues sets default values for configuration fields that need processing.
func applyDefaultValues() {
	// Set default MCP type if not specified
	for k, v := range cfg.MCPServers {
		if v.Type == "" {
			v.Type = MCPStdio
			cfg.MCPServers[k] = v
		}
	}
}

// validateAgent checks a single agent's configuration, ensuring its model and
// provider are supported, and falls back to defaults when they are not.
func validateAgent(cfg *Config, name AgentName, agent Agent) error {
	// Check if model exists
	model, modelExists := models.SupportedModels[agent.Model]
	if !modelExists {
		logging.Warn("unsupported model configured, reverting to default",
			"agent", name,
			"configured_model", agent.Model)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
		return nil
	}

	// Check if provider for the model is configured
	provider := model.Provider
	providerCfg, providerExists := cfg.Providers[provider]

	if !providerExists {
		// Provider not configured, check if we have environment variables
		apiKey := getProviderAPIKey(provider)
		if apiKey == "" {
			logging.Warn("provider not configured for model, reverting to default",
				"agent", name,
				"model", agent.Model,
				"provider", provider)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
		} else {
			// Add provider with API key from environment
			cfg.Providers[provider] = Provider{
				APIKey: apiKey,
			}
			logging.Info("added provider from environment", "provider", provider)
		}
	} else if providerCfg.Disabled || providerCfg.APIKey == "" {
		// Provider is disabled or has no API key
		logging.Warn("provider is disabled or has no API key, reverting to default",
			"agent", name,
			"model", agent.Model,
			"provider", provider)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
	}

	// Validate max tokens
	if agent.MaxTokens <= 0 {
		logging.Warn("invalid max tokens, setting to default",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens)

		// Update the agent with default max tokens
		updatedAgent := cfg.Agents[name]
		if model.DefaultMaxTokens > 0 {
			updatedAgent.MaxTokens = model.DefaultMaxTokens
		} else {
			updatedAgent.MaxTokens = MaxTokensFallbackDefault
		}
		cfg.Agents[name] = updatedAgent
	} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
		// Ensure max tokens doesn't exceed half the context window (reasonable limit)
		logging.Warn("max tokens exceeds half the context window, adjusting",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens,
			"context_window", model.ContextWindow)

		// Update the agent with adjusted max tokens
		updatedAgent := cfg.Agents[name]
		updatedAgent.MaxTokens = model.ContextWindow / 2
		cfg.Agents[name] = updatedAgent
	}

	// Validate reasoning effort for models that support reasoning
	if model.CanReason && provider == models.ProviderOpenAI {
		if agent.ReasoningEffort == "" {
			// Set default reasoning effort for models that support it
			logging.Info("setting default reasoning effort for model that supports reasoning",
				"agent", name,
				"model", agent.Model)

			// Update the agent with default reasoning effort
			updatedAgent := cfg.Agents[name]
			updatedAgent.ReasoningEffort = "medium"
			cfg.Agents[name] = updatedAgent
		} else {
			// Check if reasoning effort is valid (low, medium, high)
			effort := strings.ToLower(agent.ReasoningEffort)
			if effort != "low" && effort != "medium" && effort != "high" {
				logging.Warn("invalid reasoning effort, setting to medium",
					"agent", name,
					"model", agent.Model,
					"reasoning_effort", agent.ReasoningEffort)

				// Update the agent with valid reasoning effort
				updatedAgent := cfg.Agents[name]
				updatedAgent.ReasoningEffort = "medium"
				cfg.Agents[name] = updatedAgent
			}
		}
	} else if !model.CanReason && agent.ReasoningEffort != "" {
		// Model doesn't support reasoning but reasoning effort is set
		logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
			"agent", name,
			"model", agent.Model,
			"reasoning_effort", agent.ReasoningEffort)

		// Update the agent to remove reasoning effort
		updatedAgent := cfg.Agents[name]
		updatedAgent.ReasoningEffort = ""
		cfg.Agents[name] = updatedAgent
	}

	return nil
}

// Validate checks if the configuration is valid and applies defaults where needed.
func Validate() error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Validate agent models
	for name, agent := range cfg.Agents {
		if err := validateAgent(cfg, name, agent); err != nil {
			return err
		}
	}

	// Validate providers
	for provider, providerCfg := range cfg.Providers {
		if providerCfg.APIKey == "" && !providerCfg.Disabled {
			logging.Warn("provider has no API key, marking as disabled", "provider", provider)
			providerCfg.Disabled = true
			cfg.Providers[provider] = providerCfg
		}
	}

	// Validate LSP configurations
	for language, lspConfig := range cfg.LSP {
		if lspConfig.Command == "" && !lspConfig.Disabled {
			logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
			lspConfig.Disabled = true
			cfg.LSP[language] = lspConfig
		}
	}

	return nil
}

// getProviderAPIKey gets the API key for a provider from environment variables
func getProviderAPIKey(provider models.ModelProvider) string {
	switch provider {
	case models.ProviderAnthropic:
		return os.Getenv("ANTHROPIC_API_KEY")
	case models.ProviderOpenAI:
		return os.Getenv("OPENAI_API_KEY")
	case models.ProviderGemini:
		return os.Getenv("GEMINI_API_KEY")
	case models.ProviderGROQ:
		return os.Getenv("GROQ_API_KEY")
	case models.ProviderAzure:
		return os.Getenv("AZURE_OPENAI_API_KEY")
	case models.ProviderOpenRouter:
		return os.Getenv("OPENROUTER_API_KEY")
	case models.ProviderBedrock:
		if hasAWSCredentials() {
			return "aws-credentials-available"
		}
	}
	return ""
}

// setDefaultModelForAgent sets a default model for an agent based on available providers
func setDefaultModelForAgent(agent AgentName) bool {
	// Check providers in order of preference
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:     models.Claude37Sonnet,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.GPT41Mini
			maxTokens = 80
		case AgentTask:
			model = models.GPT41Mini
		default:
			model = models.GPT41
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.OpenRouterClaude35Haiku
			maxTokens = 80
		case AgentTask:
			model = models.OpenRouterClaude37Sonnet
		default:
			model = models.OpenRouterClaude37Sonnet
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.Gemini25Flash
			maxTokens = 80
		} else {
			model = models.Gemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:     models.QWENQwq,
			MaxTokens: maxTokens,
		}
		return true
	}

	if hasAWSCredentials() {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:           models.BedrockClaude37Sonnet,
			MaxTokens:       maxTokens,
			ReasoningEffort: "medium", // Claude models support reasoning
		}
		return true
	}

	return false
}

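// updateCfgFile reads the user's config file (creating a new one in the home
// directory if none is in use), applies updateCfg to it, and writes it back.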
func updateCfgFile(updateCfg func(config *Config)) error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Get the config file path
	configFile := viper.ConfigFileUsed()
	var configData []byte
	if configFile == "" {
		homeDir, err := os.UserHomeDir()
		if err != nil {
			return fmt.Errorf("failed to get home directory: %w", err)
		}
		configFile = filepath.Join(homeDir, fmt.Sprintf(".%s.json", appName))
		logging.Info("config file not found, creating new one", "path", configFile)
		configData = []byte(`{}`)
	} else {
		// Read the existing config file
		data, err := os.ReadFile(configFile)
		if err != nil {
			return fmt.Errorf("failed to read config file: %w", err)
		}
		configData = data
	}

	// Parse the JSON
	var userCfg *Config
	if err := json.Unmarshal(configData, &userCfg); err != nil {
		return fmt.Errorf("failed to parse config file: %w", err)
	}

	updateCfg(userCfg)

	// Write the updated config back to file
	updatedData, err := json.MarshalIndent(userCfg, "", " ")
	if err != nil {
		return fmt.Errorf("failed to marshal config: %w", err)
	}

	if err := os.WriteFile(configFile, updatedData, 0o644); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}

	return nil
}

// Get returns the current configuration.
// It's safe to call this function multiple times.
func Get() *Config {
	return cfg
}

// WorkingDirectory returns the current working directory from the configuration.
func WorkingDirectory() string {
	if cfg == nil {
		panic("config not loaded")
	}
	return cfg.WorkingDir
}

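// UpdateAgentModel switches the given agent to the given model, validates the
// result, and persists the change to the config file.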
func UpdateAgentModel(agentName AgentName, modelID models.ModelID) error {
	if cfg == nil {
		panic("config not loaded")
	}

	existingAgentCfg := cfg.Agents[agentName]

	model, ok := models.SupportedModels[modelID]
	if !ok {
		return fmt.Errorf("model %s not supported", modelID)
	}

	maxTokens := existingAgentCfg.MaxTokens
	if model.DefaultMaxTokens > 0 {
		maxTokens = model.DefaultMaxTokens
	}

	newAgentCfg := Agent{
		Model:           modelID,
		MaxTokens:       maxTokens,
		ReasoningEffort: existingAgentCfg.ReasoningEffort,
	}
	cfg.Agents[agentName] = newAgentCfg

	if err := validateAgent(cfg, agentName, newAgentCfg); err != nil {
		// revert config update on failure
		cfg.Agents[agentName] = existingAgentCfg
		return fmt.Errorf("failed to update agent model: %w", err)
	}

	return updateCfgFile(func(config *Config) {
		if config.Agents == nil {
			config.Agents = make(map[AgentName]Agent)
		}
		config.Agents[agentName] = newAgentCfg
	})
}

// UpdateTheme updates the theme in the configuration and writes it to the config file.
func UpdateTheme(themeName string) error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Update the in-memory config
	cfg.TUI.Theme = themeName

	// Update the file config
	return updateCfgFile(func(config *Config) {
		config.TUI.Theme = themeName
	})
}