// Package config manages application configuration from various sources.
package config

import (
	"encoding/json"
	"fmt"
	"log/slog"
	"os"
	"path/filepath"
	"runtime"
	"strings"

	"github.com/opencode-ai/opencode/internal/llm/models"
	"github.com/opencode-ai/opencode/internal/logging"
	"github.com/spf13/viper"
)

// MCPType defines the type of MCP (Model Context Protocol) server.
type MCPType string

// Supported MCP types
const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)

// MCPServer defines the configuration for a Model Context Protocol server.
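//
// An illustrative "mcpServers" snippet (keys follow the struct tags below; the
// server names and command are made-up examples, not defaults):
//
//	"mcpServers": {
//	  "filesystem": {
//	    "type": "stdio",
//	    "command": "mcp-filesystem-server",
//	    "args": ["--root", "."],
//	    "env": ["DEBUG=1"]
//	  },
//	  "search": {
//	    "type": "sse",
//	    "url": "https://example.com/mcp/sse",
//	    "headers": {"Authorization": "Bearer <token>"}
//	  }
//	}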
type MCPServer struct {
	Command string            `json:"command"`
	Env     []string          `json:"env"`
	Args    []string          `json:"args"`
	Type    MCPType           `json:"type"`
	URL     string            `json:"url"`
	Headers map[string]string `json:"headers"`
}

// AgentName identifies one of the application's built-in agent roles.
type AgentName string

const (
	AgentCoder      AgentName = "coder"
	AgentSummarizer AgentName = "summarizer"
	AgentTask       AgentName = "task"
	AgentTitle      AgentName = "title"
)

// Agent defines configuration for different LLM models and their token limits.
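//
// For example, a hypothetical "agents" entry in the JSON config (the model ID
// shown is only a placeholder; valid IDs come from models.SupportedModels):
//
//	"agents": {
//	  "coder": {
//	    "model": "gpt-4.1",
//	    "maxTokens": 8000,
//	    "reasoningEffort": "medium"
//	  }
//	}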
type Agent struct {
	Model           models.ModelID `json:"model"`
	MaxTokens       int64          `json:"maxTokens"`
	ReasoningEffort string         `json:"reasoningEffort"` // For OpenAI models: low, medium, high
}

// Provider defines configuration for an LLM provider.
type Provider struct {
	APIKey   string `json:"apiKey"`
	Disabled bool   `json:"disabled"`
}

// Data defines storage configuration.
type Data struct {
	Directory string `json:"directory,omitempty"`
}

// LSPConfig defines configuration for Language Server Protocol integration.
type LSPConfig struct {
	Disabled bool     `json:"disabled"`
	Command  string   `json:"command"`
	Args     []string `json:"args"`
	Options  any      `json:"options"`
}

// TUIConfig defines the configuration for the Terminal User Interface.
type TUIConfig struct {
	Theme string `json:"theme,omitempty"`
}

// ShellConfig defines the configuration for the shell used by the bash tool.
type ShellConfig struct {
	Path string   `json:"path,omitempty"`
	Args []string `json:"args,omitempty"`
}

// Config is the main configuration structure for the application.
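//
// A minimal illustrative .opencode.json (keys follow the struct tags; the
// gopls entry, API key value, and shell path are placeholders, not defaults):
//
//	{
//	  "providers": {
//	    "openai": {"apiKey": "sk-..."}
//	  },
//	  "lsp": {
//	    "go": {"command": "gopls"}
//	  },
//	  "shell": {"path": "/bin/zsh", "args": ["-l"]},
//	  "tui": {"theme": "opencode"}
//	}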
type Config struct {
	Data         Data                              `json:"data"`
	WorkingDir   string                            `json:"wd,omitempty"`
	MCPServers   map[string]MCPServer              `json:"mcpServers,omitempty"`
	Providers    map[models.ModelProvider]Provider `json:"providers,omitempty"`
	LSP          map[string]LSPConfig              `json:"lsp,omitempty"`
	Agents       map[AgentName]Agent               `json:"agents,omitempty"`
	Debug        bool                              `json:"debug,omitempty"`
	DebugLSP     bool                              `json:"debugLSP,omitempty"`
	ContextPaths []string                          `json:"contextPaths,omitempty"`
	TUI          TUIConfig                         `json:"tui"`
	Shell        ShellConfig                       `json:"shell,omitempty"`
	AutoCompact  bool                              `json:"autoCompact,omitempty"`
}

// Application constants
const (
	defaultDataDirectory = ".opencode"
	defaultLogLevel      = "info"
	appName              = "opencode"

	MaxTokensFallbackDefault = 4096
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"opencode.md",
	"opencode.local.md",
	"OpenCode.md",
	"OpenCode.local.md",
	"OPENCODE.md",
	"OPENCODE.local.md",
}

// Global configuration instance
var cfg *Config

// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and log level is set to debug.
// It returns an error if configuration loading fails.
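//
// A typical call, as a sketch (error handling elided):
//
//	cfg, err := config.Load(".", false)
//	if err != nil {
//		// handle error
//	}
//	_ = cfg.WorkingDir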
func Load(workingDir string, debug bool) (*Config, error) {
	if cfg != nil {
		return cfg, nil
	}

	cfg = &Config{
		WorkingDir: workingDir,
		MCPServers: make(map[string]MCPServer),
		Providers:  make(map[models.ModelProvider]Provider),
		LSP:        make(map[string]LSPConfig),
	}

	configureViper()
	setDefaults(debug)

	// Read global config
	if err := readConfig(viper.ReadInConfig()); err != nil {
		return cfg, err
	}

	// Load and merge local config
	mergeLocalConfig(workingDir)

	setProviderDefaults()

	// Apply configuration to the struct
	if err := viper.Unmarshal(cfg); err != nil {
		return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
	}

	applyDefaultValues()
	defaultLevel := slog.LevelInfo
	if cfg.Debug {
		defaultLevel = slog.LevelDebug
	}
	if os.Getenv("OPENCODE_DEV_DEBUG") == "true" {
		loggingFile := filepath.Join(cfg.Data.Directory, "debug.log")
		messagesPath := filepath.Join(cfg.Data.Directory, "messages")

		// If the log file does not exist, create it
		if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
			if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
				return cfg, fmt.Errorf("failed to create directory: %w", err)
			}
			if _, err := os.Create(loggingFile); err != nil {
				return cfg, fmt.Errorf("failed to create log file: %w", err)
			}
		}

		if _, err := os.Stat(messagesPath); os.IsNotExist(err) {
			if err := os.MkdirAll(messagesPath, 0o755); err != nil {
				return cfg, fmt.Errorf("failed to create directory: %w", err)
			}
		}
		logging.MessageDir = messagesPath

		sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
		if err != nil {
			return cfg, fmt.Errorf("failed to open log file: %w", err)
		}
		// Configure logger
		logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	} else {
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	}

	// Validate configuration
	if err := Validate(); err != nil {
		return cfg, fmt.Errorf("config validation failed: %w", err)
	}

	if cfg.Agents == nil {
		cfg.Agents = make(map[AgentName]Agent)
	}

	// Override the max tokens for title agent
	cfg.Agents[AgentTitle] = Agent{
		Model:     cfg.Agents[AgentTitle].Model,
		MaxTokens: 80,
	}
	return cfg, nil
}

// configureViper sets up viper's configuration paths and environment variables.
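//
// With these settings the global config is a JSON file named ".opencode"
// (e.g. $HOME/.opencode.json), searched for in $HOME, $XDG_CONFIG_HOME/opencode,
// and $HOME/.config/opencode, in that order, and environment variables prefixed
// with OPENCODE_ are picked up automatically.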
func configureViper() {
	viper.SetConfigName(fmt.Sprintf(".%s", appName))
	viper.SetConfigType("json")
	viper.AddConfigPath("$HOME")
	viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
	viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
	viper.SetEnvPrefix(strings.ToUpper(appName))
	viper.AutomaticEnv()
}

// setDefaults configures default values for configuration options.
func setDefaults(debug bool) {
	viper.SetDefault("data.directory", defaultDataDirectory)
	viper.SetDefault("contextPaths", defaultContextPaths)
	viper.SetDefault("tui.theme", "opencode")
	viper.SetDefault("autoCompact", true)

	// Set default shell from environment or fallback to /bin/bash
	shellPath := os.Getenv("SHELL")
	if shellPath == "" {
		shellPath = "/bin/bash"
	}
	viper.SetDefault("shell.path", shellPath)
	viper.SetDefault("shell.args", []string{"-l"})

	if debug {
		viper.SetDefault("debug", true)
		viper.Set("log.level", "debug")
	} else {
		viper.SetDefault("debug", false)
		viper.SetDefault("log.level", defaultLogLevel)
	}
}

// setProviderDefaults configures LLM provider and default model settings based
// on credentials found in the environment and the configuration file.
func setProviderDefaults() {
	// Set all API keys we can find in the environment
	// Note: viper will not fall back to the default if the JSON config sets apiKey to ""
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.anthropic.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openai.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.gemini.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.groq.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openrouter.apiKey", apiKey)
	}
	if apiKey := os.Getenv("XAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.xai.apiKey", apiKey)
	}
	if endpoint := os.Getenv("AZURE_OPENAI_ENDPOINT"); endpoint != "" {
		// api-key may be empty when using Entra ID credentials – that's okay
		viper.SetDefault("providers.azure.apiKey", os.Getenv("AZURE_OPENAI_API_KEY"))
	}
	if apiKey, err := LoadGitHubToken(); err == nil && apiKey != "" {
		viper.SetDefault("providers.copilot.apiKey", apiKey)
		if viper.GetString("providers.copilot.apiKey") == "" {
			viper.Set("providers.copilot.apiKey", apiKey)
		}
	}

	// Use this order to set the default models
	// 1. Copilot
	// 2. Anthropic
	// 3. OpenAI
	// 4. Google Gemini
	// 5. Groq
	// 6. OpenRouter
	// 7. XAI
	// 8. AWS Bedrock
	// 9. Azure
	// 10. Google Cloud VertexAI

	// Copilot configuration
	if key := viper.GetString("providers.copilot.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.CopilotGPT4o)
		viper.SetDefault("agents.summarizer.model", models.CopilotGPT4o)
		viper.SetDefault("agents.task.model", models.CopilotGPT4o)
		viper.SetDefault("agents.title.model", models.CopilotGPT4o)
		return
	}

	// Anthropic configuration
	if key := viper.GetString("providers.anthropic.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.Claude4Sonnet)
		viper.SetDefault("agents.summarizer.model", models.Claude4Sonnet)
		viper.SetDefault("agents.task.model", models.Claude4Sonnet)
		viper.SetDefault("agents.title.model", models.Claude4Sonnet)
		return
	}

	// OpenAI configuration
	if key := viper.GetString("providers.openai.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.GPT41)
		viper.SetDefault("agents.summarizer.model", models.GPT41)
		viper.SetDefault("agents.task.model", models.GPT41Mini)
		viper.SetDefault("agents.title.model", models.GPT41Mini)
		return
	}

	// Google Gemini configuration
	if key := viper.GetString("providers.gemini.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.Gemini25)
		viper.SetDefault("agents.summarizer.model", models.Gemini25)
		viper.SetDefault("agents.task.model", models.Gemini25Flash)
		viper.SetDefault("agents.title.model", models.Gemini25Flash)
		return
	}

	// Groq configuration
	if key := viper.GetString("providers.groq.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.QWENQwq)
		viper.SetDefault("agents.summarizer.model", models.QWENQwq)
		viper.SetDefault("agents.task.model", models.QWENQwq)
		viper.SetDefault("agents.title.model", models.QWENQwq)
		return
	}

	// OpenRouter configuration
	if key := viper.GetString("providers.openrouter.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.OpenRouterClaude35Haiku)
		return
	}

	// XAI configuration
	if key := viper.GetString("providers.xai.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.summarizer.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.task.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.title.model", models.XAiGrok3MiniFastBeta)
		return
	}

	// AWS Bedrock configuration
	if hasAWSCredentials() {
		viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
		return
	}

	// Azure OpenAI configuration
	if os.Getenv("AZURE_OPENAI_ENDPOINT") != "" {
		viper.SetDefault("agents.coder.model", models.AzureGPT41)
		viper.SetDefault("agents.summarizer.model", models.AzureGPT41)
		viper.SetDefault("agents.task.model", models.AzureGPT41Mini)
		viper.SetDefault("agents.title.model", models.AzureGPT41Mini)
		return
	}

	// Google Cloud VertexAI configuration
	if hasVertexAICredentials() {
		viper.SetDefault("agents.coder.model", models.VertexAIGemini25)
		viper.SetDefault("agents.summarizer.model", models.VertexAIGemini25)
		viper.SetDefault("agents.task.model", models.VertexAIGemini25Flash)
		viper.SetDefault("agents.title.model", models.VertexAIGemini25Flash)
		return
	}
}

// hasAWSCredentials checks if AWS credentials are available in the environment.
func hasAWSCredentials() bool {
	// Check for explicit AWS credentials
	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
		return true
	}

	// Check for an AWS profile
	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
		return true
	}

	// Check for an AWS region
	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
		return true
	}

	// Check for container credentials (e.g. ECS task roles)
	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
		return true
	}

	return false
}

// hasVertexAICredentials checks if VertexAI credentials are available in the environment.
func hasVertexAICredentials() bool {
	// Check for explicit VertexAI parameters
	if os.Getenv("VERTEXAI_PROJECT") != "" && os.Getenv("VERTEXAI_LOCATION") != "" {
		return true
	}
	// Check for Google Cloud project and location
	if os.Getenv("GOOGLE_CLOUD_PROJECT") != "" && (os.Getenv("GOOGLE_CLOUD_REGION") != "" || os.Getenv("GOOGLE_CLOUD_LOCATION") != "") {
		return true
	}
	return false
}

// hasCopilotCredentials checks if a GitHub Copilot token is available.
func hasCopilotCredentials() bool {
	// Check for explicit Copilot parameters
	if token, _ := LoadGitHubToken(); token != "" {
		return true
	}
	return false
}

// readConfig handles the result of reading a configuration file.
func readConfig(err error) error {
	if err == nil {
		return nil
	}

	// It's okay if the config file doesn't exist
	if _, ok := err.(viper.ConfigFileNotFoundError); ok {
		return nil
	}

	return fmt.Errorf("failed to read config: %w", err)
}

// mergeLocalConfig loads and merges configuration from the local directory.
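//
// Values from the local file override the global config. For instance, if the
// global config sets "tui": {"theme": "opencode"} and the working directory's
// .opencode.json sets "tui": {"theme": "monokai"}, the merged theme is
// "monokai" (the theme name here is only an example).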
func mergeLocalConfig(workingDir string) {
	local := viper.New()
	local.SetConfigName(fmt.Sprintf(".%s", appName))
	local.SetConfigType("json")
	local.AddConfigPath(workingDir)

	// Merge local config if it exists
	if err := local.ReadInConfig(); err == nil {
		viper.MergeConfigMap(local.AllSettings())
	}
}

// applyDefaultValues sets default values for configuration fields that need processing.
func applyDefaultValues() {
	// Set default MCP type if not specified
	for k, v := range cfg.MCPServers {
		if v.Type == "" {
			v.Type = MCPStdio
			cfg.MCPServers[k] = v
		}
	}
}

// validateAgent checks a single agent's configuration, falling back to sane
// defaults when the model, provider, max tokens, or reasoning effort are
// missing or unsupported.
func validateAgent(cfg *Config, name AgentName, agent Agent) error {
	// Check if model exists
	// TODO: If a copilot model is specified but not found, it might be a new
	// model. The https://api.githubcopilot.com/models endpoint should be
	// queried to validate whether the model is supported.
	model, modelExists := models.SupportedModels[agent.Model]
	if !modelExists {
		logging.Warn("unsupported model configured, reverting to default",
			"agent", name,
			"configured_model", agent.Model)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
		return nil
	}

	// Check if provider for the model is configured
	provider := model.Provider
	providerCfg, providerExists := cfg.Providers[provider]

	if !providerExists {
		// Provider not configured, check if we have environment variables
		apiKey := getProviderAPIKey(provider)
		if apiKey == "" {
			logging.Warn("provider not configured for model, reverting to default",
				"agent", name,
				"model", agent.Model,
				"provider", provider)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
		} else {
			// Add provider with API key from environment
			cfg.Providers[provider] = Provider{
				APIKey: apiKey,
			}
			logging.Info("added provider from environment", "provider", provider)
		}
	} else if providerCfg.Disabled || providerCfg.APIKey == "" {
		// Provider is disabled or has no API key
		logging.Warn("provider is disabled or has no API key, reverting to default",
			"agent", name,
			"model", agent.Model,
			"provider", provider)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
	}

	// Validate max tokens
	if agent.MaxTokens <= 0 {
		logging.Warn("invalid max tokens, setting to default",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens)

		// Update the agent with default max tokens
		updatedAgent := cfg.Agents[name]
		if model.DefaultMaxTokens > 0 {
			updatedAgent.MaxTokens = model.DefaultMaxTokens
		} else {
			updatedAgent.MaxTokens = MaxTokensFallbackDefault
		}
		cfg.Agents[name] = updatedAgent
	} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
		// Ensure max tokens doesn't exceed half the context window (reasonable limit)
		logging.Warn("max tokens exceeds half the context window, adjusting",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens,
			"context_window", model.ContextWindow)

		// Update the agent with adjusted max tokens
		updatedAgent := cfg.Agents[name]
		updatedAgent.MaxTokens = model.ContextWindow / 2
		cfg.Agents[name] = updatedAgent
	}

	// Validate reasoning effort for models that support reasoning
	if model.CanReason && (provider == models.ProviderOpenAI || provider == models.ProviderLocal) {
		if agent.ReasoningEffort == "" {
			// Set default reasoning effort for models that support it
			logging.Info("setting default reasoning effort for model that supports reasoning",
				"agent", name,
				"model", agent.Model)

			// Update the agent with default reasoning effort
			updatedAgent := cfg.Agents[name]
			updatedAgent.ReasoningEffort = "medium"
			cfg.Agents[name] = updatedAgent
		} else {
			// Check if reasoning effort is valid (low, medium, high)
			effort := strings.ToLower(agent.ReasoningEffort)
			if effort != "low" && effort != "medium" && effort != "high" {
				logging.Warn("invalid reasoning effort, setting to medium",
					"agent", name,
					"model", agent.Model,
					"reasoning_effort", agent.ReasoningEffort)

				// Update the agent with valid reasoning effort
				updatedAgent := cfg.Agents[name]
				updatedAgent.ReasoningEffort = "medium"
				cfg.Agents[name] = updatedAgent
			}
		}
	} else if !model.CanReason && agent.ReasoningEffort != "" {
		// Model doesn't support reasoning but reasoning effort is set
		logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
			"agent", name,
			"model", agent.Model,
			"reasoning_effort", agent.ReasoningEffort)

		// Update the agent to remove reasoning effort
		updatedAgent := cfg.Agents[name]
		updatedAgent.ReasoningEffort = ""
		cfg.Agents[name] = updatedAgent
	}

	return nil
}

// Validate checks if the configuration is valid and applies defaults where needed.
func Validate() error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Validate agent models
	for name, agent := range cfg.Agents {
		if err := validateAgent(cfg, name, agent); err != nil {
			return err
		}
	}

	// Validate providers
	for provider, providerCfg := range cfg.Providers {
		if providerCfg.APIKey == "" && !providerCfg.Disabled {
			logging.Warn("provider has no API key, marking as disabled", "provider", provider)
			providerCfg.Disabled = true
			cfg.Providers[provider] = providerCfg
		}
	}

	// Validate LSP configurations
	for language, lspConfig := range cfg.LSP {
		if lspConfig.Command == "" && !lspConfig.Disabled {
			logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
			lspConfig.Disabled = true
			cfg.LSP[language] = lspConfig
		}
	}

	return nil
}

// getProviderAPIKey gets the API key for a provider from environment variables
func getProviderAPIKey(provider models.ModelProvider) string {
	switch provider {
	case models.ProviderAnthropic:
		return os.Getenv("ANTHROPIC_API_KEY")
	case models.ProviderOpenAI:
		return os.Getenv("OPENAI_API_KEY")
	case models.ProviderGemini:
		return os.Getenv("GEMINI_API_KEY")
	case models.ProviderGROQ:
		return os.Getenv("GROQ_API_KEY")
	case models.ProviderAzure:
		return os.Getenv("AZURE_OPENAI_API_KEY")
	case models.ProviderOpenRouter:
		return os.Getenv("OPENROUTER_API_KEY")
	case models.ProviderBedrock:
		if hasAWSCredentials() {
			return "aws-credentials-available"
		}
	case models.ProviderVertexAI:
		if hasVertexAICredentials() {
			return "vertex-ai-credentials-available"
		}
	}
	return ""
}

// setDefaultModelForAgent sets a default model for an agent based on available providers
func setDefaultModelForAgent(agent AgentName) bool {
	if hasCopilotCredentials() {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:     models.CopilotGPT4o,
			MaxTokens: maxTokens,
		}
		return true
	}
	// Check providers in order of preference
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:     models.Claude37Sonnet,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.GPT41Mini
			maxTokens = 80
		case AgentTask:
			model = models.GPT41Mini
		default:
			model = models.GPT41
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.OpenRouterClaude35Haiku
			maxTokens = 80
		case AgentTask:
			model = models.OpenRouterClaude37Sonnet
		default:
			model = models.OpenRouterClaude37Sonnet
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.Gemini25Flash
			maxTokens = 80
		} else {
			model = models.Gemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:     models.QWENQwq,
			MaxTokens: maxTokens,
		}
		return true
	}

	if hasAWSCredentials() {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:           models.BedrockClaude37Sonnet,
			MaxTokens:       maxTokens,
			ReasoningEffort: "medium", // Claude models support reasoning
		}
		return true
	}

	if hasVertexAICredentials() {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.VertexAIGemini25Flash
			maxTokens = 80
		} else {
			model = models.VertexAIGemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	return false
}

// updateCfgFile reads the user's config file (creating it if necessary),
// applies updateCfg to it, and writes the result back to disk.
func updateCfgFile(updateCfg func(config *Config)) error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Get the config file path
	configFile := viper.ConfigFileUsed()
	var configData []byte
	if configFile == "" {
		homeDir, err := os.UserHomeDir()
		if err != nil {
			return fmt.Errorf("failed to get home directory: %w", err)
		}
		configFile = filepath.Join(homeDir, fmt.Sprintf(".%s.json", appName))
		logging.Info("config file not found, creating new one", "path", configFile)
		configData = []byte(`{}`)
	} else {
		// Read the existing config file
		data, err := os.ReadFile(configFile)
		if err != nil {
			return fmt.Errorf("failed to read config file: %w", err)
		}
		configData = data
	}

	// Parse the JSON
	var userCfg *Config
	if err := json.Unmarshal(configData, &userCfg); err != nil {
		return fmt.Errorf("failed to parse config file: %w", err)
	}

	updateCfg(userCfg)

	// Write the updated config back to file
	updatedData, err := json.MarshalIndent(userCfg, "", " ")
	if err != nil {
		return fmt.Errorf("failed to marshal config: %w", err)
	}

	if err := os.WriteFile(configFile, updatedData, 0o644); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}

	return nil
}

// Get returns the current configuration.
// It's safe to call this function multiple times.
func Get() *Config {
	return cfg
}

// WorkingDirectory returns the current working directory from the configuration.
func WorkingDirectory() string {
	if cfg == nil {
		panic("config not loaded")
	}
	return cfg.WorkingDir
}

// UpdateAgentModel updates the model (and max tokens) for the given agent,
// validates the new configuration, and persists it to the config file.
func UpdateAgentModel(agentName AgentName, modelID models.ModelID) error {
	if cfg == nil {
		panic("config not loaded")
	}

	existingAgentCfg := cfg.Agents[agentName]

	model, ok := models.SupportedModels[modelID]
	if !ok {
		return fmt.Errorf("model %s not supported", modelID)
	}

	maxTokens := existingAgentCfg.MaxTokens
	if model.DefaultMaxTokens > 0 {
		maxTokens = model.DefaultMaxTokens
	}

	newAgentCfg := Agent{
		Model:           modelID,
		MaxTokens:       maxTokens,
		ReasoningEffort: existingAgentCfg.ReasoningEffort,
	}
	cfg.Agents[agentName] = newAgentCfg

	if err := validateAgent(cfg, agentName, newAgentCfg); err != nil {
		// Revert the config update on failure
		cfg.Agents[agentName] = existingAgentCfg
		return fmt.Errorf("failed to update agent model: %w", err)
	}

	return updateCfgFile(func(config *Config) {
		if config.Agents == nil {
			config.Agents = make(map[AgentName]Agent)
		}
		config.Agents[agentName] = newAgentCfg
	})
}

// UpdateTheme updates the theme in the configuration and writes it to the config file.
func UpdateTheme(themeName string) error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Update the in-memory config
	cfg.TUI.Theme = themeName

	// Update the file config
	return updateCfgFile(func(config *Config) {
		config.TUI.Theme = themeName
	})
}

// LoadGitHubToken tries to load the GitHub token from all known locations:
// the GITHUB_TOKEN environment variable and the GitHub Copilot config files.
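//
// As an illustration, a Copilot hosts.json of roughly this shape yields the
// "oauth_token" value (the token and user shown are placeholders):
//
//	{
//	  "github.com": {
//	    "oauth_token": "gho_xxx",
//	    "user": "octocat"
//	  }
//	}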
func LoadGitHubToken() (string, error) {
	// First check environment variable
	if token := os.Getenv("GITHUB_TOKEN"); token != "" {
		return token, nil
	}

	// Get config directory
	var configDir string
	if xdgConfig := os.Getenv("XDG_CONFIG_HOME"); xdgConfig != "" {
		configDir = xdgConfig
	} else if runtime.GOOS == "windows" {
		if localAppData := os.Getenv("LOCALAPPDATA"); localAppData != "" {
			configDir = localAppData
		} else {
			configDir = filepath.Join(os.Getenv("HOME"), "AppData", "Local")
		}
	} else {
		configDir = filepath.Join(os.Getenv("HOME"), ".config")
	}

	// Try both hosts.json and apps.json files
	filePaths := []string{
		filepath.Join(configDir, "github-copilot", "hosts.json"),
		filepath.Join(configDir, "github-copilot", "apps.json"),
	}

	for _, filePath := range filePaths {
		data, err := os.ReadFile(filePath)
		if err != nil {
			continue
		}

		var config map[string]map[string]interface{}
		if err := json.Unmarshal(data, &config); err != nil {
			continue
		}

		for key, value := range config {
			if strings.Contains(key, "github.com") {
				if oauthToken, ok := value["oauth_token"].(string); ok {
					return oauthToken, nil
				}
			}
		}
	}

	return "", fmt.Errorf("GitHub token not found in standard locations")
}