// Package config manages application configuration from various sources.
package config

import (
	"encoding/json"
	"fmt"
	"log/slog"
	"os"
	"path/filepath"
	"strings"

	"github.com/charmbracelet/crush/internal/llm/models"
	"github.com/charmbracelet/crush/internal/logging"
	"github.com/spf13/viper"
)

// MCPType defines the type of MCP (Model Context Protocol) server.
type MCPType string

// Supported MCP types
const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)

// MCPServer defines the configuration for a Model Context Protocol server.
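//
// Entries in the "mcpServers" map might look like this (illustrative only; the
// server names, command, and URL below are placeholders):
//
//	"filesystem": {
//		"type": "stdio",
//		"command": "mcp-filesystem",
//		"args": ["--root", "."],
//		"env": ["LOG_LEVEL=debug"]
//	}
//
//	"remote": {
//		"type": "sse",
//		"url": "https://example.com/mcp",
//		"headers": {"Authorization": "Bearer <token>"}
//	}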
type MCPServer struct {
	Command string            `json:"command"`
	Env     []string          `json:"env"`
	Args    []string          `json:"args"`
	Type    MCPType           `json:"type"`
	URL     string            `json:"url"`
	Headers map[string]string `json:"headers"`
}

// AgentName identifies one of the application's LLM agents.
type AgentName string

// Supported agent names
const (
	AgentCoder      AgentName = "coder"
	AgentSummarizer AgentName = "summarizer"
	AgentTask       AgentName = "task"
	AgentTitle      AgentName = "title"
)

// Agent defines configuration for different LLM models and their token limits.
type Agent struct {
	Model           models.ModelID `json:"model"`
	MaxTokens       int64          `json:"maxTokens"`
	ReasoningEffort string         `json:"reasoningEffort"` // For OpenAI models: low, medium, or high
}

// Provider defines configuration for an LLM provider.
type Provider struct {
	APIKey   string `json:"apiKey"`
	Disabled bool   `json:"disabled"`
}

// Data defines storage configuration.
type Data struct {
	Directory string `json:"directory,omitempty"`
}

// LSPConfig defines configuration for Language Server Protocol integration.
type LSPConfig struct {
	Disabled bool     `json:"disabled"`
	Command  string   `json:"command"`
	Args     []string `json:"args"`
	Options  any      `json:"options"`
}

// TUIConfig defines the configuration for the Terminal User Interface.
type TUIConfig struct {
	Theme string `json:"theme,omitempty"`
}

// ShellConfig defines the configuration for the shell used by the bash tool.
type ShellConfig struct {
	Path string   `json:"path,omitempty"`
	Args []string `json:"args,omitempty"`
}

// Config is the main configuration structure for the application.
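//
// A minimal config file might look like this (illustrative only; the model ID
// and API key are placeholders):
//
//	{
//		"providers": {
//			"openai": {"apiKey": "sk-..."}
//		},
//		"agents": {
//			"coder": {"model": "gpt-4.1", "maxTokens": 5000}
//		},
//		"tui": {"theme": "crush"},
//		"debug": false
//	}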
type Config struct {
	Data         Data                              `json:"data"`
	WorkingDir   string                            `json:"wd,omitempty"`
	MCPServers   map[string]MCPServer              `json:"mcpServers,omitempty"`
	Providers    map[models.ModelProvider]Provider `json:"providers,omitempty"`
	LSP          map[string]LSPConfig              `json:"lsp,omitempty"`
	Agents       map[AgentName]Agent               `json:"agents,omitempty"`
	Debug        bool                              `json:"debug,omitempty"`
	DebugLSP     bool                              `json:"debugLSP,omitempty"`
	ContextPaths []string                          `json:"contextPaths,omitempty"`
	TUI          TUIConfig                         `json:"tui"`
	Shell        ShellConfig                       `json:"shell,omitempty"`
	AutoCompact  bool                              `json:"autoCompact,omitempty"`
}

// Application constants
const (
	defaultDataDirectory = ".crush"
	defaultLogLevel      = "info"
	appName              = "crush"

	MaxTokensFallbackDefault = 4096
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
}

// Global configuration instance
var cfg *Config

// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and the log level is set to debug.
// It returns an error if configuration loading fails.
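//
// Typical usage (illustrative):
//
//	cfg, err := config.Load(".", false)
//	if err != nil {
//		// handle error
//	}
//	_ = cfg.WorkingDir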
func Load(workingDir string, debug bool) (*Config, error) {
	if cfg != nil {
		return cfg, nil
	}

	cfg = &Config{
		WorkingDir: workingDir,
		MCPServers: make(map[string]MCPServer),
		Providers:  make(map[models.ModelProvider]Provider),
		LSP:        make(map[string]LSPConfig),
	}

	configureViper()
	setDefaults(debug)

	// Read global config
	if err := readConfig(viper.ReadInConfig()); err != nil {
		return cfg, err
	}

	// Load and merge local config
	mergeLocalConfig(workingDir)

	setProviderDefaults()

	// Apply configuration to the struct
	if err := viper.Unmarshal(cfg); err != nil {
		return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
	}

	applyDefaultValues()
	defaultLevel := slog.LevelInfo
	if cfg.Debug {
		defaultLevel = slog.LevelDebug
	}
	if os.Getenv("CRUSH_DEV_DEBUG") == "true" {
		loggingFile := filepath.Join(cfg.Data.Directory, "debug.log")

		// Create the log file if it does not exist
		if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
			if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
				return cfg, fmt.Errorf("failed to create directory: %w", err)
			}
			if _, err := os.Create(loggingFile); err != nil {
				return cfg, fmt.Errorf("failed to create log file: %w", err)
			}
		}

		sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
		if err != nil {
			return cfg, fmt.Errorf("failed to open log file: %w", err)
		}
		// Configure logger
		logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	} else {
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	}

	// Validate configuration
	if err := Validate(); err != nil {
		return cfg, fmt.Errorf("config validation failed: %w", err)
	}

	if cfg.Agents == nil {
		cfg.Agents = make(map[AgentName]Agent)
	}

	// Override the max tokens for the title agent
	cfg.Agents[AgentTitle] = Agent{
		Model:     cfg.Agents[AgentTitle].Model,
		MaxTokens: 80,
	}
	return cfg, nil
}

// configureViper sets up viper's configuration paths and environment variables.
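// In practice this means the global config is looked up as a ".crush" JSON
// file in $HOME, $XDG_CONFIG_HOME/crush, and $HOME/.config/crush (for example
// $HOME/.crush.json), and environment variables prefixed with CRUSH_ are also
// consulted.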
func configureViper() {
	viper.SetConfigName(fmt.Sprintf(".%s", appName))
	viper.SetConfigType("json")
	viper.AddConfigPath("$HOME")
	viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
	viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
	viper.SetEnvPrefix(strings.ToUpper(appName))
	viper.AutomaticEnv()
}

// setDefaults configures default values for configuration options.
func setDefaults(debug bool) {
	viper.SetDefault("data.directory", defaultDataDirectory)
	viper.SetDefault("contextPaths", defaultContextPaths)
	viper.SetDefault("tui.theme", "crush")
	viper.SetDefault("autoCompact", true)

	// Set default shell from environment or fall back to /bin/bash
	shellPath := os.Getenv("SHELL")
	if shellPath == "" {
		shellPath = "/bin/bash"
	}
	viper.SetDefault("shell.path", shellPath)
	viper.SetDefault("shell.args", []string{"-l"})

	if debug {
		viper.SetDefault("debug", true)
		viper.Set("log.level", "debug")
	} else {
		viper.SetDefault("debug", false)
		viper.SetDefault("log.level", defaultLogLevel)
	}
}

// setProviderDefaults configures LLM provider defaults based on the credentials
// found in environment variables and the configuration file.
func setProviderDefaults() {
	// Set all API keys we can find in the environment
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.anthropic.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openai.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.gemini.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.groq.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openrouter.apiKey", apiKey)
	}
	if apiKey := os.Getenv("XAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.xai.apiKey", apiKey)
	}
	if endpoint := os.Getenv("AZURE_OPENAI_ENDPOINT"); endpoint != "" {
		// The API key may be empty when using Entra ID credentials; that's okay
		viper.SetDefault("providers.azure.apiKey", os.Getenv("AZURE_OPENAI_API_KEY"))
	}

	// Use this order to set the default models
	// 1. Anthropic
	// 2. OpenAI
	// 3. Google Gemini
	// 4. Groq
	// 5. OpenRouter
	// 6. xAI
	// 7. AWS Bedrock
	// 8. Azure
	// 9. Google Cloud VertexAI

	// Anthropic configuration
	if key := viper.GetString("providers.anthropic.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.Claude4Sonnet)
		viper.SetDefault("agents.summarizer.model", models.Claude4Sonnet)
		viper.SetDefault("agents.task.model", models.Claude4Sonnet)
		viper.SetDefault("agents.title.model", models.Claude4Sonnet)
		return
	}

	// OpenAI configuration
	if key := viper.GetString("providers.openai.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.GPT41)
		viper.SetDefault("agents.summarizer.model", models.GPT41)
		viper.SetDefault("agents.task.model", models.GPT41Mini)
		viper.SetDefault("agents.title.model", models.GPT41Mini)
		return
	}

	// Google Gemini configuration
	if key := viper.GetString("providers.gemini.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.Gemini25)
		viper.SetDefault("agents.summarizer.model", models.Gemini25)
		viper.SetDefault("agents.task.model", models.Gemini25Flash)
		viper.SetDefault("agents.title.model", models.Gemini25Flash)
		return
	}

	// Groq configuration
	if key := viper.GetString("providers.groq.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.QWENQwq)
		viper.SetDefault("agents.summarizer.model", models.QWENQwq)
		viper.SetDefault("agents.task.model", models.QWENQwq)
		viper.SetDefault("agents.title.model", models.QWENQwq)
		return
	}

	// OpenRouter configuration
	if key := viper.GetString("providers.openrouter.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.OpenRouterClaude35Haiku)
		return
	}

	// xAI configuration
	if key := viper.GetString("providers.xai.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.summarizer.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.task.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.title.model", models.XAiGrok3MiniFastBeta)
		return
	}

	// AWS Bedrock configuration
	if hasAWSCredentials() {
		viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
		return
	}

	// Azure OpenAI configuration
	if os.Getenv("AZURE_OPENAI_ENDPOINT") != "" {
		viper.SetDefault("agents.coder.model", models.AzureGPT41)
		viper.SetDefault("agents.summarizer.model", models.AzureGPT41)
		viper.SetDefault("agents.task.model", models.AzureGPT41Mini)
		viper.SetDefault("agents.title.model", models.AzureGPT41Mini)
		return
	}

	// Google Cloud VertexAI configuration
	if hasVertexAICredentials() {
		viper.SetDefault("agents.coder.model", models.VertexAIGemini25)
		viper.SetDefault("agents.summarizer.model", models.VertexAIGemini25)
		viper.SetDefault("agents.task.model", models.VertexAIGemini25Flash)
		viper.SetDefault("agents.title.model", models.VertexAIGemini25Flash)
		return
	}
}

// hasAWSCredentials checks if AWS credentials are available in the environment.
func hasAWSCredentials() bool {
	// Check for explicit AWS credentials
	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
		return true
	}

	// Check for AWS profile
	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
		return true
	}

	// Check for AWS region
	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
		return true
	}

	// Check for container credential endpoints (e.g. ECS task roles)
	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
		return true
	}

	return false
}

// hasVertexAICredentials checks if VertexAI credentials are available in the environment.
func hasVertexAICredentials() bool {
	// Check for explicit VertexAI parameters
	if os.Getenv("VERTEXAI_PROJECT") != "" && os.Getenv("VERTEXAI_LOCATION") != "" {
		return true
	}
	// Check for Google Cloud project and location
	if os.Getenv("GOOGLE_CLOUD_PROJECT") != "" && (os.Getenv("GOOGLE_CLOUD_REGION") != "" || os.Getenv("GOOGLE_CLOUD_LOCATION") != "") {
		return true
	}
	return false
}

// readConfig handles the result of reading a configuration file.
func readConfig(err error) error {
	if err == nil {
		return nil
	}

	// It's okay if the config file doesn't exist
	if _, ok := err.(viper.ConfigFileNotFoundError); ok {
		return nil
	}

	return fmt.Errorf("failed to read config: %w", err)
}

// mergeLocalConfig loads and merges configuration from the local directory.
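// Values from a local ".crush.json" in the working directory are merged over
// the global configuration, so overlapping keys take the local value. For
// example, a project-local file containing only {"tui": {"theme": "light"}}
// (illustrative) would override just the theme.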
func mergeLocalConfig(workingDir string) {
	local := viper.New()
	local.SetConfigName(fmt.Sprintf(".%s", appName))
	local.SetConfigType("json")
	local.AddConfigPath(workingDir)

	// Merge local config if it exists
	if err := local.ReadInConfig(); err == nil {
		viper.MergeConfigMap(local.AllSettings())
	}
}

// applyDefaultValues sets default values for configuration fields that need processing.
func applyDefaultValues() {
	// Set default MCP type if not specified
	for k, v := range cfg.MCPServers {
		if v.Type == "" {
			v.Type = MCPStdio
			cfg.MCPServers[k] = v
		}
	}
}

// validateAgent validates an agent's configuration, ensuring its model and provider are supported.
func validateAgent(cfg *Config, name AgentName, agent Agent) error {
	// Check if model exists
	model, modelExists := models.SupportedModels[agent.Model]
	if !modelExists {
		logging.Warn("unsupported model configured, reverting to default",
			"agent", name,
			"configured_model", agent.Model)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
		return nil
	}

	// Check if provider for the model is configured
	provider := model.Provider
	providerCfg, providerExists := cfg.Providers[provider]

	if !providerExists {
		// Provider not configured, check if we have environment variables
		apiKey := getProviderAPIKey(provider)
		if apiKey == "" {
			logging.Warn("provider not configured for model, reverting to default",
				"agent", name,
				"model", agent.Model,
				"provider", provider)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
		} else {
			// Add provider with API key from environment
			cfg.Providers[provider] = Provider{
				APIKey: apiKey,
			}
			logging.Info("added provider from environment", "provider", provider)
		}
	} else if providerCfg.Disabled || providerCfg.APIKey == "" {
		// Provider is disabled or has no API key
		logging.Warn("provider is disabled or has no API key, reverting to default",
			"agent", name,
			"model", agent.Model,
			"provider", provider)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
	}

	// Validate max tokens
	if agent.MaxTokens <= 0 {
		logging.Warn("invalid max tokens, setting to default",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens)

		// Update the agent with default max tokens
		updatedAgent := cfg.Agents[name]
		if model.DefaultMaxTokens > 0 {
			updatedAgent.MaxTokens = model.DefaultMaxTokens
		} else {
			updatedAgent.MaxTokens = MaxTokensFallbackDefault
		}
		cfg.Agents[name] = updatedAgent
	} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
		// Ensure max tokens doesn't exceed half the context window (reasonable limit)
		logging.Warn("max tokens exceeds half the context window, adjusting",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens,
			"context_window", model.ContextWindow)

		// Update the agent with adjusted max tokens
		updatedAgent := cfg.Agents[name]
		updatedAgent.MaxTokens = model.ContextWindow / 2
		cfg.Agents[name] = updatedAgent
	}

	// Validate reasoning effort for models that support reasoning
	if model.CanReason && (provider == models.ProviderOpenAI || provider == models.ProviderLocal) {
		if agent.ReasoningEffort == "" {
			// Set default reasoning effort for models that support it
			logging.Info("setting default reasoning effort for model that supports reasoning",
				"agent", name,
				"model", agent.Model)

			// Update the agent with default reasoning effort
			updatedAgent := cfg.Agents[name]
			updatedAgent.ReasoningEffort = "medium"
			cfg.Agents[name] = updatedAgent
		} else {
			// Check if reasoning effort is valid (low, medium, high)
			effort := strings.ToLower(agent.ReasoningEffort)
			if effort != "low" && effort != "medium" && effort != "high" {
				logging.Warn("invalid reasoning effort, setting to medium",
					"agent", name,
					"model", agent.Model,
					"reasoning_effort", agent.ReasoningEffort)

				// Update the agent with valid reasoning effort
				updatedAgent := cfg.Agents[name]
				updatedAgent.ReasoningEffort = "medium"
				cfg.Agents[name] = updatedAgent
			}
		}
	} else if !model.CanReason && agent.ReasoningEffort != "" {
		// Model doesn't support reasoning but reasoning effort is set
		logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
			"agent", name,
			"model", agent.Model,
			"reasoning_effort", agent.ReasoningEffort)

		// Update the agent to remove reasoning effort
		updatedAgent := cfg.Agents[name]
		updatedAgent.ReasoningEffort = ""
		cfg.Agents[name] = updatedAgent
	}

	return nil
}

// Validate checks if the configuration is valid and applies defaults where needed.
func Validate() error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Validate agent models
	for name, agent := range cfg.Agents {
		if err := validateAgent(cfg, name, agent); err != nil {
			return err
		}
	}

	// Validate providers
	for provider, providerCfg := range cfg.Providers {
		if providerCfg.APIKey == "" && !providerCfg.Disabled {
			logging.Warn("provider has no API key, marking as disabled", "provider", provider)
			providerCfg.Disabled = true
			cfg.Providers[provider] = providerCfg
		}
	}

	// Validate LSP configurations
	for language, lspConfig := range cfg.LSP {
		if lspConfig.Command == "" && !lspConfig.Disabled {
			logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
			lspConfig.Disabled = true
			cfg.LSP[language] = lspConfig
		}
	}

	return nil
}

// getProviderAPIKey gets the API key for a provider from environment variables.
func getProviderAPIKey(provider models.ModelProvider) string {
	switch provider {
	case models.ProviderAnthropic:
		return os.Getenv("ANTHROPIC_API_KEY")
	case models.ProviderOpenAI:
		return os.Getenv("OPENAI_API_KEY")
	case models.ProviderGemini:
		return os.Getenv("GEMINI_API_KEY")
	case models.ProviderGROQ:
		return os.Getenv("GROQ_API_KEY")
	case models.ProviderAzure:
		return os.Getenv("AZURE_OPENAI_API_KEY")
	case models.ProviderOpenRouter:
		return os.Getenv("OPENROUTER_API_KEY")
	case models.ProviderBedrock:
		if hasAWSCredentials() {
			return "aws-credentials-available"
		}
	case models.ProviderVertexAI:
		if hasVertexAICredentials() {
			return "vertex-ai-credentials-available"
		}
	}
	return ""
}

// setDefaultModelForAgent sets a default model for an agent based on available providers.
func setDefaultModelForAgent(agent AgentName) bool {
	// Check providers in order of preference
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:     models.Claude37Sonnet,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.GPT41Mini
			maxTokens = 80
		case AgentTask:
			model = models.GPT41Mini
		default:
			model = models.GPT41
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.OpenRouterClaude35Haiku
			maxTokens = 80
		case AgentTask:
			model = models.OpenRouterClaude37Sonnet
		default:
			model = models.OpenRouterClaude37Sonnet
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.Gemini25Flash
			maxTokens = 80
		} else {
			model = models.Gemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:     models.QWENQwq,
			MaxTokens: maxTokens,
		}
		return true
	}

	if hasAWSCredentials() {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:           models.BedrockClaude37Sonnet,
			MaxTokens:       maxTokens,
			ReasoningEffort: "medium", // Claude models support reasoning
		}
		return true
	}

	if hasVertexAICredentials() {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.VertexAIGemini25Flash
			maxTokens = 80
		} else {
			model = models.VertexAIGemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	return false
}

// updateCfgFile reads the user's config file, applies updateCfg to it, and
// writes the result back to disk, creating the file if it does not exist.
func updateCfgFile(updateCfg func(config *Config)) error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Get the config file path
	configFile := viper.ConfigFileUsed()
	var configData []byte
	if configFile == "" {
		homeDir, err := os.UserHomeDir()
		if err != nil {
			return fmt.Errorf("failed to get home directory: %w", err)
		}
		configFile = filepath.Join(homeDir, fmt.Sprintf(".%s.json", appName))
		logging.Info("config file not found, creating new one", "path", configFile)
		configData = []byte(`{}`)
	} else {
		// Read the existing config file
		data, err := os.ReadFile(configFile)
		if err != nil {
			return fmt.Errorf("failed to read config file: %w", err)
		}
		configData = data
	}

	// Parse the JSON
	var userCfg *Config
	if err := json.Unmarshal(configData, &userCfg); err != nil {
		return fmt.Errorf("failed to parse config file: %w", err)
	}

	updateCfg(userCfg)

	// Write the updated config back to file
	updatedData, err := json.MarshalIndent(userCfg, "", " ")
	if err != nil {
		return fmt.Errorf("failed to marshal config: %w", err)
	}

	if err := os.WriteFile(configFile, updatedData, 0o644); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}

	return nil
}

// Get returns the current configuration.
// It's safe to call this function multiple times.
func Get() *Config {
	return cfg
}

// WorkingDirectory returns the current working directory from the configuration.
func WorkingDirectory() string {
	if cfg == nil {
		panic("config not loaded")
	}
	return cfg.WorkingDir
}

// UpdateAgentModel updates the model for the given agent, validates the change,
// and persists it to the config file. On validation failure the previous agent
// configuration is restored.
func UpdateAgentModel(agentName AgentName, modelID models.ModelID) error {
	if cfg == nil {
		panic("config not loaded")
	}

	existingAgentCfg := cfg.Agents[agentName]

	model, ok := models.SupportedModels[modelID]
	if !ok {
		return fmt.Errorf("model %s not supported", modelID)
	}

	maxTokens := existingAgentCfg.MaxTokens
	if model.DefaultMaxTokens > 0 {
		maxTokens = model.DefaultMaxTokens
	}

	newAgentCfg := Agent{
		Model:           modelID,
		MaxTokens:       maxTokens,
		ReasoningEffort: existingAgentCfg.ReasoningEffort,
	}
	cfg.Agents[agentName] = newAgentCfg

	if err := validateAgent(cfg, agentName, newAgentCfg); err != nil {
		// revert config update on failure
		cfg.Agents[agentName] = existingAgentCfg
		return fmt.Errorf("failed to update agent model: %w", err)
	}

	return updateCfgFile(func(config *Config) {
		if config.Agents == nil {
			config.Agents = make(map[AgentName]Agent)
		}
		config.Agents[agentName] = newAgentCfg
	})
}

// UpdateTheme updates the theme in the configuration and writes it to the config file.
func UpdateTheme(themeName string) error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Update the in-memory config
	cfg.TUI.Theme = themeName

	// Update the file config
	return updateCfgFile(func(config *Config) {
		config.TUI.Theme = themeName
	})
}