// Package config manages application configuration from various sources.
package config

import (
	"encoding/json"
	"fmt"
	"log/slog"
	"os"
	"path/filepath"
	"strings"

	"github.com/charmbracelet/crush/internal/llm/models"
	"github.com/charmbracelet/crush/internal/logging"
	"github.com/spf13/viper"
)

// MCPType defines the type of MCP (Model Context Protocol) server.
type MCPType string

// Supported MCP types
const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)

// MCPServer defines the configuration for a Model Context Protocol server.
type MCPServer struct {
	Command string            `json:"command"`
	Env     []string          `json:"env"`
	Args    []string          `json:"args"`
	Type    MCPType           `json:"type"`
	URL     string            `json:"url"`
	Headers map[string]string `json:"headers"`
}

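// AgentName identifies the built-in agents the application can run.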
type AgentName string

const (
	AgentCoder      AgentName = "coder"
	AgentSummarizer AgentName = "summarizer"
	AgentTask       AgentName = "task"
	AgentTitle      AgentName = "title"
)

// Agent defines configuration for different LLM models and their token limits.
type Agent struct {
	Model           models.ModelID `json:"model"`
	MaxTokens       int64          `json:"maxTokens"`
	ReasoningEffort string         `json:"reasoningEffort"` // For OpenAI models: low, medium, high
}

// Provider defines configuration for an LLM provider.
type Provider struct {
	APIKey   string `json:"apiKey"`
	Disabled bool   `json:"disabled"`
}

// Data defines storage configuration.
type Data struct {
	Directory string `json:"directory,omitempty"`
}

// LSPConfig defines configuration for Language Server Protocol integration.
type LSPConfig struct {
	Disabled bool     `json:"disabled"`
	Command  string   `json:"command"`
	Args     []string `json:"args"`
	Options  any      `json:"options"`
}

// TUIConfig defines the configuration for the Terminal User Interface.
type TUIConfig struct {
	Theme string `json:"theme,omitempty"`
}

// Config is the main configuration structure for the application.
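//
// An illustrative configuration file (values are examples only, not defaults):
//
//	{
//	  "data": {"directory": ".crush"},
//	  "providers": {"anthropic": {"apiKey": "sk-..."}},
//	  "agents": {"coder": {"model": "claude-4-sonnet", "maxTokens": 5000}},
//	  "lsp": {"go": {"command": "gopls"}},
//	  "mcpServers": {"example": {"type": "stdio", "command": "example-mcp"}},
//	  "tui": {"theme": "crush"}
//	}
//
// Keys correspond to the json tags on the fields below; model IDs must match
// entries in models.SupportedModels.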
type Config struct {
	Data         Data                              `json:"data"`
	WorkingDir   string                            `json:"wd,omitempty"`
	MCPServers   map[string]MCPServer              `json:"mcpServers,omitempty"`
	Providers    map[models.ModelProvider]Provider `json:"providers,omitempty"`
	LSP          map[string]LSPConfig              `json:"lsp,omitempty"`
	Agents       map[AgentName]Agent               `json:"agents,omitempty"`
	Debug        bool                              `json:"debug,omitempty"`
	DebugLSP     bool                              `json:"debugLSP,omitempty"`
	ContextPaths []string                          `json:"contextPaths,omitempty"`
	TUI          TUIConfig                         `json:"tui"`
	AutoCompact  bool                              `json:"autoCompact,omitempty"`
}

// Application constants
const (
	defaultDataDirectory = ".crush"
	defaultLogLevel      = "info"
	appName              = "crush"

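	// MaxTokensFallbackDefault is used when a model does not define its own default.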
	MaxTokensFallbackDefault = 4096
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
}

// Global configuration instance
var cfg *Config

// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and log level is set to debug.
// It returns an error if configuration loading fails.
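//
// A minimal usage sketch (error handling elided):
//
//	cfg, err := config.Load(workingDir, false)
//	if err != nil {
//		// handle error
//	}
//	_ = config.Get() // later calls return the same instance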
func Load(workingDir string, debug bool) (*Config, error) {
	if cfg != nil {
		return cfg, nil
	}

	cfg = &Config{
		WorkingDir: workingDir,
		MCPServers: make(map[string]MCPServer),
		Providers:  make(map[models.ModelProvider]Provider),
		LSP:        make(map[string]LSPConfig),
	}

	configureViper()
	setDefaults(debug)

	// Read global config
	if err := readConfig(viper.ReadInConfig()); err != nil {
		return cfg, err
	}

	// Load and merge local config
	mergeLocalConfig(workingDir)

	setProviderDefaults()

	// Apply configuration to the struct
	if err := viper.Unmarshal(cfg); err != nil {
		return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
	}

	applyDefaultValues()
	defaultLevel := slog.LevelInfo
	if cfg.Debug {
		defaultLevel = slog.LevelDebug
	}
	if os.Getenv("CRUSH_DEV_DEBUG") == "true" {
		loggingFile := filepath.Join(cfg.Data.Directory, "debug.log")

		// Make sure the data directory exists; OpenFile below creates the log file if needed.
		if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
			if err := os.MkdirAll(cfg.Data.Directory, 0o755); err != nil {
				return cfg, fmt.Errorf("failed to create directory: %w", err)
			}
		}

		sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
		if err != nil {
			return cfg, fmt.Errorf("failed to open log file: %w", err)
		}
		// Configure logger
		logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	} else {
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	}

	// Validate configuration
	if err := Validate(); err != nil {
		return cfg, fmt.Errorf("config validation failed: %w", err)
	}

	if cfg.Agents == nil {
		cfg.Agents = make(map[AgentName]Agent)
	}

	// Override the max tokens for title agent
	cfg.Agents[AgentTitle] = Agent{
		Model:     cfg.Agents[AgentTitle].Model,
		MaxTokens: 80,
	}
	return cfg, nil
}

// configureViper sets up viper's configuration paths and environment variables.
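// The global config file is .crush.json, searched for in $HOME,
// $XDG_CONFIG_HOME/crush, and $HOME/.config/crush; CRUSH_-prefixed
// environment variables take precedence over values from the file.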
func configureViper() {
	viper.SetConfigName(fmt.Sprintf(".%s", appName))
	viper.SetConfigType("json")
	viper.AddConfigPath("$HOME")
	viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
	viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
	viper.SetEnvPrefix(strings.ToUpper(appName))
	viper.AutomaticEnv()
}

// setDefaults configures default values for configuration options.
func setDefaults(debug bool) {
	viper.SetDefault("data.directory", defaultDataDirectory)
	viper.SetDefault("contextPaths", defaultContextPaths)
	viper.SetDefault("tui.theme", "crush")
	viper.SetDefault("autoCompact", true)

	if debug {
		viper.SetDefault("debug", true)
		viper.Set("log.level", "debug")
	} else {
		viper.SetDefault("debug", false)
		viper.SetDefault("log.level", defaultLogLevel)
	}
}

// setProviderDefaults configures LLM provider defaults based on the API keys
// found in the environment and in the configuration file.
func setProviderDefaults() {
	// Set all API keys we can find in the environment
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.anthropic.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openai.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.gemini.apiKey", apiKey)
	}
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.groq.apiKey", apiKey)
	}
	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.openrouter.apiKey", apiKey)
	}
	if apiKey := os.Getenv("XAI_API_KEY"); apiKey != "" {
		viper.SetDefault("providers.xai.apiKey", apiKey)
	}
	if endpoint := os.Getenv("AZURE_OPENAI_ENDPOINT"); endpoint != "" {
		// api-key may be empty when using Entra ID credentials – that's okay
		viper.SetDefault("providers.azure.apiKey", os.Getenv("AZURE_OPENAI_API_KEY"))
	}

	// Use this order to set the default models
	// 1. Anthropic
	// 2. OpenAI
	// 3. Google Gemini
	// 4. Groq
	// 5. OpenRouter
	// 6. XAI
	// 7. AWS Bedrock
	// 8. Azure
	// 9. Google Cloud VertexAI

	// Anthropic configuration
	if key := viper.GetString("providers.anthropic.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.Claude4Sonnet)
		viper.SetDefault("agents.summarizer.model", models.Claude4Sonnet)
		viper.SetDefault("agents.task.model", models.Claude4Sonnet)
		viper.SetDefault("agents.title.model", models.Claude4Sonnet)
		return
	}

	// OpenAI configuration
	if key := viper.GetString("providers.openai.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.GPT41)
		viper.SetDefault("agents.summarizer.model", models.GPT41)
		viper.SetDefault("agents.task.model", models.GPT41Mini)
		viper.SetDefault("agents.title.model", models.GPT41Mini)
		return
	}

	// Google Gemini configuration
	if key := viper.GetString("providers.gemini.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.Gemini25)
		viper.SetDefault("agents.summarizer.model", models.Gemini25)
		viper.SetDefault("agents.task.model", models.Gemini25Flash)
		viper.SetDefault("agents.title.model", models.Gemini25Flash)
		return
	}

	// Groq configuration
	if key := viper.GetString("providers.groq.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.QWENQwq)
		viper.SetDefault("agents.summarizer.model", models.QWENQwq)
		viper.SetDefault("agents.task.model", models.QWENQwq)
		viper.SetDefault("agents.title.model", models.QWENQwq)
		return
	}

	// OpenRouter configuration
	if key := viper.GetString("providers.openrouter.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.OpenRouterClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.OpenRouterClaude35Haiku)
		return
	}

	// XAI configuration
	if key := viper.GetString("providers.xai.apiKey"); strings.TrimSpace(key) != "" {
		viper.SetDefault("agents.coder.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.summarizer.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.task.model", models.XAIGrok3Beta)
		viper.SetDefault("agents.title.model", models.XAiGrok3MiniFastBeta)
		return
	}

	// AWS Bedrock configuration
	if hasAWSCredentials() {
		viper.SetDefault("agents.coder.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.summarizer.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
		viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
		return
	}

	// Azure OpenAI configuration
	if os.Getenv("AZURE_OPENAI_ENDPOINT") != "" {
		viper.SetDefault("agents.coder.model", models.AzureGPT41)
		viper.SetDefault("agents.summarizer.model", models.AzureGPT41)
		viper.SetDefault("agents.task.model", models.AzureGPT41Mini)
		viper.SetDefault("agents.title.model", models.AzureGPT41Mini)
		return
	}

	// Google Cloud VertexAI configuration
	if hasVertexAICredentials() {
		viper.SetDefault("agents.coder.model", models.VertexAIGemini25)
		viper.SetDefault("agents.summarizer.model", models.VertexAIGemini25)
		viper.SetDefault("agents.task.model", models.VertexAIGemini25Flash)
		viper.SetDefault("agents.title.model", models.VertexAIGemini25Flash)
		return
	}
}

// hasAWSCredentials checks if AWS credentials are available in the environment.
func hasAWSCredentials() bool {
	// Check for explicit AWS credentials
	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
		return true
	}

	// Check for AWS profile
	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
		return true
	}

	// Check for AWS region
	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
		return true
	}

	// Check for container credentials (e.g. ECS task roles)
	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
		return true
	}

	return false
}

// hasVertexAICredentials checks if VertexAI credentials are available in the environment.
func hasVertexAICredentials() bool {
	// Check for explicit VertexAI parameters
	if os.Getenv("VERTEXAI_PROJECT") != "" && os.Getenv("VERTEXAI_LOCATION") != "" {
		return true
	}
	// Check for Google Cloud project and location
	if os.Getenv("GOOGLE_CLOUD_PROJECT") != "" && (os.Getenv("GOOGLE_CLOUD_REGION") != "" || os.Getenv("GOOGLE_CLOUD_LOCATION") != "") {
		return true
	}
	return false
}

// readConfig handles the result of reading a configuration file.
func readConfig(err error) error {
	if err == nil {
		return nil
	}

	// It's okay if the config file doesn't exist
	if _, ok := err.(viper.ConfigFileNotFoundError); ok {
		return nil
	}

	return fmt.Errorf("failed to read config: %w", err)
}

// mergeLocalConfig loads and merges configuration from the local directory.
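// Settings found in the working directory take precedence over the global config.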
func mergeLocalConfig(workingDir string) {
	local := viper.New()
	local.SetConfigName(fmt.Sprintf(".%s", appName))
	local.SetConfigType("json")
	local.AddConfigPath(workingDir)

	// Merge local config if it exists
	if err := local.ReadInConfig(); err == nil {
		viper.MergeConfigMap(local.AllSettings())
	}
}

// applyDefaultValues sets default values for configuration fields that need processing.
func applyDefaultValues() {
	// Set default MCP type if not specified
	for k, v := range cfg.MCPServers {
		if v.Type == "" {
			v.Type = MCPStdio
			cfg.MCPServers[k] = v
		}
	}
}

// validateAgent validates an agent's model and provider configuration, ensuring they are supported.
func validateAgent(cfg *Config, name AgentName, agent Agent) error {
	// Check if model exists
	model, modelExists := models.SupportedModels[agent.Model]
	if !modelExists {
		logging.Warn("unsupported model configured, reverting to default",
			"agent", name,
			"configured_model", agent.Model)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
		return nil
	}

	// Check if provider for the model is configured
	provider := model.Provider
	providerCfg, providerExists := cfg.Providers[provider]

	if !providerExists {
		// Provider not configured, check if we have environment variables
		apiKey := getProviderAPIKey(provider)
		if apiKey == "" {
			logging.Warn("provider not configured for model, reverting to default",
				"agent", name,
				"model", agent.Model,
				"provider", provider)

			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
		} else {
			// Add provider with API key from environment
			cfg.Providers[provider] = Provider{
				APIKey: apiKey,
			}
			logging.Info("added provider from environment", "provider", provider)
		}
	} else if providerCfg.Disabled || providerCfg.APIKey == "" {
		// Provider is disabled or has no API key
		logging.Warn("provider is disabled or has no API key, reverting to default",
			"agent", name,
			"model", agent.Model,
			"provider", provider)

		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
	}

	// Validate max tokens
	if agent.MaxTokens <= 0 {
		logging.Warn("invalid max tokens, setting to default",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens)

		// Update the agent with default max tokens
		updatedAgent := cfg.Agents[name]
		if model.DefaultMaxTokens > 0 {
			updatedAgent.MaxTokens = model.DefaultMaxTokens
		} else {
			updatedAgent.MaxTokens = MaxTokensFallbackDefault
		}
		cfg.Agents[name] = updatedAgent
	} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
		// Ensure max tokens doesn't exceed half the context window (reasonable limit)
		logging.Warn("max tokens exceeds half the context window, adjusting",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens,
			"context_window", model.ContextWindow)

		// Update the agent with adjusted max tokens
		updatedAgent := cfg.Agents[name]
		updatedAgent.MaxTokens = model.ContextWindow / 2
		cfg.Agents[name] = updatedAgent
	}

	// Validate reasoning effort for models that support reasoning
	if model.CanReason && (provider == models.ProviderOpenAI || provider == models.ProviderLocal) {
		if agent.ReasoningEffort == "" {
			// Set default reasoning effort for models that support it
			logging.Info("setting default reasoning effort for model that supports reasoning",
				"agent", name,
				"model", agent.Model)

			// Update the agent with default reasoning effort
			updatedAgent := cfg.Agents[name]
			updatedAgent.ReasoningEffort = "medium"
			cfg.Agents[name] = updatedAgent
		} else {
			// Check if reasoning effort is valid (low, medium, high)
			effort := strings.ToLower(agent.ReasoningEffort)
			if effort != "low" && effort != "medium" && effort != "high" {
				logging.Warn("invalid reasoning effort, setting to medium",
					"agent", name,
					"model", agent.Model,
					"reasoning_effort", agent.ReasoningEffort)

				// Update the agent with valid reasoning effort
				updatedAgent := cfg.Agents[name]
				updatedAgent.ReasoningEffort = "medium"
				cfg.Agents[name] = updatedAgent
			}
		}
	} else if !model.CanReason && agent.ReasoningEffort != "" {
		// Model doesn't support reasoning but reasoning effort is set
		logging.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
			"agent", name,
			"model", agent.Model,
			"reasoning_effort", agent.ReasoningEffort)

		// Update the agent to remove reasoning effort
		updatedAgent := cfg.Agents[name]
		updatedAgent.ReasoningEffort = ""
		cfg.Agents[name] = updatedAgent
	}

	return nil
}

// Validate checks if the configuration is valid and applies defaults where needed.
func Validate() error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Validate agent models
	for name, agent := range cfg.Agents {
		if err := validateAgent(cfg, name, agent); err != nil {
			return err
		}
	}

	// Validate providers
	for provider, providerCfg := range cfg.Providers {
		if providerCfg.APIKey == "" && !providerCfg.Disabled {
			logging.Warn("provider has no API key, marking as disabled", "provider", provider)
			providerCfg.Disabled = true
			cfg.Providers[provider] = providerCfg
		}
	}

	// Validate LSP configurations
	for language, lspConfig := range cfg.LSP {
		if lspConfig.Command == "" && !lspConfig.Disabled {
			logging.Warn("LSP configuration has no command, marking as disabled", "language", language)
			lspConfig.Disabled = true
			cfg.LSP[language] = lspConfig
		}
	}

	return nil
}

// getProviderAPIKey gets the API key for a provider from environment variables.
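// For Bedrock and VertexAI it returns a non-empty placeholder string when
// ambient credentials are detected, rather than an actual key.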
func getProviderAPIKey(provider models.ModelProvider) string {
	switch provider {
	case models.ProviderAnthropic:
		return os.Getenv("ANTHROPIC_API_KEY")
	case models.ProviderOpenAI:
		return os.Getenv("OPENAI_API_KEY")
	case models.ProviderGemini:
		return os.Getenv("GEMINI_API_KEY")
	case models.ProviderGROQ:
		return os.Getenv("GROQ_API_KEY")
	case models.ProviderAzure:
		return os.Getenv("AZURE_OPENAI_API_KEY")
	case models.ProviderOpenRouter:
		return os.Getenv("OPENROUTER_API_KEY")
	case models.ProviderBedrock:
		if hasAWSCredentials() {
			return "aws-credentials-available"
		}
	case models.ProviderVertexAI:
		if hasVertexAICredentials() {
			return "vertex-ai-credentials-available"
		}
	}
	return ""
}

// setDefaultModelForAgent sets a default model for an agent based on available providers.
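// It returns true if a default was applied, or false when no provider credentials are found.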
func setDefaultModelForAgent(agent AgentName) bool {
	// Check providers in order of preference
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:     models.Claude37Sonnet,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.GPT41Mini
			maxTokens = 80
		case AgentTask:
			model = models.GPT41Mini
		default:
			model = models.GPT41
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""

		switch agent {
		case AgentTitle:
			model = models.OpenRouterClaude35Haiku
			maxTokens = 80
		case AgentTask:
			model = models.OpenRouterClaude37Sonnet
		default:
			model = models.OpenRouterClaude37Sonnet
		}

		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}

		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}

	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.Gemini25Flash
			maxTokens = 80
		} else {
			model = models.Gemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:     models.QWENQwq,
			MaxTokens: maxTokens,
		}
		return true
	}

	if hasAWSCredentials() {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}

		cfg.Agents[agent] = Agent{
			Model:           models.BedrockClaude37Sonnet,
			MaxTokens:       maxTokens,
			ReasoningEffort: "medium", // Claude models support reasoning
		}
		return true
	}

	if hasVertexAICredentials() {
		var model models.ModelID
		maxTokens := int64(5000)

		if agent == AgentTitle {
			model = models.VertexAIGemini25Flash
			maxTokens = 80
		} else {
			model = models.VertexAIGemini25
		}

		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}

	return false
}

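// updateCfgFile loads the persisted config file (falling back to a new
// .crush.json in the user's home directory when none is in use), applies
// updateCfg to it, and writes the result back to disk.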
func updateCfgFile(updateCfg func(config *Config)) error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Get the config file path
	configFile := viper.ConfigFileUsed()
	var configData []byte
	if configFile == "" {
		homeDir, err := os.UserHomeDir()
		if err != nil {
			return fmt.Errorf("failed to get home directory: %w", err)
		}
		configFile = filepath.Join(homeDir, fmt.Sprintf(".%s.json", appName))
		logging.Info("config file not found, creating new one", "path", configFile)
		configData = []byte(`{}`)
	} else {
		// Read the existing config file
		data, err := os.ReadFile(configFile)
		if err != nil {
			return fmt.Errorf("failed to read config file: %w", err)
		}
		configData = data
	}

	// Parse the JSON
	var userCfg *Config
	if err := json.Unmarshal(configData, &userCfg); err != nil {
		return fmt.Errorf("failed to parse config file: %w", err)
	}

	updateCfg(userCfg)

	// Write the updated config back to file
	updatedData, err := json.MarshalIndent(userCfg, "", " ")
	if err != nil {
		return fmt.Errorf("failed to marshal config: %w", err)
	}

	if err := os.WriteFile(configFile, updatedData, 0o644); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}

	return nil
}

// Get returns the current configuration.
// It's safe to call this function multiple times.
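// It returns nil if Load has not been called.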
func Get() *Config {
	return cfg
}

// WorkingDirectory returns the current working directory from the configuration.
func WorkingDirectory() string {
	if cfg == nil {
		panic("config not loaded")
	}
	return cfg.WorkingDir
}

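// UpdateAgentModel switches the given agent to modelID, validates the change,
// and persists it to the config file. It panics if the config has not been loaded.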
func UpdateAgentModel(agentName AgentName, modelID models.ModelID) error {
	if cfg == nil {
		panic("config not loaded")
	}

	existingAgentCfg := cfg.Agents[agentName]

	model, ok := models.SupportedModels[modelID]
	if !ok {
		return fmt.Errorf("model %s not supported", modelID)
	}

	maxTokens := existingAgentCfg.MaxTokens
	if model.DefaultMaxTokens > 0 {
		maxTokens = model.DefaultMaxTokens
	}

	newAgentCfg := Agent{
		Model:           modelID,
		MaxTokens:       maxTokens,
		ReasoningEffort: existingAgentCfg.ReasoningEffort,
	}
	cfg.Agents[agentName] = newAgentCfg

	if err := validateAgent(cfg, agentName, newAgentCfg); err != nil {
		// revert config update on failure
		cfg.Agents[agentName] = existingAgentCfg
		return fmt.Errorf("failed to update agent model: %w", err)
	}

	return updateCfgFile(func(config *Config) {
		if config.Agents == nil {
			config.Agents = make(map[AgentName]Agent)
		}
		config.Agents[agentName] = newAgentCfg
	})
}

// UpdateTheme updates the theme in the configuration and writes it to the config file.
func UpdateTheme(themeName string) error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// Update the in-memory config
	cfg.TUI.Theme = themeName

	// Update the file config
	return updateCfgFile(func(config *Config) {
		config.TUI.Theme = themeName
	})
}