package config

import (
	"encoding/json"
	"errors"
	"fmt"
	"log/slog"
	"maps"
	"os"
	"path/filepath"
	"slices"
	"strings"
	"sync"

	"github.com/charmbracelet/crush/internal/fur/provider"
	"github.com/charmbracelet/crush/internal/logging"
	"github.com/invopop/jsonschema"
)

const (
	defaultDataDirectory = ".crush"
	defaultLogLevel      = "info"
	appName              = "crush"

	MaxTokensFallbackDefault = 4096
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"GEMINI.md",
	"gemini.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
}

type AgentID string

const (
	AgentCoder AgentID = "coder"
	AgentTask  AgentID = "task"
)

type ModelType string

const (
	LargeModel ModelType = "large"
	SmallModel ModelType = "small"
)

type Model struct {
	ID string `json:"id" jsonschema:"title=Model ID,description=Unique identifier for the model as used by the provider API"`
	Name string `json:"name" jsonschema:"title=Model Name,description=Display name of the model"`
	CostPer1MIn float64 `json:"cost_per_1m_in,omitempty" jsonschema:"title=Input Cost,description=Cost per 1 million input tokens,minimum=0"`
	CostPer1MOut float64 `json:"cost_per_1m_out,omitempty" jsonschema:"title=Output Cost,description=Cost per 1 million output tokens,minimum=0"`
	CostPer1MInCached float64 `json:"cost_per_1m_in_cached,omitempty" jsonschema:"title=Cached Input Cost,description=Cost per 1 million cached input tokens,minimum=0"`
	CostPer1MOutCached float64 `json:"cost_per_1m_out_cached,omitempty" jsonschema:"title=Cached Output Cost,description=Cost per 1 million cached output tokens,minimum=0"`
	ContextWindow int64 `json:"context_window" jsonschema:"title=Context Window,description=Maximum context window size in tokens,minimum=1"`
	DefaultMaxTokens int64 `json:"default_max_tokens" jsonschema:"title=Default Max Tokens,description=Default maximum tokens for responses,minimum=1"`
	CanReason bool `json:"can_reason,omitempty" jsonschema:"title=Can Reason,description=Whether the model supports reasoning capabilities"`
	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"title=Reasoning Effort,description=Default reasoning effort level for reasoning models"`
	HasReasoningEffort bool `json:"has_reasoning_effort,omitempty" jsonschema:"title=Has Reasoning Effort,description=Whether the model supports reasoning effort configuration"`
	SupportsImages bool `json:"supports_attachments,omitempty" jsonschema:"title=Supports Images,description=Whether the model supports image attachments"`
}

type VertexAIOptions struct {
	APIKey string `json:"api_key,omitempty"`
	Project string `json:"project,omitempty"`
	Location string `json:"location,omitempty"`
}

type ProviderConfig struct {
	ID provider.InferenceProvider `json:"id,omitempty" jsonschema:"title=Provider ID,description=Unique identifier for the provider"`
	BaseURL string `json:"base_url,omitempty" jsonschema:"title=Base URL,description=Base URL for the provider API (required for custom providers)"`
	ProviderType provider.Type `json:"provider_type" jsonschema:"title=Provider Type,description=Type of the provider (openai, anthropic, etc.)"`
	APIKey string `json:"api_key,omitempty" jsonschema:"title=API Key,description=API key for authenticating with the provider"`
	Disabled bool `json:"disabled,omitempty" jsonschema:"title=Disabled,description=Whether this provider is disabled,default=false"`
	ExtraHeaders map[string]string `json:"extra_headers,omitempty" jsonschema:"title=Extra Headers,description=Additional HTTP headers to send with requests"`
	// Used, for example, by Vertex AI to set the project.
	ExtraParams map[string]string `json:"extra_params,omitempty" jsonschema:"title=Extra Parameters,description=Additional provider-specific parameters"`

	DefaultLargeModel string `json:"default_large_model,omitempty" jsonschema:"title=Default Large Model,description=Default model ID for large model type"`
	DefaultSmallModel string `json:"default_small_model,omitempty" jsonschema:"title=Default Small Model,description=Default model ID for small model type"`

	Models []Model `json:"models,omitempty" jsonschema:"title=Models,description=List of available models for this provider"`
}

type Agent struct {
	ID AgentID `json:"id,omitempty" jsonschema:"title=Agent ID,description=Unique identifier for the agent,enum=coder,enum=task"`
	Name string `json:"name,omitempty" jsonschema:"title=Name,description=Display name of the agent"`
	Description string `json:"description,omitempty" jsonschema:"title=Description,description=Description of what the agent does"`
	Disabled bool `json:"disabled,omitempty" jsonschema:"title=Disabled,description=Whether this agent is disabled,default=false"`

	Model ModelType `json:"model" jsonschema:"title=Model Type,description=Type of model to use (large or small),enum=large,enum=small"`

	// The available tools for the agent
	// if this is nil, all tools are available
	AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"title=Allowed Tools,description=List of tools this agent is allowed to use (if nil all tools are allowed)"`

	// AllowedMCP lists the MCP servers available to this agent.
	// If the map is empty, all MCP servers are available.
	// Each entry maps an MCP server to the tools the agent may use from it;
	// a nil tool list means all tools from that MCP server are allowed.
	AllowedMCP map[string][]string `json:"allowed_mcp,omitempty" jsonschema:"title=Allowed MCP,description=Map of MCP servers this agent can use and their allowed tools"`

	// The list of LSPs that this agent can use
	// if this is nil, all LSPs are available
	AllowedLSP []string `json:"allowed_lsp,omitempty" jsonschema:"title=Allowed LSP,description=List of LSP servers this agent can use (if nil all LSPs are allowed)"`

	// Overrides the context paths for this agent
	ContextPaths []string `json:"context_paths,omitempty" jsonschema:"title=Context Paths,description=Custom context paths for this agent (additive to global context paths)"`
}

type MCPType string

const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
	MCPHttp  MCPType = "http"
)

type MCP struct {
	Command string `json:"command,omitempty" jsonschema:"title=Command,description=Command to execute for stdio MCP servers"`
	Env []string `json:"env,omitempty" jsonschema:"title=Environment,description=Environment variables for the MCP server"`
	Args []string `json:"args,omitempty" jsonschema:"title=Arguments,description=Command line arguments for the MCP server"`
	Type MCPType `json:"type" jsonschema:"title=Type,description=Type of MCP connection,enum=stdio,enum=sse,enum=http,default=stdio"`
	URL string `json:"url,omitempty" jsonschema:"title=URL,description=URL for SSE and HTTP MCP servers"`
	// TODO: maybe make it possible to get the value from the env
	Headers map[string]string `json:"headers,omitempty" jsonschema:"title=Headers,description=HTTP headers for SSE and HTTP MCP servers"`
}

type LSPConfig struct {
	Disabled bool `json:"disabled,omitempty" jsonschema:"title=Disabled,description=Whether this LSP server is disabled,default=false"`
	Command string `json:"command" jsonschema:"title=Command,description=Command to execute for the LSP server"`
	Args []string `json:"args,omitempty" jsonschema:"title=Arguments,description=Command line arguments for the LSP server"`
	Options any `json:"options,omitempty" jsonschema:"title=Options,description=LSP server specific options"`
}

type TUIOptions struct {
	CompactMode bool `json:"compact_mode" jsonschema:"title=Compact Mode,description=Enable compact mode for the TUI,default=false"`
	// Here we can add themes later or any TUI related options
}

type Options struct {
	ContextPaths []string `json:"context_paths,omitempty" jsonschema:"title=Context Paths,description=List of paths to search for context files"`
	TUI TUIOptions `json:"tui,omitempty" jsonschema:"title=TUI Options,description=Terminal UI configuration options"`
	Debug bool `json:"debug,omitempty" jsonschema:"title=Debug,description=Enable debug logging,default=false"`
	DebugLSP bool `json:"debug_lsp,omitempty" jsonschema:"title=Debug LSP,description=Enable LSP debug logging,default=false"`
	DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty" jsonschema:"title=Disable Auto Summarize,description=Disable automatic conversation summarization,default=false"`
	// Relative to the cwd
	DataDirectory string `json:"data_directory,omitempty" jsonschema:"title=Data Directory,description=Directory for storing application data,default=.crush"`
}

type PreferredModel struct {
	ModelID string `json:"model_id" jsonschema:"title=Model ID,description=ID of the preferred model"`
	Provider provider.InferenceProvider `json:"provider" jsonschema:"title=Provider,description=Provider for the preferred model"`
	// ReasoningEffort overrides the default reasoning effort for this model
	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"title=Reasoning Effort,description=Override reasoning effort for this model"`
	// MaxTokens overrides the default max tokens for this model
	MaxTokens int64 `json:"max_tokens,omitempty" jsonschema:"title=Max Tokens,description=Override max tokens for this model,minimum=1"`

	// Think indicates if the model should think, only applicable for anthropic reasoning models
	Think bool `json:"think,omitempty" jsonschema:"title=Think,description=Enable thinking for reasoning models,default=false"`
}

type PreferredModels struct {
	Large PreferredModel `json:"large,omitempty" jsonschema:"title=Large Model,description=Preferred model configuration for large model type"`
	Small PreferredModel `json:"small,omitempty" jsonschema:"title=Small Model,description=Preferred model configuration for small model type"`
}

type Config struct {
	Models PreferredModels `json:"models,omitempty" jsonschema:"title=Models,description=Preferred model configurations for large and small model types"`
	// List of configured providers
	Providers map[provider.InferenceProvider]ProviderConfig `json:"providers,omitempty" jsonschema:"title=Providers,description=LLM provider configurations"`

	// List of configured agents
	Agents map[AgentID]Agent `json:"agents,omitempty" jsonschema:"title=Agents,description=Agent configurations for different tasks"`

	// List of configured MCPs
	MCP map[string]MCP `json:"mcp,omitempty" jsonschema:"title=MCP,description=Model Context Protocol server configurations"`

	// List of configured LSPs
	LSP map[string]LSPConfig `json:"lsp,omitempty" jsonschema:"title=LSP,description=Language Server Protocol configurations"`

	// Miscellaneous options
	Options Options `json:"options,omitempty" jsonschema:"title=Options,description=General application options and settings"`
}

var (
	instance *Config   // The single instance of the Singleton
	cwd      string
	once     sync.Once // Ensures the initialization happens only once
)

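// readConfigFile loads a config file from the given path. A missing file is
// not an error; it simply yields an empty configuration.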
func readConfigFile(path string) (*Config, error) {
	var cfg *Config
	if _, err := os.Stat(path); err != nil && !os.IsNotExist(err) {
		// some other error occurred while checking the file
		return nil, err
	} else if err == nil {
		// config file exists, read it
		file, err := os.ReadFile(path)
		if err != nil {
			return nil, err
		}
		cfg = &Config{}
		if err := json.Unmarshal(file, cfg); err != nil {
			return nil, err
		}
	} else {
		// config file does not exist, return an empty config
		cfg = &Config{}
	}
	return cfg, nil
}

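// loadConfig builds the effective configuration: it starts from environment
// based defaults, merges the global and local config files in priority order,
// sets up logging, seeds the default agents, and validates the result.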
func loadConfig(cwd string, debug bool) (*Config, error) {
	// First read the global config file
	cfgPath := ConfigPath()

	cfg := defaultConfigBasedOnEnv()
	cfg.Options.Debug = debug
	defaultLevel := slog.LevelInfo
	if cfg.Options.Debug {
		defaultLevel = slog.LevelDebug
	}
	if os.Getenv("CRUSH_DEV_DEBUG") == "true" {
		loggingFile := filepath.Join(cfg.Options.DataDirectory, "debug.log")

		// if file does not exist create it
		if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
			if err := os.MkdirAll(cfg.Options.DataDirectory, 0o755); err != nil {
				return cfg, fmt.Errorf("failed to create directory: %w", err)
			}
			if _, err := os.Create(loggingFile); err != nil {
				return cfg, fmt.Errorf("failed to create log file: %w", err)
			}
		}

		messagesPath := filepath.Join(cfg.Options.DataDirectory, "messages")

		if _, err := os.Stat(messagesPath); os.IsNotExist(err) {
			if err := os.MkdirAll(messagesPath, 0o755); err != nil {
				return cfg, fmt.Errorf("failed to create directory: %w", err)
			}
		}
		logging.MessageDir = messagesPath

		sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
		if err != nil {
			return cfg, fmt.Errorf("failed to open log file: %w", err)
		}
		// Configure logger
		logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	} else {
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	}

	priorityOrderedConfigFiles := []string{
		cfgPath,                           // Global config file
		filepath.Join(cwd, "crush.json"),  // Local config file
		filepath.Join(cwd, ".crush.json"), // Local config file
	}

	configs := make([]*Config, 0)
	for _, path := range priorityOrderedConfigFiles {
		localConfig, err := readConfigFile(path)
		if err != nil {
			return nil, fmt.Errorf("failed to read config file %s: %w", path, err)
		}
		if localConfig != nil {
			// If the config file was read successfully, add it to the list
			configs = append(configs, localConfig)
		}
	}

	// merge options
	mergeOptions(cfg, configs...)

	mergeProviderConfigs(cfg, configs...)
	// No providers found; the app is not initialized yet.
	if len(cfg.Providers) == 0 {
		return cfg, nil
	}
	preferredProvider := getPreferredProvider(cfg.Providers)
	if preferredProvider != nil {
		cfg.Models = PreferredModels{
			Large: PreferredModel{
				ModelID:  preferredProvider.DefaultLargeModel,
				Provider: preferredProvider.ID,
			},
			Small: PreferredModel{
				ModelID:  preferredProvider.DefaultSmallModel,
				Provider: preferredProvider.ID,
			},
		}
	} else {
		// No valid providers found, set empty models
		cfg.Models = PreferredModels{}
	}

	mergeModels(cfg, configs...)

	agents := map[AgentID]Agent{
		AgentCoder: {
			ID:           AgentCoder,
			Name:         "Coder",
			Description:  "An agent that helps with executing coding tasks.",
			Model:        LargeModel,
			ContextPaths: cfg.Options.ContextPaths,
			// All tools allowed
		},
		AgentTask: {
			ID:           AgentTask,
			Name:         "Task",
			Description:  "An agent that helps with searching for context and finding implementation details.",
			Model:        LargeModel,
			ContextPaths: cfg.Options.ContextPaths,
			AllowedTools: []string{
				"glob",
				"grep",
				"ls",
				"sourcegraph",
				"view",
			},
			// NO MCPs or LSPs by default
			AllowedMCP: map[string][]string{},
			AllowedLSP: []string{},
		},
	}
	cfg.Agents = agents
	mergeAgents(cfg, configs...)
	mergeMCPs(cfg, configs...)
	mergeLSPs(cfg, configs...)

	// Validate the final configuration
	if err := cfg.Validate(); err != nil {
		return cfg, fmt.Errorf("configuration validation failed: %w", err)
	}

	return cfg, nil
}

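// Init loads the configuration exactly once for the given working directory
// and stores it as the package-wide instance; later calls return that same
// instance. A minimal usage sketch (illustrative only):
//
//	cfg, err := config.Init(workingDir, false)
//	if err != nil {
//		// handle the error
//	}
//	cfg = config.Get() // same instance from anywhere else in the app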
func Init(workingDir string, debug bool) (*Config, error) {
	var err error
	once.Do(func() {
		cwd = workingDir
		instance, err = loadConfig(cwd, debug)
		if err != nil {
			logging.Error("Failed to load config", "error", err)
		}
	})

	return instance, err
}

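// Get returns the previously initialized configuration and panics if Init has
// not been called yet.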
func Get() *Config {
	if instance == nil {
		// TODO: Handle this better
		panic("Config not initialized. Call Init first.")
	}
	return instance
}

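// getPreferredProvider picks the first configured, enabled provider following
// the order of the known provider list, falling back to any enabled provider.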
func getPreferredProvider(configuredProviders map[provider.InferenceProvider]ProviderConfig) *ProviderConfig {
	providers := Providers()
	for _, p := range providers {
		if providerConfig, ok := configuredProviders[p.ID]; ok && !providerConfig.Disabled {
			return &providerConfig
		}
	}
	// If none of the known providers is configured, return the first enabled one.
	for _, providerConfig := range configuredProviders {
		if !providerConfig.Disabled {
			return &providerConfig
		}
	}
	return nil
}

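// mergeProviderConfig overlays the user-supplied provider settings in other on
// top of base; base URL, type, and extra headers/params are only overridable
// for providers that are not in the known provider list.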
func mergeProviderConfig(p provider.InferenceProvider, base, other ProviderConfig) ProviderConfig {
	if other.APIKey != "" {
		base.APIKey = other.APIKey
	}
	// Only change these options if the provider is not a known provider
	if !slices.Contains(provider.KnownProviders(), p) {
		if other.BaseURL != "" {
			base.BaseURL = other.BaseURL
		}
		if other.ProviderType != "" {
			base.ProviderType = other.ProviderType
		}
		if len(other.ExtraHeaders) > 0 {
			if base.ExtraHeaders == nil {
				base.ExtraHeaders = make(map[string]string)
			}
			maps.Copy(base.ExtraHeaders, other.ExtraHeaders)
		}
		if len(other.ExtraParams) > 0 {
			if base.ExtraParams == nil {
				base.ExtraParams = make(map[string]string)
			}
			maps.Copy(base.ExtraParams, other.ExtraParams)
		}
	}

	if other.Disabled {
		base.Disabled = other.Disabled
	}

	if other.DefaultLargeModel != "" {
		base.DefaultLargeModel = other.DefaultLargeModel
	}
	if other.DefaultSmallModel != "" {
		base.DefaultSmallModel = other.DefaultSmallModel
	}
	// Add new models if they don't exist
	if other.Models != nil {
		for _, model := range other.Models {
			// check if the model already exists
			exists := false
			for _, existingModel := range base.Models {
				if existingModel.ID == model.ID {
					exists = true
					break
				}
			}
			if !exists {
				base.Models = append(base.Models, model)
			}
		}
	}

	return base
}

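// validateProvider enforces the extra requirements for custom providers:
// an OpenAI-compatible type, a base URL, and an API key.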
func validateProvider(p provider.InferenceProvider, providerConfig ProviderConfig) error {
	if !slices.Contains(provider.KnownProviders(), p) {
		if providerConfig.ProviderType != provider.TypeOpenAI {
			return errors.New("invalid provider type: " + string(providerConfig.ProviderType))
		}
		if providerConfig.BaseURL == "" {
			return errors.New("base URL must be set for custom providers")
		}
		if providerConfig.APIKey == "" {
			return errors.New("API key must be set for custom providers")
		}
	}
	return nil
}

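// mergeModels lets later config files override the preferred large and small
// models, but only when both the model ID and the provider are set.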
func mergeModels(base *Config, others ...*Config) {
	for _, cfg := range others {
		if cfg == nil {
			continue
		}
		if cfg.Models.Large.ModelID != "" && cfg.Models.Large.Provider != "" {
			base.Models.Large = cfg.Models.Large
		}

		if cfg.Models.Small.ModelID != "" && cfg.Models.Small.Provider != "" {
			base.Models.Small = cfg.Models.Small
		}
	}
}

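// mergeOptions folds the options from later config files into base; context
// paths are additive, and boolean flags only flip from false to true.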
func mergeOptions(base *Config, others ...*Config) {
	for _, cfg := range others {
		if cfg == nil {
			continue
		}
		baseOptions := base.Options
		other := cfg.Options
		if len(other.ContextPaths) > 0 {
			baseOptions.ContextPaths = append(baseOptions.ContextPaths, other.ContextPaths...)
		}

		if other.TUI.CompactMode {
			baseOptions.TUI.CompactMode = other.TUI.CompactMode
		}

		if other.Debug {
			baseOptions.Debug = other.Debug
		}

		if other.DebugLSP {
			baseOptions.DebugLSP = other.DebugLSP
		}

		if other.DisableAutoSummarize {
			baseOptions.DisableAutoSummarize = other.DisableAutoSummarize
		}

		if other.DataDirectory != "" {
			baseOptions.DataDirectory = other.DataDirectory
		}
		base.Options = baseOptions
	}
}

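// mergeAgents merges agent definitions from later config files into base. The
// built-in coder and task agents only accept a limited set of overrides.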
func mergeAgents(base *Config, others ...*Config) {
	for _, cfg := range others {
		if cfg == nil {
			continue
		}
		for agentID, newAgent := range cfg.Agents {
			if _, ok := base.Agents[agentID]; !ok {
				newAgent.ID = agentID
				if newAgent.Model == "" {
					newAgent.Model = LargeModel
				}
				if len(newAgent.ContextPaths) > 0 {
					newAgent.ContextPaths = append(base.Options.ContextPaths, newAgent.ContextPaths...)
				} else {
					newAgent.ContextPaths = base.Options.ContextPaths
				}
				base.Agents[agentID] = newAgent
			} else {
				baseAgent := base.Agents[agentID]

				if agentID == AgentCoder || agentID == AgentTask {
					if newAgent.Model != "" {
						baseAgent.Model = newAgent.Model
					}
					if newAgent.AllowedMCP != nil {
						baseAgent.AllowedMCP = newAgent.AllowedMCP
					}
					if newAgent.AllowedLSP != nil {
						baseAgent.AllowedLSP = newAgent.AllowedLSP
					}
					// Context paths are additive for known agents too
					if len(newAgent.ContextPaths) > 0 {
						baseAgent.ContextPaths = append(baseAgent.ContextPaths, newAgent.ContextPaths...)
					}
				} else {
					if newAgent.Name != "" {
						baseAgent.Name = newAgent.Name
					}
					if newAgent.Description != "" {
						baseAgent.Description = newAgent.Description
					}
					if newAgent.Model != "" {
						baseAgent.Model = newAgent.Model
					} else if baseAgent.Model == "" {
						baseAgent.Model = LargeModel
					}

					baseAgent.Disabled = newAgent.Disabled

					if newAgent.AllowedTools != nil {
						baseAgent.AllowedTools = newAgent.AllowedTools
					}
					if newAgent.AllowedMCP != nil {
						baseAgent.AllowedMCP = newAgent.AllowedMCP
					}
					if newAgent.AllowedLSP != nil {
						baseAgent.AllowedLSP = newAgent.AllowedLSP
					}
					if len(newAgent.ContextPaths) > 0 {
						baseAgent.ContextPaths = append(baseAgent.ContextPaths, newAgent.ContextPaths...)
					}
				}

				base.Agents[agentID] = baseAgent
			}
		}
	}
}

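// mergeMCPs copies MCP server definitions from later config files into base.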
func mergeMCPs(base *Config, others ...*Config) {
	for _, cfg := range others {
		if cfg == nil {
			continue
		}
		maps.Copy(base.MCP, cfg.MCP)
	}
}

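// mergeLSPs copies LSP server definitions from later config files into base.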
func mergeLSPs(base *Config, others ...*Config) {
	for _, cfg := range others {
		if cfg == nil {
			continue
		}
		maps.Copy(base.LSP, cfg.LSP)
	}
}

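// mergeProviderConfigs merges provider settings from later config files into
// base, seeding known providers with their default configuration, and then
// drops any provider that fails validation.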
func mergeProviderConfigs(base *Config, others ...*Config) {
	for _, cfg := range others {
		if cfg == nil {
			continue
		}
		for providerName, p := range cfg.Providers {
			p.ID = providerName
			if _, ok := base.Providers[providerName]; !ok {
				if slices.Contains(provider.KnownProviders(), providerName) {
					providers := Providers()
					for _, providerDef := range providers {
						if providerDef.ID == providerName {
							logging.Info("Using default provider config for", "provider", providerName)
							baseProvider := getDefaultProviderConfig(providerDef, providerDef.APIKey)
							base.Providers[providerName] = mergeProviderConfig(providerName, baseProvider, p)
							break
						}
					}
				} else {
					base.Providers[providerName] = p
				}
			} else {
				base.Providers[providerName] = mergeProviderConfig(providerName, base.Providers[providerName], p)
			}
		}
	}

	finalProviders := make(map[provider.InferenceProvider]ProviderConfig)
	for providerName, providerConfig := range base.Providers {
		err := validateProvider(providerName, providerConfig)
		if err != nil {
			logging.Warn("Skipping provider", "name", providerName, "error", err)
			continue // Skip invalid providers
		}
		finalProviders[providerName] = providerConfig
	}
	base.Providers = finalProviders
}

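// providerDefaultConfig returns the baseline ProviderConfig (type, base URL,
// extra headers) for a known provider; unknown providers fall back to the
// OpenAI-compatible type.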
func providerDefaultConfig(providerID provider.InferenceProvider) ProviderConfig {
	switch providerID {
	case provider.InferenceProviderAnthropic:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeAnthropic,
		}
	case provider.InferenceProviderOpenAI:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeOpenAI,
		}
	case provider.InferenceProviderGemini:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeGemini,
		}
	case provider.InferenceProviderBedrock:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeBedrock,
		}
	case provider.InferenceProviderAzure:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeAzure,
		}
	case provider.InferenceProviderOpenRouter:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeOpenAI,
			BaseURL:      "https://openrouter.ai/api/v1",
			ExtraHeaders: map[string]string{
				"HTTP-Referer": "crush.charm.land",
				"X-Title":      "Crush",
			},
		}
	case provider.InferenceProviderXAI:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeXAI,
			BaseURL:      "https://api.x.ai/v1",
		}
	case provider.InferenceProviderVertexAI:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeVertexAI,
		}
	default:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeOpenAI,
		}
	}
}

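// getDefaultProviderConfig builds a complete provider configuration from a
// provider definition, resolving $ENV_VAR-style API endpoints and copying the
// provider's model catalog.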
func getDefaultProviderConfig(p provider.Provider, apiKey string) ProviderConfig {
	providerConfig := providerDefaultConfig(p.ID)
	providerConfig.APIKey = apiKey
	providerConfig.DefaultLargeModel = p.DefaultLargeModelID
	providerConfig.DefaultSmallModel = p.DefaultSmallModelID
	baseURL := p.APIEndpoint
	if strings.HasPrefix(baseURL, "$") {
		envVar := strings.TrimPrefix(baseURL, "$")
		baseURL = os.Getenv(envVar)
	}
	providerConfig.BaseURL = baseURL
	for _, model := range p.Models {
		configModel := Model{
			ID:                 model.ID,
			Name:               model.Name,
			CostPer1MIn:        model.CostPer1MIn,
			CostPer1MOut:       model.CostPer1MOut,
			CostPer1MInCached:  model.CostPer1MInCached,
			CostPer1MOutCached: model.CostPer1MOutCached,
			ContextWindow:      model.ContextWindow,
			DefaultMaxTokens:   model.DefaultMaxTokens,
			CanReason:          model.CanReason,
			SupportsImages:     model.SupportsImages,
		}
		// Set reasoning effort for reasoning models
		if model.HasReasoningEffort && model.DefaultReasoningEffort != "" {
			configModel.HasReasoningEffort = model.HasReasoningEffort
			configModel.ReasoningEffort = model.DefaultReasoningEffort
		}
		providerConfig.Models = append(providerConfig.Models, configModel)
	}
	return providerConfig
}

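// defaultConfigBasedOnEnv builds the initial configuration from environment
// variables: providers whose API key env var is set, plus Vertex AI and
// Bedrock when their respective credentials are present.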
func defaultConfigBasedOnEnv() *Config {
	cfg := &Config{
		Options: Options{
			DataDirectory: defaultDataDirectory,
			ContextPaths:  defaultContextPaths,
		},
		Providers: make(map[provider.InferenceProvider]ProviderConfig),
		Agents:    make(map[AgentID]Agent),
		LSP:       make(map[string]LSPConfig),
		MCP:       make(map[string]MCP),
	}

	providers := Providers()

	for _, p := range providers {
		if strings.HasPrefix(p.APIKey, "$") {
			envVar := strings.TrimPrefix(p.APIKey, "$")
			if apiKey := os.Getenv(envVar); apiKey != "" {
				cfg.Providers[p.ID] = getDefaultProviderConfig(p, apiKey)
			}
		}
	}
	// TODO: support local models

	if useVertexAI := os.Getenv("GOOGLE_GENAI_USE_VERTEXAI"); useVertexAI == "true" {
		providerConfig := providerDefaultConfig(provider.InferenceProviderVertexAI)
		providerConfig.ExtraParams = map[string]string{
			"project":  os.Getenv("GOOGLE_CLOUD_PROJECT"),
			"location": os.Getenv("GOOGLE_CLOUD_LOCATION"),
		}
		// Find the VertexAI provider definition to get default models
		for _, p := range providers {
			if p.ID == provider.InferenceProviderVertexAI {
				providerConfig.DefaultLargeModel = p.DefaultLargeModelID
				providerConfig.DefaultSmallModel = p.DefaultSmallModelID
				for _, model := range p.Models {
					configModel := Model{
						ID:                 model.ID,
						Name:               model.Name,
						CostPer1MIn:        model.CostPer1MIn,
						CostPer1MOut:       model.CostPer1MOut,
						CostPer1MInCached:  model.CostPer1MInCached,
						CostPer1MOutCached: model.CostPer1MOutCached,
						ContextWindow:      model.ContextWindow,
						DefaultMaxTokens:   model.DefaultMaxTokens,
						CanReason:          model.CanReason,
						SupportsImages:     model.SupportsImages,
					}
					// Set reasoning effort for reasoning models
					if model.HasReasoningEffort && model.DefaultReasoningEffort != "" {
						configModel.HasReasoningEffort = model.HasReasoningEffort
						configModel.ReasoningEffort = model.DefaultReasoningEffort
					}
					providerConfig.Models = append(providerConfig.Models, configModel)
				}
				break
			}
		}
		cfg.Providers[provider.InferenceProviderVertexAI] = providerConfig
	}

	if hasAWSCredentials() {
		providerConfig := providerDefaultConfig(provider.InferenceProviderBedrock)
		providerConfig.ExtraParams = map[string]string{
			"region": os.Getenv("AWS_DEFAULT_REGION"),
		}
		if providerConfig.ExtraParams["region"] == "" {
			providerConfig.ExtraParams["region"] = os.Getenv("AWS_REGION")
		}
		// Find the Bedrock provider definition to get default models
		for _, p := range providers {
			if p.ID == provider.InferenceProviderBedrock {
				providerConfig.DefaultLargeModel = p.DefaultLargeModelID
				providerConfig.DefaultSmallModel = p.DefaultSmallModelID
				for _, model := range p.Models {
					configModel := Model{
						ID:                 model.ID,
						Name:               model.Name,
						CostPer1MIn:        model.CostPer1MIn,
						CostPer1MOut:       model.CostPer1MOut,
						CostPer1MInCached:  model.CostPer1MInCached,
						CostPer1MOutCached: model.CostPer1MOutCached,
						ContextWindow:      model.ContextWindow,
						DefaultMaxTokens:   model.DefaultMaxTokens,
						CanReason:          model.CanReason,
						SupportsImages:     model.SupportsImages,
					}
					// Set reasoning effort for reasoning models
					if model.HasReasoningEffort && model.DefaultReasoningEffort != "" {
						configModel.HasReasoningEffort = model.HasReasoningEffort
						configModel.ReasoningEffort = model.DefaultReasoningEffort
					}
					providerConfig.Models = append(providerConfig.Models, configModel)
				}
				break
			}
		}
		cfg.Providers[provider.InferenceProviderBedrock] = providerConfig
	}
	return cfg
}

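// hasAWSCredentials reports whether the environment carries any of the common
// AWS credential, profile, region, or container-credential variables.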
func hasAWSCredentials() bool {
	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
		return true
	}

	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
		return true
	}

	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
		return true
	}

	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
		return true
	}

	return false
}

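// WorkingDirectory returns the working directory the configuration was
// initialized with.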
func WorkingDirectory() string {
	return cwd
}

// TODO: Handle error state

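// GetAgentModel resolves the concrete model for an agent by following its
// model type (large or small) to the preferred model and its provider.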
func GetAgentModel(agentID AgentID) Model {
	cfg := Get()
	agent, ok := cfg.Agents[agentID]
	if !ok {
		logging.Error("Agent not found", "agent_id", agentID)
		return Model{}
	}

	var model PreferredModel
	switch agent.Model {
	case LargeModel:
		model = cfg.Models.Large
	case SmallModel:
		model = cfg.Models.Small
	default:
		logging.Warn("Unknown model type for agent", "agent_id", agentID, "model_type", agent.Model)
		model = cfg.Models.Large // Fallback to large model
	}
	providerConfig, ok := cfg.Providers[model.Provider]
	if !ok {
		logging.Error("Provider not found for agent", "agent_id", agentID, "provider", model.Provider)
		return Model{}
	}

	for _, m := range providerConfig.Models {
		if m.ID == model.ModelID {
			return m
		}
	}

	logging.Error("Model not found for agent", "agent_id", agentID, "model_id", model.ModelID, "provider", model.Provider)
	return Model{}
}

// GetAgentEffectiveMaxTokens returns the effective max tokens for an agent,
// considering any overrides from the preferred model configuration
func GetAgentEffectiveMaxTokens(agentID AgentID) int64 {
	cfg := Get()
	agent, ok := cfg.Agents[agentID]
	if !ok {
		logging.Error("Agent not found", "agent_id", agentID)
		return 0
	}

	var preferredModel PreferredModel
	switch agent.Model {
	case LargeModel:
		preferredModel = cfg.Models.Large
	case SmallModel:
		preferredModel = cfg.Models.Small
	default:
		logging.Warn("Unknown model type for agent", "agent_id", agentID, "model_type", agent.Model)
		preferredModel = cfg.Models.Large // Fallback to large model
	}

	// Get the base model configuration
	baseModel := GetAgentModel(agentID)
	if baseModel.ID == "" {
		return 0
	}

	// Start with the default max tokens from the base model
	maxTokens := baseModel.DefaultMaxTokens

	// Override with preferred model max tokens if set
	if preferredModel.MaxTokens > 0 {
		maxTokens = preferredModel.MaxTokens
	}

	return maxTokens
}

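// GetAgentProvider returns the provider configuration backing the given
// agent's preferred model.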
func GetAgentProvider(agentID AgentID) ProviderConfig {
	cfg := Get()
	agent, ok := cfg.Agents[agentID]
	if !ok {
		logging.Error("Agent not found", "agent_id", agentID)
		return ProviderConfig{}
	}

	var model PreferredModel
	switch agent.Model {
	case LargeModel:
		model = cfg.Models.Large
	case SmallModel:
		model = cfg.Models.Small
	default:
		logging.Warn("Unknown model type for agent", "agent_id", agentID, "model_type", agent.Model)
		model = cfg.Models.Large // Fallback to large model
	}

	providerConfig, ok := cfg.Providers[model.Provider]
	if !ok {
		logging.Error("Provider not found for agent", "agent_id", agentID, "provider", model.Provider)
		return ProviderConfig{}
	}

	return providerConfig
}

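// GetProviderModel looks up a model by ID within a configured provider.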
func GetProviderModel(provider provider.InferenceProvider, modelID string) Model {
	cfg := Get()
	providerConfig, ok := cfg.Providers[provider]
	if !ok {
		logging.Error("Provider not found", "provider", provider)
		return Model{}
	}

	for _, model := range providerConfig.Models {
		if model.ID == modelID {
			return model
		}
	}

	logging.Error("Model not found for provider", "provider", provider, "model_id", modelID)
	return Model{}
}

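// GetModel resolves the configured preferred model for the given model type.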
func GetModel(modelType ModelType) Model {
	cfg := Get()
	var model PreferredModel
	switch modelType {
	case LargeModel:
		model = cfg.Models.Large
	case SmallModel:
		model = cfg.Models.Small
	default:
		model = cfg.Models.Large // Fallback to large model
	}
	providerConfig, ok := cfg.Providers[model.Provider]
	if !ok {
		return Model{}
	}

	for _, m := range providerConfig.Models {
		if m.ID == model.ModelID {
			return m
		}
	}
	return Model{}
}

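// UpdatePreferredModel replaces the preferred model for the given model type
// on the shared in-memory configuration. For example (illustrative values):
//
//	err := config.UpdatePreferredModel(config.LargeModel, config.PreferredModel{
//		ModelID:  "my-model-id",
//		Provider: provider.InferenceProviderOpenAI,
//	})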
func UpdatePreferredModel(modelType ModelType, model PreferredModel) error {
	cfg := Get()
	switch modelType {
	case LargeModel:
		cfg.Models.Large = model
	case SmallModel:
		cfg.Models.Small = model
	default:
		return fmt.Errorf("unknown model type: %s", modelType)
	}
	return nil
}

// ValidationError represents a configuration validation error
type ValidationError struct {
	Field   string
	Message string
}

func (e ValidationError) Error() string {
	return fmt.Sprintf("validation error in %s: %s", e.Field, e.Message)
}

// ValidationErrors represents multiple validation errors
type ValidationErrors []ValidationError

func (e ValidationErrors) Error() string {
	if len(e) == 0 {
		return "no validation errors"
	}
	if len(e) == 1 {
		return e[0].Error()
	}

	var messages []string
	for _, err := range e {
		messages = append(messages, err.Error())
	}
	return fmt.Sprintf("multiple validation errors: %s", strings.Join(messages, "; "))
}

// HasErrors returns true if there are any validation errors
func (e ValidationErrors) HasErrors() bool {
	return len(e) > 0
}

// Add appends a new validation error
func (e *ValidationErrors) Add(field, message string) {
	*e = append(*e, ValidationError{Field: field, Message: message})
}

// Validate performs comprehensive validation of the configuration
func (c *Config) Validate() error {
	var errors ValidationErrors

	// Validate providers
	c.validateProviders(&errors)

	// Validate models
	c.validateModels(&errors)

	// Validate agents
	c.validateAgents(&errors)

	// Validate options
	c.validateOptions(&errors)

	// Validate MCP configurations
	c.validateMCPs(&errors)

	// Validate LSP configurations
	c.validateLSPs(&errors)

	// Validate cross-references
	c.validateCrossReferences(&errors)

	// Validate completeness
	c.validateCompleteness(&errors)

	if errors.HasErrors() {
		return errors
	}

	return nil
}

// validateProviders validates all provider configurations
func (c *Config) validateProviders(errors *ValidationErrors) {
	if c.Providers == nil {
		c.Providers = make(map[provider.InferenceProvider]ProviderConfig)
	}

	knownProviders := provider.KnownProviders()
	validTypes := []provider.Type{
		provider.TypeOpenAI,
		provider.TypeAnthropic,
		provider.TypeGemini,
		provider.TypeAzure,
		provider.TypeBedrock,
		provider.TypeVertexAI,
		provider.TypeXAI,
	}

	for providerID, providerConfig := range c.Providers {
		fieldPrefix := fmt.Sprintf("providers.%s", providerID)

		// Validate API key for non-disabled providers
		if !providerConfig.Disabled && providerConfig.APIKey == "" {
			// Special case for AWS Bedrock and VertexAI which may use other auth methods
			if providerID != provider.InferenceProviderBedrock && providerID != provider.InferenceProviderVertexAI {
				errors.Add(fieldPrefix+".api_key", "API key is required for non-disabled providers")
			}
		}

		// Validate provider type
		validType := slices.Contains(validTypes, providerConfig.ProviderType)
		if !validType {
			errors.Add(fieldPrefix+".provider_type", fmt.Sprintf("invalid provider type: %s", providerConfig.ProviderType))
		}

		// Validate custom providers
		isKnownProvider := slices.Contains(knownProviders, providerID)

		if !isKnownProvider {
			// Custom provider validation
			if providerConfig.BaseURL == "" {
				errors.Add(fieldPrefix+".base_url", "BaseURL is required for custom providers")
			}
			if providerConfig.ProviderType != provider.TypeOpenAI {
				errors.Add(fieldPrefix+".provider_type", "custom providers currently only support OpenAI type")
			}
		}

		// Validate models
		modelIDs := make(map[string]bool)
		for i, model := range providerConfig.Models {
			modelFieldPrefix := fmt.Sprintf("%s.models[%d]", fieldPrefix, i)

			// Check for duplicate model IDs
			if modelIDs[model.ID] {
				errors.Add(modelFieldPrefix+".id", fmt.Sprintf("duplicate model ID: %s", model.ID))
			}
			modelIDs[model.ID] = true

			// Validate required model fields
			if model.ID == "" {
				errors.Add(modelFieldPrefix+".id", "model ID is required")
			}
			if model.Name == "" {
				errors.Add(modelFieldPrefix+".name", "model name is required")
			}
			if model.ContextWindow <= 0 {
				errors.Add(modelFieldPrefix+".context_window", "context window must be positive")
			}
			if model.DefaultMaxTokens <= 0 {
				errors.Add(modelFieldPrefix+".default_max_tokens", "default max tokens must be positive")
			}
			if model.DefaultMaxTokens > model.ContextWindow {
				errors.Add(modelFieldPrefix+".default_max_tokens", "default max tokens cannot exceed context window")
			}

			// Validate cost fields
			if model.CostPer1MIn < 0 {
				errors.Add(modelFieldPrefix+".cost_per_1m_in", "cost per 1M input tokens cannot be negative")
			}
			if model.CostPer1MOut < 0 {
				errors.Add(modelFieldPrefix+".cost_per_1m_out", "cost per 1M output tokens cannot be negative")
			}
			if model.CostPer1MInCached < 0 {
				errors.Add(modelFieldPrefix+".cost_per_1m_in_cached", "cached cost per 1M input tokens cannot be negative")
			}
			if model.CostPer1MOutCached < 0 {
				errors.Add(modelFieldPrefix+".cost_per_1m_out_cached", "cached cost per 1M output tokens cannot be negative")
			}
		}

		// Validate default model references
		if providerConfig.DefaultLargeModel != "" {
			if !modelIDs[providerConfig.DefaultLargeModel] {
				errors.Add(fieldPrefix+".default_large_model", fmt.Sprintf("default large model '%s' not found in provider models", providerConfig.DefaultLargeModel))
			}
		}
		if providerConfig.DefaultSmallModel != "" {
			if !modelIDs[providerConfig.DefaultSmallModel] {
				errors.Add(fieldPrefix+".default_small_model", fmt.Sprintf("default small model '%s' not found in provider models", providerConfig.DefaultSmallModel))
			}
		}

		// Validate provider-specific requirements
		c.validateProviderSpecific(providerID, providerConfig, errors)
	}
}

// validateProviderSpecific validates provider-specific requirements
func (c *Config) validateProviderSpecific(providerID provider.InferenceProvider, providerConfig ProviderConfig, errors *ValidationErrors) {
	fieldPrefix := fmt.Sprintf("providers.%s", providerID)

	switch providerID {
	case provider.InferenceProviderVertexAI:
		if !providerConfig.Disabled {
			if providerConfig.ExtraParams == nil {
				errors.Add(fieldPrefix+".extra_params", "VertexAI requires extra_params configuration")
			} else {
				if providerConfig.ExtraParams["project"] == "" {
					errors.Add(fieldPrefix+".extra_params.project", "VertexAI requires project parameter")
				}
				if providerConfig.ExtraParams["location"] == "" {
					errors.Add(fieldPrefix+".extra_params.location", "VertexAI requires location parameter")
				}
			}
		}
	case provider.InferenceProviderBedrock:
		if !providerConfig.Disabled {
			if providerConfig.ExtraParams == nil || providerConfig.ExtraParams["region"] == "" {
				errors.Add(fieldPrefix+".extra_params.region", "Bedrock requires region parameter")
			}
			// Check for AWS credentials in environment
			if !hasAWSCredentials() {
				errors.Add(fieldPrefix, "Bedrock requires AWS credentials in environment")
			}
		}
	}
}

// validateModels validates preferred model configurations
func (c *Config) validateModels(errors *ValidationErrors) {
	// Validate large model
	if c.Models.Large.ModelID != "" || c.Models.Large.Provider != "" {
		if c.Models.Large.ModelID == "" {
			errors.Add("models.large.model_id", "large model ID is required when provider is set")
		}
		if c.Models.Large.Provider == "" {
			errors.Add("models.large.provider", "large model provider is required when model ID is set")
		}

		// Check if provider exists and is not disabled
		if providerConfig, exists := c.Providers[c.Models.Large.Provider]; exists {
			if providerConfig.Disabled {
				errors.Add("models.large.provider", "large model provider is disabled")
			}

			// Check if model exists in provider
			modelExists := false
			for _, model := range providerConfig.Models {
				if model.ID == c.Models.Large.ModelID {
					modelExists = true
					break
				}
			}
			if !modelExists {
				errors.Add("models.large.model_id", fmt.Sprintf("large model '%s' not found in provider '%s'", c.Models.Large.ModelID, c.Models.Large.Provider))
			}
		} else {
			errors.Add("models.large.provider", fmt.Sprintf("large model provider '%s' not found", c.Models.Large.Provider))
		}
	}

	// Validate small model
	if c.Models.Small.ModelID != "" || c.Models.Small.Provider != "" {
		if c.Models.Small.ModelID == "" {
			errors.Add("models.small.model_id", "small model ID is required when provider is set")
		}
		if c.Models.Small.Provider == "" {
			errors.Add("models.small.provider", "small model provider is required when model ID is set")
		}

		// Check if provider exists and is not disabled
		if providerConfig, exists := c.Providers[c.Models.Small.Provider]; exists {
			if providerConfig.Disabled {
				errors.Add("models.small.provider", "small model provider is disabled")
			}

			// Check if model exists in provider
			modelExists := false
			for _, model := range providerConfig.Models {
				if model.ID == c.Models.Small.ModelID {
					modelExists = true
					break
				}
			}
			if !modelExists {
				errors.Add("models.small.model_id", fmt.Sprintf("small model '%s' not found in provider '%s'", c.Models.Small.ModelID, c.Models.Small.Provider))
			}
		} else {
			errors.Add("models.small.provider", fmt.Sprintf("small model provider '%s' not found", c.Models.Small.Provider))
		}
	}
}

// validateAgents validates agent configurations
func (c *Config) validateAgents(errors *ValidationErrors) {
	if c.Agents == nil {
		c.Agents = make(map[AgentID]Agent)
	}

	validTools := []string{
		"bash", "edit", "fetch", "glob", "grep", "ls", "sourcegraph", "view", "write", "agent",
	}

	for agentID, agent := range c.Agents {
		fieldPrefix := fmt.Sprintf("agents.%s", agentID)

		// Validate agent ID consistency
		if agent.ID != agentID {
			errors.Add(fieldPrefix+".id", fmt.Sprintf("agent ID mismatch: expected '%s', got '%s'", agentID, agent.ID))
		}

		// Validate required fields
		if agent.ID == "" {
			errors.Add(fieldPrefix+".id", "agent ID is required")
		}
		if agent.Name == "" {
			errors.Add(fieldPrefix+".name", "agent name is required")
		}

		// Validate model type
		if agent.Model != LargeModel && agent.Model != SmallModel {
			errors.Add(fieldPrefix+".model", fmt.Sprintf("invalid model type: %s (must be 'large' or 'small')", agent.Model))
		}

		// Validate allowed tools
		if agent.AllowedTools != nil {
			for i, tool := range agent.AllowedTools {
				validTool := slices.Contains(validTools, tool)
				if !validTool {
					errors.Add(fmt.Sprintf("%s.allowed_tools[%d]", fieldPrefix, i), fmt.Sprintf("unknown tool: %s", tool))
				}
			}
		}

		// Validate MCP references
		if agent.AllowedMCP != nil {
			for mcpName := range agent.AllowedMCP {
				if _, exists := c.MCP[mcpName]; !exists {
					errors.Add(fieldPrefix+".allowed_mcp", fmt.Sprintf("referenced MCP '%s' not found", mcpName))
				}
			}
		}

		// Validate LSP references
		if agent.AllowedLSP != nil {
			for _, lspName := range agent.AllowedLSP {
				if _, exists := c.LSP[lspName]; !exists {
					errors.Add(fieldPrefix+".allowed_lsp", fmt.Sprintf("referenced LSP '%s' not found", lspName))
				}
			}
		}

		// Validate context paths (basic path validation)
		for i, contextPath := range agent.ContextPaths {
			if contextPath == "" {
				errors.Add(fmt.Sprintf("%s.context_paths[%d]", fieldPrefix, i), "context path cannot be empty")
			}
			// Check for invalid characters in path
			if strings.Contains(contextPath, "\x00") {
				errors.Add(fmt.Sprintf("%s.context_paths[%d]", fieldPrefix, i), "context path contains invalid characters")
			}
		}

		// Validate known agents maintain their core properties
		if agentID == AgentCoder {
			if agent.Name != "Coder" {
				errors.Add(fieldPrefix+".name", "coder agent name cannot be changed")
			}
			if agent.Description != "An agent that helps with executing coding tasks." {
				errors.Add(fieldPrefix+".description", "coder agent description cannot be changed")
			}
		} else if agentID == AgentTask {
			if agent.Name != "Task" {
				errors.Add(fieldPrefix+".name", "task agent name cannot be changed")
			}
			if agent.Description != "An agent that helps with searching for context and finding implementation details." {
				errors.Add(fieldPrefix+".description", "task agent description cannot be changed")
			}
			expectedTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
			if agent.AllowedTools != nil && !slices.Equal(agent.AllowedTools, expectedTools) {
				errors.Add(fieldPrefix+".allowed_tools", "task agent allowed tools cannot be changed")
			}
		}
	}
}

// validateOptions validates configuration options
func (c *Config) validateOptions(errors *ValidationErrors) {
	// Validate data directory
	if c.Options.DataDirectory == "" {
		errors.Add("options.data_directory", "data directory is required")
	}

	// Validate context paths
	for i, contextPath := range c.Options.ContextPaths {
		if contextPath == "" {
			errors.Add(fmt.Sprintf("options.context_paths[%d]", i), "context path cannot be empty")
		}
		if strings.Contains(contextPath, "\x00") {
			errors.Add(fmt.Sprintf("options.context_paths[%d]", i), "context path contains invalid characters")
		}
	}
}

// validateMCPs validates MCP configurations
func (c *Config) validateMCPs(errors *ValidationErrors) {
	if c.MCP == nil {
		c.MCP = make(map[string]MCP)
	}

	for mcpName, mcpConfig := range c.MCP {
		fieldPrefix := fmt.Sprintf("mcp.%s", mcpName)

		// Validate MCP type
		if mcpConfig.Type != MCPStdio && mcpConfig.Type != MCPSse && mcpConfig.Type != MCPHttp {
			errors.Add(fieldPrefix+".type", fmt.Sprintf("invalid MCP type: %s (must be 'stdio', 'sse', or 'http')", mcpConfig.Type))
		}

		// Validate based on type
		if mcpConfig.Type == MCPStdio {
			if mcpConfig.Command == "" {
				errors.Add(fieldPrefix+".command", "command is required for stdio MCP")
			}
		} else if mcpConfig.Type == MCPSse || mcpConfig.Type == MCPHttp {
			if mcpConfig.URL == "" {
				errors.Add(fieldPrefix+".url", "URL is required for SSE and HTTP MCP")
			}
		}
	}
}

// validateLSPs validates LSP configurations
func (c *Config) validateLSPs(errors *ValidationErrors) {
	if c.LSP == nil {
		c.LSP = make(map[string]LSPConfig)
	}

	for lspName, lspConfig := range c.LSP {
		fieldPrefix := fmt.Sprintf("lsp.%s", lspName)

		if lspConfig.Command == "" {
			errors.Add(fieldPrefix+".command", "command is required for LSP")
		}
	}
}

// validateCrossReferences validates cross-references between different config sections
func (c *Config) validateCrossReferences(errors *ValidationErrors) {
	// Validate that agents can use their assigned model types
	for agentID, agent := range c.Agents {
		fieldPrefix := fmt.Sprintf("agents.%s", agentID)

		var preferredModel PreferredModel
		switch agent.Model {
		case LargeModel:
			preferredModel = c.Models.Large
		case SmallModel:
			preferredModel = c.Models.Small
		}

		if preferredModel.Provider != "" {
			if providerConfig, exists := c.Providers[preferredModel.Provider]; exists {
				if providerConfig.Disabled {
					errors.Add(fieldPrefix+".model", fmt.Sprintf("agent cannot use model type '%s' because provider '%s' is disabled", agent.Model, preferredModel.Provider))
				}
			}
		}
	}
}

// validateCompleteness validates that the configuration is complete and usable
func (c *Config) validateCompleteness(errors *ValidationErrors) {
	// Check for at least one valid, non-disabled provider
	hasValidProvider := false
	for _, providerConfig := range c.Providers {
		if !providerConfig.Disabled {
			hasValidProvider = true
			break
		}
	}
	if !hasValidProvider {
		errors.Add("providers", "at least one non-disabled provider is required")
	}

	// Check that default agents exist
	if _, exists := c.Agents[AgentCoder]; !exists {
		errors.Add("agents", "coder agent is required")
	}
	if _, exists := c.Agents[AgentTask]; !exists {
		errors.Add("agents", "task agent is required")
	}

	// Check that preferred models are set if providers exist
	if hasValidProvider {
		if c.Models.Large.ModelID == "" || c.Models.Large.Provider == "" {
			errors.Add("models.large", "large preferred model must be configured when providers are available")
		}
		if c.Models.Small.ModelID == "" || c.Models.Small.Provider == "" {
			errors.Add("models.small", "small preferred model must be configured when providers are available")
		}
	}
}

// JSONSchemaExtend adds custom schema properties for AgentID
func (AgentID) JSONSchemaExtend(schema *jsonschema.Schema) {
	schema.Enum = []any{
		string(AgentCoder),
		string(AgentTask),
	}
}

// JSONSchemaExtend adds custom schema properties for ModelType
func (ModelType) JSONSchemaExtend(schema *jsonschema.Schema) {
	schema.Enum = []any{
		string(LargeModel),
		string(SmallModel),
	}
}

// JSONSchemaExtend adds custom schema properties for MCPType
func (MCPType) JSONSchemaExtend(schema *jsonschema.Schema) {
	schema.Enum = []any{
		string(MCPStdio),
		string(MCPSse),
		string(MCPHttp),
	}
}