config.go

   1package config
   2
   3import (
   4	"encoding/json"
   5	"errors"
   6	"fmt"
   7	"log/slog"
   8	"maps"
   9	"os"
  10	"path/filepath"
  11	"slices"
  12	"strings"
  13	"sync"
  14
  15	"github.com/charmbracelet/crush/internal/fur/provider"
  16	"github.com/charmbracelet/crush/internal/logging"
  17	"github.com/invopop/jsonschema"
  18)
  19
  20const (
  21	defaultDataDirectory = ".crush"
  22	defaultLogLevel      = "info"
  23	appName              = "crush"
  24
  25	MaxTokensFallbackDefault = 4096
  26)
  27
  28var defaultContextPaths = []string{
  29	".github/copilot-instructions.md",
  30	".cursorrules",
  31	".cursor/rules/",
  32	"CLAUDE.md",
  33	"CLAUDE.local.md",
  34	"GEMINI.md",
  35	"gemini.md",
  36	"crush.md",
  37	"crush.local.md",
  38	"Crush.md",
  39	"Crush.local.md",
  40	"CRUSH.md",
  41	"CRUSH.local.md",
  42}
  43
  44type AgentID string
  45
  46const (
  47	AgentCoder AgentID = "coder"
  48	AgentTask  AgentID = "task"
  49)
  50
  51type ModelType string
  52
  53const (
  54	LargeModel ModelType = "large"
  55	SmallModel ModelType = "small"
  56)
  57
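// Model describes a single model offered by a provider, including pricing,
// context-window size, and capability metadata.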
  58type Model struct {
  59	ID                 string  `json:"id" jsonschema:"title=Model ID,description=Unique identifier for the model"`
  60	Name               string  `json:"model" jsonschema:"title=Model Name,description=Display name of the model"`
  61	CostPer1MIn        float64 `json:"cost_per_1m_in" jsonschema:"title=Input Cost,description=Cost per 1 million input tokens,minimum=0"`
  62	CostPer1MOut       float64 `json:"cost_per_1m_out" jsonschema:"title=Output Cost,description=Cost per 1 million output tokens,minimum=0"`
  63	CostPer1MInCached  float64 `json:"cost_per_1m_in_cached" jsonschema:"title=Cached Input Cost,description=Cost per 1 million cached input tokens,minimum=0"`
  64	CostPer1MOutCached float64 `json:"cost_per_1m_out_cached" jsonschema:"title=Cached Output Cost,description=Cost per 1 million cached output tokens,minimum=0"`
  65	ContextWindow      int64   `json:"context_window" jsonschema:"title=Context Window,description=Maximum context window size in tokens,minimum=1"`
  66	DefaultMaxTokens   int64   `json:"default_max_tokens" jsonschema:"title=Default Max Tokens,description=Default maximum tokens for responses,minimum=1"`
  67	CanReason          bool    `json:"can_reason" jsonschema:"title=Can Reason,description=Whether the model supports reasoning capabilities"`
  68	ReasoningEffort    string  `json:"reasoning_effort" jsonschema:"title=Reasoning Effort,description=Default reasoning effort level for reasoning models"`
  69	HasReasoningEffort bool    `json:"has_reasoning_effort" jsonschema:"title=Has Reasoning Effort,description=Whether the model supports reasoning effort configuration"`
  70	SupportsImages     bool    `json:"supports_attachments" jsonschema:"title=Supports Images,description=Whether the model supports image attachments"`
  71}
  72
  73type VertexAIOptions struct {
  74	APIKey   string `json:"api_key,omitempty"`
  75	Project  string `json:"project,omitempty"`
  76	Location string `json:"location,omitempty"`
  77}
  78
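// ProviderConfig is the user-level configuration for a single LLM provider:
// credentials, endpoint, default model IDs, and the models it exposes.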
  79type ProviderConfig struct {
  80	ID           provider.InferenceProvider `json:"id,omitempty" jsonschema:"title=Provider ID,description=Unique identifier for the provider"`
  81	BaseURL      string                     `json:"base_url,omitempty" jsonschema:"title=Base URL,description=Base URL for the provider API (required for custom providers)"`
  82	ProviderType provider.Type              `json:"provider_type" jsonschema:"title=Provider Type,description=Type of the provider (openai, anthropic, etc.)"`
  83	APIKey       string                     `json:"api_key,omitempty" jsonschema:"title=API Key,description=API key for authenticating with the provider"`
  84	Disabled     bool                       `json:"disabled,omitempty" jsonschema:"title=Disabled,description=Whether this provider is disabled,default=false"`
  85	ExtraHeaders map[string]string          `json:"extra_headers,omitempty" jsonschema:"title=Extra Headers,description=Additional HTTP headers to send with requests"`
	// used e.g. by Vertex AI to set the project
  87	ExtraParams map[string]string `json:"extra_params,omitempty" jsonschema:"title=Extra Parameters,description=Additional provider-specific parameters"`
  88
  89	DefaultLargeModel string `json:"default_large_model,omitempty" jsonschema:"title=Default Large Model,description=Default model ID for large model type"`
  90	DefaultSmallModel string `json:"default_small_model,omitempty" jsonschema:"title=Default Small Model,description=Default model ID for small model type"`
  91
  92	Models []Model `json:"models,omitempty" jsonschema:"title=Models,description=List of available models for this provider"`
  93}
  94
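// Agent configures a single agent: which model type it uses and which tools,
// MCP servers, and LSP servers it is allowed to access.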
  95type Agent struct {
  96	ID          AgentID `json:"id,omitempty" jsonschema:"title=Agent ID,description=Unique identifier for the agent,enum=coder,enum=task"`
  97	Name        string  `json:"name,omitempty" jsonschema:"title=Name,description=Display name of the agent"`
  98	Description string  `json:"description,omitempty" jsonschema:"title=Description,description=Description of what the agent does"`
 100	Disabled bool `json:"disabled,omitempty" jsonschema:"title=Disabled,description=Whether this agent is disabled,default=false"`
 101
 102	Model ModelType `json:"model" jsonschema:"title=Model Type,description=Type of model to use (large or small),enum=large,enum=small"`
 103
 104	// The available tools for the agent
 105	//  if this is nil, all tools are available
 106	AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"title=Allowed Tools,description=List of tools this agent is allowed to use (if nil all tools are allowed)"`
 107
 108	// this tells us which MCPs are available for this agent
 109	//  if this is empty all mcps are available
 110	//  the string array is the list of tools from the AllowedMCP the agent has available
 111	//  if the string array is nil, all tools from the AllowedMCP are available
 112	AllowedMCP map[string][]string `json:"allowed_mcp,omitempty" jsonschema:"title=Allowed MCP,description=Map of MCP servers this agent can use and their allowed tools"`
 113
 114	// The list of LSPs that this agent can use
 115	//  if this is nil, all LSPs are available
 116	AllowedLSP []string `json:"allowed_lsp,omitempty" jsonschema:"title=Allowed LSP,description=List of LSP servers this agent can use (if nil all LSPs are allowed)"`
 117
 118	// Overrides the context paths for this agent
 119	ContextPaths []string `json:"context_paths,omitempty" jsonschema:"title=Context Paths,description=Custom context paths for this agent (additive to global context paths)"`
 120}
 121
 122type MCPType string
 123
 124const (
 125	MCPStdio MCPType = "stdio"
 126	MCPSse   MCPType = "sse"
 127)
 128
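// MCP describes how to reach a Model Context Protocol server, either by
// spawning a command over stdio or by connecting to an SSE endpoint.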
 129type MCP struct {
 130	Command string   `json:"command" jsonschema:"title=Command,description=Command to execute for stdio MCP servers"`
 131	Env     []string `json:"env,omitempty" jsonschema:"title=Environment,description=Environment variables for the MCP server"`
 132	Args    []string `json:"args,omitempty" jsonschema:"title=Arguments,description=Command line arguments for the MCP server"`
 133	Type    MCPType  `json:"type" jsonschema:"title=Type,description=Type of MCP connection,enum=stdio,enum=sse,default=stdio"`
 134	URL     string   `json:"url,omitempty" jsonschema:"title=URL,description=URL for SSE MCP servers"`
 135	// TODO: maybe make it possible to get the value from the env
 136	Headers map[string]string `json:"headers,omitempty" jsonschema:"title=Headers,description=HTTP headers for SSE MCP servers"`
 137}
 138
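// LSPConfig describes how to start and configure a Language Server Protocol
// server.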
 139type LSPConfig struct {
	Disabled bool     `json:"disabled,omitempty" jsonschema:"title=Disabled,description=Whether this LSP server is disabled,default=false"`
 141	Command  string   `json:"command" jsonschema:"title=Command,description=Command to execute for the LSP server"`
 142	Args     []string `json:"args,omitempty" jsonschema:"title=Arguments,description=Command line arguments for the LSP server"`
 143	Options  any      `json:"options,omitempty" jsonschema:"title=Options,description=LSP server specific options"`
 144}
 145
 146type TUIOptions struct {
 147	CompactMode bool `json:"compact_mode" jsonschema:"title=Compact Mode,description=Enable compact mode for the TUI,default=false"`
	// Here we can add themes or other TUI-related options later
 149}
 150
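// Options holds general application settings such as context paths, TUI
// behavior, debug flags, and the data directory.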
 151type Options struct {
 152	ContextPaths         []string   `json:"context_paths,omitempty" jsonschema:"title=Context Paths,description=List of paths to search for context files"`
 153	TUI                  TUIOptions `json:"tui,omitempty" jsonschema:"title=TUI Options,description=Terminal UI configuration options"`
 154	Debug                bool       `json:"debug,omitempty" jsonschema:"title=Debug,description=Enable debug logging,default=false"`
 155	DebugLSP             bool       `json:"debug_lsp,omitempty" jsonschema:"title=Debug LSP,description=Enable LSP debug logging,default=false"`
 156	DisableAutoSummarize bool       `json:"disable_auto_summarize,omitempty" jsonschema:"title=Disable Auto Summarize,description=Disable automatic conversation summarization,default=false"`
 157	// Relative to the cwd
 158	DataDirectory string `json:"data_directory,omitempty" jsonschema:"title=Data Directory,description=Directory for storing application data,default=.crush"`
 159}
 160
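// PreferredModel pins a model ID to a provider and carries optional
// per-model overrides such as reasoning effort and max tokens.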
 161type PreferredModel struct {
 162	ModelID  string                     `json:"model_id" jsonschema:"title=Model ID,description=ID of the preferred model"`
 163	Provider provider.InferenceProvider `json:"provider" jsonschema:"title=Provider,description=Provider for the preferred model"`
 164	// ReasoningEffort overrides the default reasoning effort for this model
 165	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"title=Reasoning Effort,description=Override reasoning effort for this model"`
 166	// MaxTokens overrides the default max tokens for this model
 167	MaxTokens int64 `json:"max_tokens,omitempty" jsonschema:"title=Max Tokens,description=Override max tokens for this model,minimum=1"`
 168
	// Think indicates whether the model should think; only applicable to Anthropic reasoning models
 170	Think bool `json:"think,omitempty" jsonschema:"title=Think,description=Enable thinking for reasoning models,default=false"`
 171}
 172
 173type PreferredModels struct {
 174	Large PreferredModel `json:"large,omitempty" jsonschema:"title=Large Model,description=Preferred model configuration for large model type"`
 175	Small PreferredModel `json:"small,omitempty" jsonschema:"title=Small Model,description=Preferred model configuration for small model type"`
 176}
 177
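// Config is the merged Crush configuration: environment-based defaults, the
// global config file, and the project-local crush.json layered on top of one
// another.
//
// A minimal, purely illustrative crush.json (keys follow the json tags
// declared below; the provider name is an example, not a verified value)
// might look like:
//
//	{
//	  "providers": {
//	    "openai": {
//	      "api_key": "your-api-key"
//	    }
//	  }
//	}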
 178type Config struct {
 179	Models PreferredModels `json:"models,omitempty" jsonschema:"title=Models,description=Preferred model configurations for large and small model types"`
 180	// List of configured providers
 181	Providers map[provider.InferenceProvider]ProviderConfig `json:"providers,omitempty" jsonschema:"title=Providers,description=LLM provider configurations"`
 182
 183	// List of configured agents
 184	Agents map[AgentID]Agent `json:"agents,omitempty" jsonschema:"title=Agents,description=Agent configurations for different tasks"`
 185
 186	// List of configured MCPs
	MCP map[string]MCP `json:"mcp,omitempty" jsonschema:"title=MCP,description=Model Context Protocol server configurations"`
 188
 189	// List of configured LSPs
 190	LSP map[string]LSPConfig `json:"lsp,omitempty" jsonschema:"title=LSP,description=Language Server Protocol configurations"`
 191
 192	// Miscellaneous options
 193	Options Options `json:"options,omitempty" jsonschema:"title=Options,description=General application options and settings"`
 194}
 195
 196var (
 197	instance *Config // The single instance of the Singleton
 198	cwd      string
 199	once     sync.Once // Ensures the initialization happens only once
)
 202
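// loadConfig builds the effective configuration for the given working
// directory: it starts from environment-based defaults, layers the global
// config file and the project-local crush.json on top, merges agents, MCPs,
// and LSPs, and validates the result.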
 203func loadConfig(cwd string, debug bool) (*Config, error) {
 204	// First read the global config file
 205	cfgPath := ConfigPath()
 206
 207	cfg := defaultConfigBasedOnEnv()
 208	cfg.Options.Debug = debug
 209	defaultLevel := slog.LevelInfo
 210	if cfg.Options.Debug {
 211		defaultLevel = slog.LevelDebug
 212	}
 213	if os.Getenv("CRUSH_DEV_DEBUG") == "true" {
		loggingFile := filepath.Join(cfg.Options.DataDirectory, "debug.log")
 215
 216		// if file does not exist create it
 217		if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
 218			if err := os.MkdirAll(cfg.Options.DataDirectory, 0o755); err != nil {
 219				return cfg, fmt.Errorf("failed to create directory: %w", err)
 220			}
			f, err := os.Create(loggingFile)
			if err != nil {
				return cfg, fmt.Errorf("failed to create log file: %w", err)
			}
			// close immediately; the file is reopened below for appending
			f.Close()
 224		}
 225
 226		sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
 227		if err != nil {
 228			return cfg, fmt.Errorf("failed to open log file: %w", err)
 229		}
 230		// Configure logger
 231		logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
 232			Level: defaultLevel,
 233		}))
 234		slog.SetDefault(logger)
 235	} else {
 236		// Configure logger
 237		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
 238			Level: defaultLevel,
 239		}))
 240		slog.SetDefault(logger)
 241	}
 242	var globalCfg *Config
 243	if _, err := os.Stat(cfgPath); err != nil && !os.IsNotExist(err) {
 244		// some other error occurred while checking the file
 245		return nil, err
 246	} else if err == nil {
 247		// config file exists, read it
 248		file, err := os.ReadFile(cfgPath)
 249		if err != nil {
 250			return nil, err
 251		}
 252		globalCfg = &Config{}
 253		if err := json.Unmarshal(file, globalCfg); err != nil {
 254			return nil, err
 255		}
 256	} else {
 257		// config file does not exist, create a new one
 258		globalCfg = &Config{}
 259	}
 260
 261	var localConfig *Config
 262	// Global config loaded, now read the local config file
 263	localConfigPath := filepath.Join(cwd, "crush.json")
 264	if _, err := os.Stat(localConfigPath); err != nil && !os.IsNotExist(err) {
 265		// some other error occurred while checking the file
 266		return nil, err
 267	} else if err == nil {
 268		// local config file exists, read it
 269		file, err := os.ReadFile(localConfigPath)
 270		if err != nil {
 271			return nil, err
 272		}
 273		localConfig = &Config{}
 274		if err := json.Unmarshal(file, localConfig); err != nil {
 275			return nil, err
 276		}
 277	}
 278
 279	// merge options
 280	mergeOptions(cfg, globalCfg, localConfig)
 281
 282	mergeProviderConfigs(cfg, globalCfg, localConfig)
	// no providers found; the app is not initialized yet
 284	if len(cfg.Providers) == 0 {
 285		return cfg, nil
 286	}
 287	preferredProvider := getPreferredProvider(cfg.Providers)
 288	if preferredProvider != nil {
 289		cfg.Models = PreferredModels{
 290			Large: PreferredModel{
 291				ModelID:  preferredProvider.DefaultLargeModel,
 292				Provider: preferredProvider.ID,
 293			},
 294			Small: PreferredModel{
 295				ModelID:  preferredProvider.DefaultSmallModel,
 296				Provider: preferredProvider.ID,
 297			},
 298		}
 299	} else {
 300		// No valid providers found, set empty models
 301		cfg.Models = PreferredModels{}
 302	}
 303
 304	mergeModels(cfg, globalCfg, localConfig)
 305
 306	agents := map[AgentID]Agent{
 307		AgentCoder: {
 308			ID:           AgentCoder,
 309			Name:         "Coder",
 310			Description:  "An agent that helps with executing coding tasks.",
 311			Model:        LargeModel,
 312			ContextPaths: cfg.Options.ContextPaths,
 313			// All tools allowed
 314		},
 315		AgentTask: {
 316			ID:           AgentTask,
 317			Name:         "Task",
 318			Description:  "An agent that helps with searching for context and finding implementation details.",
 319			Model:        LargeModel,
 320			ContextPaths: cfg.Options.ContextPaths,
 321			AllowedTools: []string{
 322				"glob",
 323				"grep",
 324				"ls",
 325				"sourcegraph",
 326				"view",
 327			},
 328			// NO MCPs or LSPs by default
 329			AllowedMCP: map[string][]string{},
 330			AllowedLSP: []string{},
 331		},
 332	}
 333	cfg.Agents = agents
 334	mergeAgents(cfg, globalCfg, localConfig)
 335	mergeMCPs(cfg, globalCfg, localConfig)
 336	mergeLSPs(cfg, globalCfg, localConfig)
 337
 338	// Validate the final configuration
 339	if err := cfg.Validate(); err != nil {
 340		return cfg, fmt.Errorf("configuration validation failed: %w", err)
 341	}
 342
 343	return cfg, nil
 344}
 345
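// Init loads the configuration exactly once for the given working directory
// and stores it as the package-level singleton returned by Get.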
 346func Init(workingDir string, debug bool) (*Config, error) {
 347	var err error
 348	once.Do(func() {
 349		cwd = workingDir
 350		instance, err = loadConfig(cwd, debug)
 351		if err != nil {
 352			logging.Error("Failed to load config", "error", err)
 353		}
 354	})
 355
 356	return instance, err
 357}
 358
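// Get returns the configuration singleton and panics if Init has not been
// called yet.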
 359func Get() *Config {
 360	if instance == nil {
 361		// TODO: Handle this better
		panic("Config not initialized. Call Init first.")
 363	}
 364	return instance
 365}
 366
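// getPreferredProvider returns the first enabled configured provider,
// preferring the order reported by Providers(); it returns nil if every
// configured provider is disabled.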
 367func getPreferredProvider(configuredProviders map[provider.InferenceProvider]ProviderConfig) *ProviderConfig {
 368	providers := Providers()
 369	for _, p := range providers {
 370		if providerConfig, ok := configuredProviders[p.ID]; ok && !providerConfig.Disabled {
 371			return &providerConfig
 372		}
 373	}
	// if no known provider matched, return the first enabled configured provider
 375	for _, providerConfig := range configuredProviders {
 376		if !providerConfig.Disabled {
 377			return &providerConfig
 378		}
 379	}
 380	return nil
 381}
 382
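// mergeProviderConfig overlays the non-empty fields of other onto base.
// Base URL, provider type, and extra headers/params are only overridden for
// custom (unknown) providers, and models are appended only if they do not
// already exist.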
 383func mergeProviderConfig(p provider.InferenceProvider, base, other ProviderConfig) ProviderConfig {
 384	if other.APIKey != "" {
 385		base.APIKey = other.APIKey
 386	}
 387	// Only change these options if the provider is not a known provider
 388	if !slices.Contains(provider.KnownProviders(), p) {
 389		if other.BaseURL != "" {
 390			base.BaseURL = other.BaseURL
 391		}
 392		if other.ProviderType != "" {
 393			base.ProviderType = other.ProviderType
 394		}
 395		if len(other.ExtraHeaders) > 0 {
 396			if base.ExtraHeaders == nil {
 397				base.ExtraHeaders = make(map[string]string)
 398			}
 399			maps.Copy(base.ExtraHeaders, other.ExtraHeaders)
 400		}
 401		if len(other.ExtraParams) > 0 {
 402			if base.ExtraParams == nil {
 403				base.ExtraParams = make(map[string]string)
 404			}
 405			maps.Copy(base.ExtraParams, other.ExtraParams)
 406		}
 407	}
 408
 409	if other.Disabled {
 410		base.Disabled = other.Disabled
 411	}
 412
	if other.DefaultLargeModel != "" {
		base.DefaultLargeModel = other.DefaultLargeModel
	}
	if other.DefaultSmallModel != "" {
		base.DefaultSmallModel = other.DefaultSmallModel
	}
 416	// Add new models if they don't exist
 417	if other.Models != nil {
 418		for _, model := range other.Models {
 419			// check if the model already exists
 420			exists := false
 421			for _, existingModel := range base.Models {
 422				if existingModel.ID == model.ID {
 423					exists = true
 424					break
 425				}
 426			}
 427			if !exists {
 428				base.Models = append(base.Models, model)
 429			}
 430		}
 431	}
 432
 433	return base
 434}
 435
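// validateProvider checks that custom (unknown) providers declare an
// OpenAI-compatible type, a base URL, and an API key; known providers are
// accepted as-is.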
 436func validateProvider(p provider.InferenceProvider, providerConfig ProviderConfig) error {
 437	if !slices.Contains(provider.KnownProviders(), p) {
 438		if providerConfig.ProviderType != provider.TypeOpenAI {
 439			return errors.New("invalid provider type: " + string(providerConfig.ProviderType))
 440		}
 441		if providerConfig.BaseURL == "" {
 442			return errors.New("base URL must be set for custom providers")
 443		}
 444		if providerConfig.APIKey == "" {
 445			return errors.New("API key must be set for custom providers")
 446		}
 447	}
 448	return nil
 449}
 450
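// mergeModels applies preferred-model overrides from the global and then the
// local config, keeping only entries that specify both a model ID and a
// provider.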
 451func mergeModels(base, global, local *Config) {
 452	for _, cfg := range []*Config{global, local} {
 453		if cfg == nil {
 454			continue
 455		}
 456		if cfg.Models.Large.ModelID != "" && cfg.Models.Large.Provider != "" {
 457			base.Models.Large = cfg.Models.Large
 458		}
 459
 460		if cfg.Models.Small.ModelID != "" && cfg.Models.Small.Provider != "" {
 461			base.Models.Small = cfg.Models.Small
 462		}
 463	}
 464}
 465
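// mergeOptions folds global and local options into base: context paths are
// additive, boolean flags can only be switched on, and the data directory is
// replaced when set.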
 466func mergeOptions(base, global, local *Config) {
 467	for _, cfg := range []*Config{global, local} {
 468		if cfg == nil {
 469			continue
 470		}
 471		baseOptions := base.Options
 472		other := cfg.Options
 473		if len(other.ContextPaths) > 0 {
 474			baseOptions.ContextPaths = append(baseOptions.ContextPaths, other.ContextPaths...)
 475		}
 476
 477		if other.TUI.CompactMode {
 478			baseOptions.TUI.CompactMode = other.TUI.CompactMode
 479		}
 480
 481		if other.Debug {
 482			baseOptions.Debug = other.Debug
 483		}
 484
 485		if other.DebugLSP {
 486			baseOptions.DebugLSP = other.DebugLSP
 487		}
 488
 489		if other.DisableAutoSummarize {
 490			baseOptions.DisableAutoSummarize = other.DisableAutoSummarize
 491		}
 492
 493		if other.DataDirectory != "" {
 494			baseOptions.DataDirectory = other.DataDirectory
 495		}
 496		base.Options = baseOptions
 497	}
 498}
 499
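// mergeAgents merges agent definitions from the global and local configs
// into base. New agents are added with defaults filled in, while the
// built-in coder and task agents only accept a restricted set of overrides
// (model, MCPs, LSPs, and context paths).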
 500func mergeAgents(base, global, local *Config) {
 501	for _, cfg := range []*Config{global, local} {
 502		if cfg == nil {
 503			continue
 504		}
 505		for agentID, newAgent := range cfg.Agents {
 506			if _, ok := base.Agents[agentID]; !ok {
 507				newAgent.ID = agentID
 508				if newAgent.Model == "" {
 509					newAgent.Model = LargeModel
 510				}
 511				if len(newAgent.ContextPaths) > 0 {
 512					newAgent.ContextPaths = append(base.Options.ContextPaths, newAgent.ContextPaths...)
 513				} else {
 514					newAgent.ContextPaths = base.Options.ContextPaths
 515				}
 516				base.Agents[agentID] = newAgent
 517			} else {
 518				baseAgent := base.Agents[agentID]
 519
 520				if agentID == AgentCoder || agentID == AgentTask {
 521					if newAgent.Model != "" {
 522						baseAgent.Model = newAgent.Model
 523					}
 524					if newAgent.AllowedMCP != nil {
 525						baseAgent.AllowedMCP = newAgent.AllowedMCP
 526					}
 527					if newAgent.AllowedLSP != nil {
 528						baseAgent.AllowedLSP = newAgent.AllowedLSP
 529					}
 530					// Context paths are additive for known agents too
 531					if len(newAgent.ContextPaths) > 0 {
 532						baseAgent.ContextPaths = append(baseAgent.ContextPaths, newAgent.ContextPaths...)
 533					}
 534				} else {
 535					if newAgent.Name != "" {
 536						baseAgent.Name = newAgent.Name
 537					}
 538					if newAgent.Description != "" {
 539						baseAgent.Description = newAgent.Description
 540					}
 541					if newAgent.Model != "" {
 542						baseAgent.Model = newAgent.Model
 543					} else if baseAgent.Model == "" {
 544						baseAgent.Model = LargeModel
 545					}
 546
 547					baseAgent.Disabled = newAgent.Disabled
 548
 549					if newAgent.AllowedTools != nil {
 550						baseAgent.AllowedTools = newAgent.AllowedTools
 551					}
 552					if newAgent.AllowedMCP != nil {
 553						baseAgent.AllowedMCP = newAgent.AllowedMCP
 554					}
 555					if newAgent.AllowedLSP != nil {
 556						baseAgent.AllowedLSP = newAgent.AllowedLSP
 557					}
 558					if len(newAgent.ContextPaths) > 0 {
 559						baseAgent.ContextPaths = append(baseAgent.ContextPaths, newAgent.ContextPaths...)
 560					}
 561				}
 562
 563				base.Agents[agentID] = baseAgent
 564			}
 565		}
 566	}
 567}
 568
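// mergeMCPs copies MCP server definitions from the global and local configs
// into base, with later sources winning on name collisions.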
 569func mergeMCPs(base, global, local *Config) {
 570	for _, cfg := range []*Config{global, local} {
 571		if cfg == nil {
 572			continue
 573		}
 574		maps.Copy(base.MCP, cfg.MCP)
 575	}
 576}
 577
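// mergeLSPs copies LSP server definitions from the global and local configs
// into base, with later sources winning on name collisions.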
 578func mergeLSPs(base, global, local *Config) {
 579	for _, cfg := range []*Config{global, local} {
 580		if cfg == nil {
 581			continue
 582		}
 583		maps.Copy(base.LSP, cfg.LSP)
 584	}
 585}
 586
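// mergeProviderConfigs merges provider settings from the global and local
// configs into base and then drops any provider that fails validation.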
 587func mergeProviderConfigs(base, global, local *Config) {
 588	for _, cfg := range []*Config{global, local} {
 589		if cfg == nil {
 590			continue
 591		}
 592		for providerName, p := range cfg.Providers {
 593			p.ID = providerName
 594			if _, ok := base.Providers[providerName]; !ok {
 595				base.Providers[providerName] = p
 596			} else {
 597				base.Providers[providerName] = mergeProviderConfig(providerName, base.Providers[providerName], p)
 598			}
 599		}
 600	}
 601
 602	finalProviders := make(map[provider.InferenceProvider]ProviderConfig)
 603	for providerName, providerConfig := range base.Providers {
 604		err := validateProvider(providerName, providerConfig)
 605		if err != nil {
 606			logging.Warn("Skipping provider", "name", providerName, "error", err)
 607			continue // Skip invalid providers
 608		}
 609		finalProviders[providerName] = providerConfig
 610	}
 611	base.Providers = finalProviders
 612}
 613
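// providerDefaultConfig returns the baseline ProviderConfig (provider type
// and, where needed, base URL and default headers) for a known provider,
// falling back to an OpenAI-compatible configuration for anything else.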
 614func providerDefaultConfig(providerID provider.InferenceProvider) ProviderConfig {
 615	switch providerID {
 616	case provider.InferenceProviderAnthropic:
 617		return ProviderConfig{
 618			ID:           providerID,
 619			ProviderType: provider.TypeAnthropic,
 620		}
 621	case provider.InferenceProviderOpenAI:
 622		return ProviderConfig{
 623			ID:           providerID,
 624			ProviderType: provider.TypeOpenAI,
 625		}
 626	case provider.InferenceProviderGemini:
 627		return ProviderConfig{
 628			ID:           providerID,
 629			ProviderType: provider.TypeGemini,
 630		}
 631	case provider.InferenceProviderBedrock:
 632		return ProviderConfig{
 633			ID:           providerID,
 634			ProviderType: provider.TypeBedrock,
 635		}
 636	case provider.InferenceProviderAzure:
 637		return ProviderConfig{
 638			ID:           providerID,
 639			ProviderType: provider.TypeAzure,
 640		}
 641	case provider.InferenceProviderOpenRouter:
 642		return ProviderConfig{
 643			ID:           providerID,
 644			ProviderType: provider.TypeOpenAI,
 645			BaseURL:      "https://openrouter.ai/api/v1",
 646			ExtraHeaders: map[string]string{
 647				"HTTP-Referer": "crush.charm.land",
 648				"X-Title":      "Crush",
 649			},
 650		}
 651	case provider.InferenceProviderXAI:
 652		return ProviderConfig{
 653			ID:           providerID,
 654			ProviderType: provider.TypeXAI,
 655			BaseURL:      "https://api.x.ai/v1",
 656		}
 657	case provider.InferenceProviderVertexAI:
 658		return ProviderConfig{
 659			ID:           providerID,
 660			ProviderType: provider.TypeVertexAI,
 661		}
 662	default:
 663		return ProviderConfig{
 664			ID:           providerID,
 665			ProviderType: provider.TypeOpenAI,
 666		}
 667	}
 668}
 669
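// defaultConfigBasedOnEnv builds the initial configuration from the
// environment: it enables every provider whose API-key variable is set and
// adds Vertex AI and Bedrock when their respective credentials are detected.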
 670func defaultConfigBasedOnEnv() *Config {
 671	cfg := &Config{
 672		Options: Options{
 673			DataDirectory: defaultDataDirectory,
 674			ContextPaths:  defaultContextPaths,
 675		},
 676		Providers: make(map[provider.InferenceProvider]ProviderConfig),
 677		Agents:    make(map[AgentID]Agent),
 678		LSP:       make(map[string]LSPConfig),
 679		MCP:       make(map[string]MCP),
 680	}
 681
 682	providers := Providers()
 683
 684	for _, p := range providers {
 685		if strings.HasPrefix(p.APIKey, "$") {
 686			envVar := strings.TrimPrefix(p.APIKey, "$")
 687			if apiKey := os.Getenv(envVar); apiKey != "" {
 688				providerConfig := providerDefaultConfig(p.ID)
 689				providerConfig.APIKey = apiKey
 690				providerConfig.DefaultLargeModel = p.DefaultLargeModelID
 691				providerConfig.DefaultSmallModel = p.DefaultSmallModelID
 692				baseURL := p.APIEndpoint
 693				if strings.HasPrefix(baseURL, "$") {
 694					envVar := strings.TrimPrefix(baseURL, "$")
 695					baseURL = os.Getenv(envVar)
 696				}
 697				providerConfig.BaseURL = baseURL
 698				for _, model := range p.Models {
 699					configModel := Model{
 700						ID:                 model.ID,
 701						Name:               model.Name,
 702						CostPer1MIn:        model.CostPer1MIn,
 703						CostPer1MOut:       model.CostPer1MOut,
 704						CostPer1MInCached:  model.CostPer1MInCached,
 705						CostPer1MOutCached: model.CostPer1MOutCached,
 706						ContextWindow:      model.ContextWindow,
 707						DefaultMaxTokens:   model.DefaultMaxTokens,
 708						CanReason:          model.CanReason,
 709						SupportsImages:     model.SupportsImages,
 710					}
 711					// Set reasoning effort for reasoning models
 712					if model.HasReasoningEffort && model.DefaultReasoningEffort != "" {
 713						configModel.HasReasoningEffort = model.HasReasoningEffort
 714						configModel.ReasoningEffort = model.DefaultReasoningEffort
 715					}
 716					providerConfig.Models = append(providerConfig.Models, configModel)
 717				}
 718				cfg.Providers[p.ID] = providerConfig
 719			}
 720		}
 721	}
 722	// TODO: support local models
 723
 724	if useVertexAI := os.Getenv("GOOGLE_GENAI_USE_VERTEXAI"); useVertexAI == "true" {
 725		providerConfig := providerDefaultConfig(provider.InferenceProviderVertexAI)
 726		providerConfig.ExtraParams = map[string]string{
 727			"project":  os.Getenv("GOOGLE_CLOUD_PROJECT"),
 728			"location": os.Getenv("GOOGLE_CLOUD_LOCATION"),
 729		}
 730		// Find the VertexAI provider definition to get default models
 731		for _, p := range providers {
 732			if p.ID == provider.InferenceProviderVertexAI {
 733				providerConfig.DefaultLargeModel = p.DefaultLargeModelID
 734				providerConfig.DefaultSmallModel = p.DefaultSmallModelID
 735				for _, model := range p.Models {
 736					configModel := Model{
 737						ID:                 model.ID,
 738						Name:               model.Name,
 739						CostPer1MIn:        model.CostPer1MIn,
 740						CostPer1MOut:       model.CostPer1MOut,
 741						CostPer1MInCached:  model.CostPer1MInCached,
 742						CostPer1MOutCached: model.CostPer1MOutCached,
 743						ContextWindow:      model.ContextWindow,
 744						DefaultMaxTokens:   model.DefaultMaxTokens,
 745						CanReason:          model.CanReason,
 746						SupportsImages:     model.SupportsImages,
 747					}
 748					// Set reasoning effort for reasoning models
 749					if model.HasReasoningEffort && model.DefaultReasoningEffort != "" {
 750						configModel.HasReasoningEffort = model.HasReasoningEffort
 751						configModel.ReasoningEffort = model.DefaultReasoningEffort
 752					}
 753					providerConfig.Models = append(providerConfig.Models, configModel)
 754				}
 755				break
 756			}
 757		}
 758		cfg.Providers[provider.InferenceProviderVertexAI] = providerConfig
 759	}
 760
 761	if hasAWSCredentials() {
 762		providerConfig := providerDefaultConfig(provider.InferenceProviderBedrock)
 763		providerConfig.ExtraParams = map[string]string{
 764			"region": os.Getenv("AWS_DEFAULT_REGION"),
 765		}
 766		if providerConfig.ExtraParams["region"] == "" {
 767			providerConfig.ExtraParams["region"] = os.Getenv("AWS_REGION")
 768		}
 769		// Find the Bedrock provider definition to get default models
 770		for _, p := range providers {
 771			if p.ID == provider.InferenceProviderBedrock {
 772				providerConfig.DefaultLargeModel = p.DefaultLargeModelID
 773				providerConfig.DefaultSmallModel = p.DefaultSmallModelID
 774				for _, model := range p.Models {
 775					configModel := Model{
 776						ID:                 model.ID,
 777						Name:               model.Name,
 778						CostPer1MIn:        model.CostPer1MIn,
 779						CostPer1MOut:       model.CostPer1MOut,
 780						CostPer1MInCached:  model.CostPer1MInCached,
 781						CostPer1MOutCached: model.CostPer1MOutCached,
 782						ContextWindow:      model.ContextWindow,
 783						DefaultMaxTokens:   model.DefaultMaxTokens,
 784						CanReason:          model.CanReason,
 785						SupportsImages:     model.SupportsImages,
 786					}
 787					// Set reasoning effort for reasoning models
 788					if model.HasReasoningEffort && model.DefaultReasoningEffort != "" {
 789						configModel.HasReasoningEffort = model.HasReasoningEffort
 790						configModel.ReasoningEffort = model.DefaultReasoningEffort
 791					}
 792					providerConfig.Models = append(providerConfig.Models, configModel)
 793				}
 794				break
 795			}
 796		}
 797		cfg.Providers[provider.InferenceProviderBedrock] = providerConfig
 798	}
 799	return cfg
 800}
 801
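// hasAWSCredentials reports whether any of the common AWS credential,
// profile, region, or container-credentials environment variables are set.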
 802func hasAWSCredentials() bool {
 803	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
 804		return true
 805	}
 806
 807	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
 808		return true
 809	}
 810
 811	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
 812		return true
 813	}
 814
 815	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
 816		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
 817		return true
 818	}
 819
 820	return false
 821}
 822
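// WorkingDirectory returns the working directory the configuration was
// initialized with.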
 823func WorkingDirectory() string {
 824	return cwd
 825}
 826
 827// TODO: Handle error state
 828
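// GetAgentModel resolves the concrete Model configured for the given agent
// by following its model type (large or small) to the preferred provider
// and model.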
 829func GetAgentModel(agentID AgentID) Model {
 830	cfg := Get()
 831	agent, ok := cfg.Agents[agentID]
 832	if !ok {
 833		logging.Error("Agent not found", "agent_id", agentID)
 834		return Model{}
 835	}
 836
 837	var model PreferredModel
 838	switch agent.Model {
 839	case LargeModel:
 840		model = cfg.Models.Large
 841	case SmallModel:
 842		model = cfg.Models.Small
 843	default:
 844		logging.Warn("Unknown model type for agent", "agent_id", agentID, "model_type", agent.Model)
 845		model = cfg.Models.Large // Fallback to large model
 846	}
 847	providerConfig, ok := cfg.Providers[model.Provider]
 848	if !ok {
 849		logging.Error("Provider not found for agent", "agent_id", agentID, "provider", model.Provider)
 850		return Model{}
 851	}
 852
 853	for _, m := range providerConfig.Models {
 854		if m.ID == model.ModelID {
 855			return m
 856		}
 857	}
 858
	logging.Error("Model not found for agent", "agent_id", agentID, "provider", model.Provider, "model_id", model.ModelID)
 860	return Model{}
 861}
 862
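// GetAgentProvider returns the ProviderConfig that backs the given agent's
// preferred model.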
 863func GetAgentProvider(agentID AgentID) ProviderConfig {
 864	cfg := Get()
 865	agent, ok := cfg.Agents[agentID]
 866	if !ok {
 867		logging.Error("Agent not found", "agent_id", agentID)
 868		return ProviderConfig{}
 869	}
 870
 871	var model PreferredModel
 872	switch agent.Model {
 873	case LargeModel:
 874		model = cfg.Models.Large
 875	case SmallModel:
 876		model = cfg.Models.Small
 877	default:
 878		logging.Warn("Unknown model type for agent", "agent_id", agentID, "model_type", agent.Model)
 879		model = cfg.Models.Large // Fallback to large model
 880	}
 881
 882	providerConfig, ok := cfg.Providers[model.Provider]
 883	if !ok {
 884		logging.Error("Provider not found for agent", "agent_id", agentID, "provider", model.Provider)
 885		return ProviderConfig{}
 886	}
 887
 888	return providerConfig
 889}
 890
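// GetProviderModel looks up a model by ID within the given provider's
// configuration.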
 891func GetProviderModel(provider provider.InferenceProvider, modelID string) Model {
 892	cfg := Get()
 893	providerConfig, ok := cfg.Providers[provider]
 894	if !ok {
 895		logging.Error("Provider not found", "provider", provider)
 896		return Model{}
 897	}
 898
 899	for _, model := range providerConfig.Models {
 900		if model.ID == modelID {
 901			return model
 902		}
 903	}
 904
 905	logging.Error("Model not found for provider", "provider", provider, "model_id", modelID)
 906	return Model{}
 907}
 908
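// GetModel returns the concrete Model behind the preferred large or small
// model selection.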
 909func GetModel(modelType ModelType) Model {
 910	cfg := Get()
 911	var model PreferredModel
 912	switch modelType {
 913	case LargeModel:
 914		model = cfg.Models.Large
 915	case SmallModel:
 916		model = cfg.Models.Small
 917	default:
 918		model = cfg.Models.Large // Fallback to large model
 919	}
 920	providerConfig, ok := cfg.Providers[model.Provider]
 921	if !ok {
 922		return Model{}
 923	}
 924
 925	for _, m := range providerConfig.Models {
 926		if m.ID == model.ModelID {
 927			return m
 928		}
 929	}
 930	return Model{}
 931}
 932
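// UpdatePreferredModel replaces the in-memory preferred model for the given
// model type; it does not persist the change to disk.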
 933func UpdatePreferredModel(modelType ModelType, model PreferredModel) error {
 934	cfg := Get()
 935	switch modelType {
 936	case LargeModel:
 937		cfg.Models.Large = model
 938	case SmallModel:
 939		cfg.Models.Small = model
 940	default:
 941		return fmt.Errorf("unknown model type: %s", modelType)
 942	}
 943	return nil
 944}
 945
 946// ValidationError represents a configuration validation error
 947type ValidationError struct {
 948	Field   string
 949	Message string
 950}
 951
 952func (e ValidationError) Error() string {
 953	return fmt.Sprintf("validation error in %s: %s", e.Field, e.Message)
 954}
 955
 956// ValidationErrors represents multiple validation errors
 957type ValidationErrors []ValidationError
 958
 959func (e ValidationErrors) Error() string {
 960	if len(e) == 0 {
 961		return "no validation errors"
 962	}
 963	if len(e) == 1 {
 964		return e[0].Error()
 965	}
 966
 967	var messages []string
 968	for _, err := range e {
 969		messages = append(messages, err.Error())
 970	}
 971	return fmt.Sprintf("multiple validation errors: %s", strings.Join(messages, "; "))
 972}
 973
 974// HasErrors returns true if there are any validation errors
 975func (e ValidationErrors) HasErrors() bool {
 976	return len(e) > 0
 977}
 978
 979// Add appends a new validation error
 980func (e *ValidationErrors) Add(field, message string) {
 981	*e = append(*e, ValidationError{Field: field, Message: message})
 982}
 983
 984// Validate performs comprehensive validation of the configuration
 985func (c *Config) Validate() error {
 986	var errors ValidationErrors
 987
 988	// Validate providers
 989	c.validateProviders(&errors)
 990
 991	// Validate models
 992	c.validateModels(&errors)
 993
 994	// Validate agents
 995	c.validateAgents(&errors)
 996
 997	// Validate options
 998	c.validateOptions(&errors)
 999
1000	// Validate MCP configurations
1001	c.validateMCPs(&errors)
1002
1003	// Validate LSP configurations
1004	c.validateLSPs(&errors)
1005
1006	// Validate cross-references
1007	c.validateCrossReferences(&errors)
1008
1009	// Validate completeness
1010	c.validateCompleteness(&errors)
1011
1012	if errors.HasErrors() {
1013		return errors
1014	}
1015
1016	return nil
1017}
1018
1019// validateProviders validates all provider configurations
1020func (c *Config) validateProviders(errors *ValidationErrors) {
1021	if c.Providers == nil {
1022		c.Providers = make(map[provider.InferenceProvider]ProviderConfig)
1023	}
1024
1025	knownProviders := provider.KnownProviders()
1026	validTypes := []provider.Type{
1027		provider.TypeOpenAI,
1028		provider.TypeAnthropic,
1029		provider.TypeGemini,
1030		provider.TypeAzure,
1031		provider.TypeBedrock,
1032		provider.TypeVertexAI,
1033		provider.TypeXAI,
1034	}
1035
1036	for providerID, providerConfig := range c.Providers {
1037		fieldPrefix := fmt.Sprintf("providers.%s", providerID)
1038
1039		// Validate API key for non-disabled providers
1040		if !providerConfig.Disabled && providerConfig.APIKey == "" {
1041			// Special case for AWS Bedrock and VertexAI which may use other auth methods
1042			if providerID != provider.InferenceProviderBedrock && providerID != provider.InferenceProviderVertexAI {
1043				errors.Add(fieldPrefix+".api_key", "API key is required for non-disabled providers")
1044			}
1045		}
1046
1047		// Validate provider type
1048		validType := slices.Contains(validTypes, providerConfig.ProviderType)
1049		if !validType {
1050			errors.Add(fieldPrefix+".provider_type", fmt.Sprintf("invalid provider type: %s", providerConfig.ProviderType))
1051		}
1052
1053		// Validate custom providers
1054		isKnownProvider := slices.Contains(knownProviders, providerID)
1055
1056		if !isKnownProvider {
1057			// Custom provider validation
1058			if providerConfig.BaseURL == "" {
1059				errors.Add(fieldPrefix+".base_url", "BaseURL is required for custom providers")
1060			}
1061			if providerConfig.ProviderType != provider.TypeOpenAI {
1062				errors.Add(fieldPrefix+".provider_type", "custom providers currently only support OpenAI type")
1063			}
1064		}
1065
1066		// Validate models
1067		modelIDs := make(map[string]bool)
1068		for i, model := range providerConfig.Models {
1069			modelFieldPrefix := fmt.Sprintf("%s.models[%d]", fieldPrefix, i)
1070
1071			// Check for duplicate model IDs
1072			if modelIDs[model.ID] {
1073				errors.Add(modelFieldPrefix+".id", fmt.Sprintf("duplicate model ID: %s", model.ID))
1074			}
1075			modelIDs[model.ID] = true
1076
1077			// Validate required model fields
1078			if model.ID == "" {
1079				errors.Add(modelFieldPrefix+".id", "model ID is required")
1080			}
1081			if model.Name == "" {
1082				errors.Add(modelFieldPrefix+".name", "model name is required")
1083			}
1084			if model.ContextWindow <= 0 {
1085				errors.Add(modelFieldPrefix+".context_window", "context window must be positive")
1086			}
1087			if model.DefaultMaxTokens <= 0 {
1088				errors.Add(modelFieldPrefix+".default_max_tokens", "default max tokens must be positive")
1089			}
1090			if model.DefaultMaxTokens > model.ContextWindow {
1091				errors.Add(modelFieldPrefix+".default_max_tokens", "default max tokens cannot exceed context window")
1092			}
1093
1094			// Validate cost fields
1095			if model.CostPer1MIn < 0 {
1096				errors.Add(modelFieldPrefix+".cost_per_1m_in", "cost per 1M input tokens cannot be negative")
1097			}
1098			if model.CostPer1MOut < 0 {
1099				errors.Add(modelFieldPrefix+".cost_per_1m_out", "cost per 1M output tokens cannot be negative")
1100			}
1101			if model.CostPer1MInCached < 0 {
1102				errors.Add(modelFieldPrefix+".cost_per_1m_in_cached", "cached cost per 1M input tokens cannot be negative")
1103			}
1104			if model.CostPer1MOutCached < 0 {
1105				errors.Add(modelFieldPrefix+".cost_per_1m_out_cached", "cached cost per 1M output tokens cannot be negative")
1106			}
1107		}
1108
1109		// Validate default model references
1110		if providerConfig.DefaultLargeModel != "" {
1111			if !modelIDs[providerConfig.DefaultLargeModel] {
1112				errors.Add(fieldPrefix+".default_large_model", fmt.Sprintf("default large model '%s' not found in provider models", providerConfig.DefaultLargeModel))
1113			}
1114		}
1115		if providerConfig.DefaultSmallModel != "" {
1116			if !modelIDs[providerConfig.DefaultSmallModel] {
1117				errors.Add(fieldPrefix+".default_small_model", fmt.Sprintf("default small model '%s' not found in provider models", providerConfig.DefaultSmallModel))
1118			}
1119		}
1120
1121		// Validate provider-specific requirements
1122		c.validateProviderSpecific(providerID, providerConfig, errors)
1123	}
1124}
1125
1126// validateProviderSpecific validates provider-specific requirements
1127func (c *Config) validateProviderSpecific(providerID provider.InferenceProvider, providerConfig ProviderConfig, errors *ValidationErrors) {
1128	fieldPrefix := fmt.Sprintf("providers.%s", providerID)
1129
1130	switch providerID {
1131	case provider.InferenceProviderVertexAI:
1132		if !providerConfig.Disabled {
1133			if providerConfig.ExtraParams == nil {
1134				errors.Add(fieldPrefix+".extra_params", "VertexAI requires extra_params configuration")
1135			} else {
1136				if providerConfig.ExtraParams["project"] == "" {
1137					errors.Add(fieldPrefix+".extra_params.project", "VertexAI requires project parameter")
1138				}
1139				if providerConfig.ExtraParams["location"] == "" {
1140					errors.Add(fieldPrefix+".extra_params.location", "VertexAI requires location parameter")
1141				}
1142			}
1143		}
1144	case provider.InferenceProviderBedrock:
1145		if !providerConfig.Disabled {
1146			if providerConfig.ExtraParams == nil || providerConfig.ExtraParams["region"] == "" {
1147				errors.Add(fieldPrefix+".extra_params.region", "Bedrock requires region parameter")
1148			}
1149			// Check for AWS credentials in environment
1150			if !hasAWSCredentials() {
1151				errors.Add(fieldPrefix, "Bedrock requires AWS credentials in environment")
1152			}
1153		}
1154	}
1155}
1156
1157// validateModels validates preferred model configurations
1158func (c *Config) validateModels(errors *ValidationErrors) {
1159	// Validate large model
1160	if c.Models.Large.ModelID != "" || c.Models.Large.Provider != "" {
1161		if c.Models.Large.ModelID == "" {
1162			errors.Add("models.large.model_id", "large model ID is required when provider is set")
1163		}
1164		if c.Models.Large.Provider == "" {
1165			errors.Add("models.large.provider", "large model provider is required when model ID is set")
1166		}
1167
1168		// Check if provider exists and is not disabled
1169		if providerConfig, exists := c.Providers[c.Models.Large.Provider]; exists {
1170			if providerConfig.Disabled {
1171				errors.Add("models.large.provider", "large model provider is disabled")
1172			}
1173
1174			// Check if model exists in provider
1175			modelExists := false
1176			for _, model := range providerConfig.Models {
1177				if model.ID == c.Models.Large.ModelID {
1178					modelExists = true
1179					break
1180				}
1181			}
1182			if !modelExists {
1183				errors.Add("models.large.model_id", fmt.Sprintf("large model '%s' not found in provider '%s'", c.Models.Large.ModelID, c.Models.Large.Provider))
1184			}
1185		} else {
1186			errors.Add("models.large.provider", fmt.Sprintf("large model provider '%s' not found", c.Models.Large.Provider))
1187		}
1188	}
1189
1190	// Validate small model
1191	if c.Models.Small.ModelID != "" || c.Models.Small.Provider != "" {
1192		if c.Models.Small.ModelID == "" {
1193			errors.Add("models.small.model_id", "small model ID is required when provider is set")
1194		}
1195		if c.Models.Small.Provider == "" {
1196			errors.Add("models.small.provider", "small model provider is required when model ID is set")
1197		}
1198
1199		// Check if provider exists and is not disabled
1200		if providerConfig, exists := c.Providers[c.Models.Small.Provider]; exists {
1201			if providerConfig.Disabled {
1202				errors.Add("models.small.provider", "small model provider is disabled")
1203			}
1204
1205			// Check if model exists in provider
1206			modelExists := false
1207			for _, model := range providerConfig.Models {
1208				if model.ID == c.Models.Small.ModelID {
1209					modelExists = true
1210					break
1211				}
1212			}
1213			if !modelExists {
1214				errors.Add("models.small.model_id", fmt.Sprintf("small model '%s' not found in provider '%s'", c.Models.Small.ModelID, c.Models.Small.Provider))
1215			}
1216		} else {
1217			errors.Add("models.small.provider", fmt.Sprintf("small model provider '%s' not found", c.Models.Small.Provider))
1218		}
1219	}
1220}
1221
1222// validateAgents validates agent configurations
1223func (c *Config) validateAgents(errors *ValidationErrors) {
1224	if c.Agents == nil {
1225		c.Agents = make(map[AgentID]Agent)
1226	}
1227
1228	validTools := []string{
1229		"bash", "edit", "fetch", "glob", "grep", "ls", "sourcegraph", "view", "write", "agent",
1230	}
1231
1232	for agentID, agent := range c.Agents {
1233		fieldPrefix := fmt.Sprintf("agents.%s", agentID)
1234
1235		// Validate agent ID consistency
1236		if agent.ID != agentID {
1237			errors.Add(fieldPrefix+".id", fmt.Sprintf("agent ID mismatch: expected '%s', got '%s'", agentID, agent.ID))
1238		}
1239
1240		// Validate required fields
1241		if agent.ID == "" {
1242			errors.Add(fieldPrefix+".id", "agent ID is required")
1243		}
1244		if agent.Name == "" {
1245			errors.Add(fieldPrefix+".name", "agent name is required")
1246		}
1247
1248		// Validate model type
1249		if agent.Model != LargeModel && agent.Model != SmallModel {
1250			errors.Add(fieldPrefix+".model", fmt.Sprintf("invalid model type: %s (must be 'large' or 'small')", agent.Model))
1251		}
1252
1253		// Validate allowed tools
1254		if agent.AllowedTools != nil {
1255			for i, tool := range agent.AllowedTools {
1256				validTool := slices.Contains(validTools, tool)
1257				if !validTool {
1258					errors.Add(fmt.Sprintf("%s.allowed_tools[%d]", fieldPrefix, i), fmt.Sprintf("unknown tool: %s", tool))
1259				}
1260			}
1261		}
1262
1263		// Validate MCP references
1264		if agent.AllowedMCP != nil {
1265			for mcpName := range agent.AllowedMCP {
1266				if _, exists := c.MCP[mcpName]; !exists {
1267					errors.Add(fieldPrefix+".allowed_mcp", fmt.Sprintf("referenced MCP '%s' not found", mcpName))
1268				}
1269			}
1270		}
1271
1272		// Validate LSP references
1273		if agent.AllowedLSP != nil {
1274			for _, lspName := range agent.AllowedLSP {
1275				if _, exists := c.LSP[lspName]; !exists {
1276					errors.Add(fieldPrefix+".allowed_lsp", fmt.Sprintf("referenced LSP '%s' not found", lspName))
1277				}
1278			}
1279		}
1280
1281		// Validate context paths (basic path validation)
1282		for i, contextPath := range agent.ContextPaths {
1283			if contextPath == "" {
1284				errors.Add(fmt.Sprintf("%s.context_paths[%d]", fieldPrefix, i), "context path cannot be empty")
1285			}
1286			// Check for invalid characters in path
1287			if strings.Contains(contextPath, "\x00") {
1288				errors.Add(fmt.Sprintf("%s.context_paths[%d]", fieldPrefix, i), "context path contains invalid characters")
1289			}
1290		}
1291
1292		// Validate known agents maintain their core properties
1293		if agentID == AgentCoder {
1294			if agent.Name != "Coder" {
1295				errors.Add(fieldPrefix+".name", "coder agent name cannot be changed")
1296			}
1297			if agent.Description != "An agent that helps with executing coding tasks." {
1298				errors.Add(fieldPrefix+".description", "coder agent description cannot be changed")
1299			}
1300		} else if agentID == AgentTask {
1301			if agent.Name != "Task" {
1302				errors.Add(fieldPrefix+".name", "task agent name cannot be changed")
1303			}
1304			if agent.Description != "An agent that helps with searching for context and finding implementation details." {
1305				errors.Add(fieldPrefix+".description", "task agent description cannot be changed")
1306			}
1307			expectedTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
1308			if agent.AllowedTools != nil && !slices.Equal(agent.AllowedTools, expectedTools) {
1309				errors.Add(fieldPrefix+".allowed_tools", "task agent allowed tools cannot be changed")
1310			}
1311		}
1312	}
1313}
1314
1315// validateOptions validates configuration options
1316func (c *Config) validateOptions(errors *ValidationErrors) {
1317	// Validate data directory
1318	if c.Options.DataDirectory == "" {
1319		errors.Add("options.data_directory", "data directory is required")
1320	}
1321
1322	// Validate context paths
1323	for i, contextPath := range c.Options.ContextPaths {
1324		if contextPath == "" {
1325			errors.Add(fmt.Sprintf("options.context_paths[%d]", i), "context path cannot be empty")
1326		}
1327		if strings.Contains(contextPath, "\x00") {
1328			errors.Add(fmt.Sprintf("options.context_paths[%d]", i), "context path contains invalid characters")
1329		}
1330	}
1331}
1332
1333// validateMCPs validates MCP configurations
1334func (c *Config) validateMCPs(errors *ValidationErrors) {
1335	if c.MCP == nil {
1336		c.MCP = make(map[string]MCP)
1337	}
1338
1339	for mcpName, mcpConfig := range c.MCP {
1340		fieldPrefix := fmt.Sprintf("mcp.%s", mcpName)
1341
1342		// Validate MCP type
1343		if mcpConfig.Type != MCPStdio && mcpConfig.Type != MCPSse {
1344			errors.Add(fieldPrefix+".type", fmt.Sprintf("invalid MCP type: %s (must be 'stdio' or 'sse')", mcpConfig.Type))
1345		}
1346
1347		// Validate based on type
1348		if mcpConfig.Type == MCPStdio {
1349			if mcpConfig.Command == "" {
1350				errors.Add(fieldPrefix+".command", "command is required for stdio MCP")
1351			}
1352		} else if mcpConfig.Type == MCPSse {
1353			if mcpConfig.URL == "" {
1354				errors.Add(fieldPrefix+".url", "URL is required for SSE MCP")
1355			}
1356		}
1357	}
1358}
1359
1360// validateLSPs validates LSP configurations
1361func (c *Config) validateLSPs(errors *ValidationErrors) {
1362	if c.LSP == nil {
1363		c.LSP = make(map[string]LSPConfig)
1364	}
1365
1366	for lspName, lspConfig := range c.LSP {
1367		fieldPrefix := fmt.Sprintf("lsp.%s", lspName)
1368
1369		if lspConfig.Command == "" {
1370			errors.Add(fieldPrefix+".command", "command is required for LSP")
1371		}
1372	}
1373}
1374
1375// validateCrossReferences validates cross-references between different config sections
1376func (c *Config) validateCrossReferences(errors *ValidationErrors) {
1377	// Validate that agents can use their assigned model types
1378	for agentID, agent := range c.Agents {
1379		fieldPrefix := fmt.Sprintf("agents.%s", agentID)
1380
1381		var preferredModel PreferredModel
1382		switch agent.Model {
1383		case LargeModel:
1384			preferredModel = c.Models.Large
1385		case SmallModel:
1386			preferredModel = c.Models.Small
1387		}
1388
1389		if preferredModel.Provider != "" {
1390			if providerConfig, exists := c.Providers[preferredModel.Provider]; exists {
1391				if providerConfig.Disabled {
1392					errors.Add(fieldPrefix+".model", fmt.Sprintf("agent cannot use model type '%s' because provider '%s' is disabled", agent.Model, preferredModel.Provider))
1393				}
1394			}
1395		}
1396	}
1397}
1398
1399// validateCompleteness validates that the configuration is complete and usable
1400func (c *Config) validateCompleteness(errors *ValidationErrors) {
1401	// Check for at least one valid, non-disabled provider
1402	hasValidProvider := false
1403	for _, providerConfig := range c.Providers {
1404		if !providerConfig.Disabled {
1405			hasValidProvider = true
1406			break
1407		}
1408	}
1409	if !hasValidProvider {
1410		errors.Add("providers", "at least one non-disabled provider is required")
1411	}
1412
1413	// Check that default agents exist
1414	if _, exists := c.Agents[AgentCoder]; !exists {
1415		errors.Add("agents", "coder agent is required")
1416	}
1417	if _, exists := c.Agents[AgentTask]; !exists {
1418		errors.Add("agents", "task agent is required")
1419	}
1420
1421	// Check that preferred models are set if providers exist
1422	if hasValidProvider {
1423		if c.Models.Large.ModelID == "" || c.Models.Large.Provider == "" {
1424			errors.Add("models.large", "large preferred model must be configured when providers are available")
1425		}
1426		if c.Models.Small.ModelID == "" || c.Models.Small.Provider == "" {
1427			errors.Add("models.small", "small preferred model must be configured when providers are available")
1428		}
1429	}
1430}
1431
1432// JSONSchemaExtend adds custom schema properties for AgentID
1433func (AgentID) JSONSchemaExtend(schema *jsonschema.Schema) {
1434	schema.Enum = []any{
1435		string(AgentCoder),
1436		string(AgentTask),
1437	}
1438}
1439
1440// JSONSchemaExtend adds custom schema properties for ModelType
1441func (ModelType) JSONSchemaExtend(schema *jsonschema.Schema) {
1442	schema.Enum = []any{
1443		string(LargeModel),
1444		string(SmallModel),
1445	}
1446}
1447
1448// JSONSchemaExtend adds custom schema properties for MCPType
1449func (MCPType) JSONSchemaExtend(schema *jsonschema.Schema) {
1450	schema.Enum = []any{
1451		string(MCPStdio),
1452		string(MCPSse),
1453	}
1454}