package config

import (
	"encoding/json"
	"errors"
	"fmt"
	"log/slog"
	"maps"
	"os"
	"path/filepath"
	"slices"
	"strings"
	"sync"

	"github.com/charmbracelet/crush/internal/fur/provider"
	"github.com/charmbracelet/crush/internal/logging"
	"github.com/invopop/jsonschema"
)

const (
	defaultDataDirectory = ".crush"
	defaultLogLevel      = "info"
	appName              = "crush"

	MaxTokensFallbackDefault = 4096
)

var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"GEMINI.md",
	"gemini.md",
	"crush.md",
	"crush.local.md",
	"Crush.md",
	"Crush.local.md",
	"CRUSH.md",
	"CRUSH.local.md",
}

type AgentID string

const (
	AgentCoder AgentID = "coder"
	AgentTask  AgentID = "task"
)

type ModelType string

const (
	LargeModel ModelType = "large"
	SmallModel ModelType = "small"
)

type Model struct {
	ID                 string  `json:"id" jsonschema:"title=Model ID,description=Unique identifier for the model as used by the provider API"`
	Name               string  `json:"name" jsonschema:"title=Model Name,description=Display name of the model"`
	CostPer1MIn        float64 `json:"cost_per_1m_in,omitempty" jsonschema:"title=Input Cost,description=Cost per 1 million input tokens,minimum=0"`
	CostPer1MOut       float64 `json:"cost_per_1m_out,omitempty" jsonschema:"title=Output Cost,description=Cost per 1 million output tokens,minimum=0"`
	CostPer1MInCached  float64 `json:"cost_per_1m_in_cached,omitempty" jsonschema:"title=Cached Input Cost,description=Cost per 1 million cached input tokens,minimum=0"`
	CostPer1MOutCached float64 `json:"cost_per_1m_out_cached,omitempty" jsonschema:"title=Cached Output Cost,description=Cost per 1 million cached output tokens,minimum=0"`
	ContextWindow      int64   `json:"context_window" jsonschema:"title=Context Window,description=Maximum context window size in tokens,minimum=1"`
	DefaultMaxTokens   int64   `json:"default_max_tokens" jsonschema:"title=Default Max Tokens,description=Default maximum tokens for responses,minimum=1"`
	CanReason          bool    `json:"can_reason,omitempty" jsonschema:"title=Can Reason,description=Whether the model supports reasoning capabilities"`
	ReasoningEffort    string  `json:"reasoning_effort,omitempty" jsonschema:"title=Reasoning Effort,description=Default reasoning effort level for reasoning models"`
	HasReasoningEffort bool    `json:"has_reasoning_effort,omitempty" jsonschema:"title=Has Reasoning Effort,description=Whether the model supports reasoning effort configuration"`
	SupportsImages     bool    `json:"supports_attachments,omitempty" jsonschema:"title=Supports Images,description=Whether the model supports image attachments"`
}

type VertexAIOptions struct {
	APIKey   string `json:"api_key,omitempty"`
	Project  string `json:"project,omitempty"`
	Location string `json:"location,omitempty"`
}

type ProviderConfig struct {
	ID           provider.InferenceProvider `json:"id,omitempty" jsonschema:"title=Provider ID,description=Unique identifier for the provider"`
	BaseURL      string                     `json:"base_url,omitempty" jsonschema:"title=Base URL,description=Base URL for the provider API (required for custom providers)"`
	ProviderType provider.Type              `json:"provider_type" jsonschema:"title=Provider Type,description=Type of the provider such as openai or anthropic"`
	APIKey       string                     `json:"api_key,omitempty" jsonschema:"title=API Key,description=API key for authenticating with the provider"`
	Disabled     bool                       `json:"disabled,omitempty" jsonschema:"title=Disabled,description=Whether this provider is disabled,default=false"`
	ExtraHeaders map[string]string          `json:"extra_headers,omitempty" jsonschema:"title=Extra Headers,description=Additional HTTP headers to send with requests"`
	// used e.g. by Vertex AI to set the project
	ExtraParams map[string]string `json:"extra_params,omitempty" jsonschema:"title=Extra Parameters,description=Additional provider-specific parameters"`

	DefaultLargeModel string `json:"default_large_model,omitempty" jsonschema:"title=Default Large Model,description=Default model ID for large model type"`
	DefaultSmallModel string `json:"default_small_model,omitempty" jsonschema:"title=Default Small Model,description=Default model ID for small model type"`

	Models []Model `json:"models,omitempty" jsonschema:"title=Models,description=List of available models for this provider"`
}
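
// A custom (non-known) provider must point at an OpenAI-compatible endpoint.
// The sketch below shows what such an entry in crush.json could look like,
// using the struct tags above; the "my-proxy" name, URL, and model values are
// hypothetical placeholders, not real identifiers:
//
//	"providers": {
//	  "my-proxy": {
//	    "provider_type": "openai",
//	    "base_url": "https://llm.example.com/v1",
//	    "api_key": "sk-...",
//	    "models": [
//	      {"id": "my-model", "name": "My Model", "context_window": 128000, "default_max_tokens": 4096}
//	    ]
//	  }
//	}
//
// Custom providers must set provider_type, base_url, and api_key (see
// validateProvider below); known providers typically only need an api_key.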

type Agent struct {
	ID          AgentID `json:"id,omitempty" jsonschema:"title=Agent ID,description=Unique identifier for the agent,enum=coder,enum=task"`
	Name        string  `json:"name,omitempty" jsonschema:"title=Name,description=Display name of the agent"`
	Description string  `json:"description,omitempty" jsonschema:"title=Description,description=Description of what the agent does"`
	Disabled bool `json:"disabled,omitempty" jsonschema:"title=Disabled,description=Whether this agent is disabled,default=false"`

	Model ModelType `json:"model" jsonschema:"title=Model Type,description=Type of model to use (large or small),enum=large,enum=small"`

	// The tools available to the agent.
	// If nil, all tools are available.
	AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"title=Allowed Tools,description=List of tools this agent is allowed to use (if nil all tools are allowed)"`

	// The MCP servers available to this agent.
	// If empty, all MCPs are available.
	// The string slice lists the tools the agent may use from that MCP;
	// if the slice is nil, all tools from that MCP are available.
	AllowedMCP map[string][]string `json:"allowed_mcp,omitempty" jsonschema:"title=Allowed MCP,description=Map of MCP servers this agent can use and their allowed tools"`

	// The LSP servers available to this agent.
	// If nil, all LSPs are available.
	AllowedLSP []string `json:"allowed_lsp,omitempty" jsonschema:"title=Allowed LSP,description=List of LSP servers this agent can use (if nil all LSPs are allowed)"`

	// Additional context paths for this agent, appended to the global context paths.
	ContextPaths []string `json:"context_paths,omitempty" jsonschema:"title=Context Paths,description=Custom context paths for this agent (additive to global context paths)"`
}
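
// Agent entries in crush.json are keyed by agent ID. For the built-in agents
// only the model type, MCP/LSP access, and context paths can be overridden
// (see mergeAgents). A minimal sketch, assuming a hypothetical "docs" MCP
// server and "gopls" LSP entry are configured elsewhere in the file:
//
//	"agents": {
//	  "coder": {
//	    "model": "large",
//	    "allowed_mcp": {"docs": null},
//	    "allowed_lsp": ["gopls"]
//	  }
//	}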

type MCPType string

const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)

type MCP struct {
	Command string   `json:"command" jsonschema:"title=Command,description=Command to execute for stdio MCP servers"`
	Env     []string `json:"env,omitempty" jsonschema:"title=Environment,description=Environment variables for the MCP server"`
	Args    []string `json:"args,omitempty" jsonschema:"title=Arguments,description=Command line arguments for the MCP server"`
	Type    MCPType  `json:"type" jsonschema:"title=Type,description=Type of MCP connection,enum=stdio,enum=sse,default=stdio"`
	URL     string   `json:"url,omitempty" jsonschema:"title=URL,description=URL for SSE MCP servers"`
	// TODO: maybe make it possible to get the value from the env
	Headers map[string]string `json:"headers,omitempty" jsonschema:"title=Headers,description=HTTP headers for SSE MCP servers"`
}

type LSPConfig struct {
	Disabled bool     `json:"disabled,omitempty" jsonschema:"title=Disabled,description=Whether this LSP server is disabled,default=false"`
	Command  string   `json:"command" jsonschema:"title=Command,description=Command to execute for the LSP server"`
	Args     []string `json:"args,omitempty" jsonschema:"title=Arguments,description=Command line arguments for the LSP server"`
	Options  any      `json:"options,omitempty" jsonschema:"title=Options,description=LSP server specific options"`
}

type TUIOptions struct {
	CompactMode bool `json:"compact_mode" jsonschema:"title=Compact Mode,description=Enable compact mode for the TUI,default=false"`
	// Here we can add themes later or any TUI related options
}

type Options struct {
	ContextPaths         []string   `json:"context_paths,omitempty" jsonschema:"title=Context Paths,description=List of paths to search for context files"`
	TUI                  TUIOptions `json:"tui,omitempty" jsonschema:"title=TUI Options,description=Terminal UI configuration options"`
	Debug                bool       `json:"debug,omitempty" jsonschema:"title=Debug,description=Enable debug logging,default=false"`
	DebugLSP             bool       `json:"debug_lsp,omitempty" jsonschema:"title=Debug LSP,description=Enable LSP debug logging,default=false"`
	DisableAutoSummarize bool       `json:"disable_auto_summarize,omitempty" jsonschema:"title=Disable Auto Summarize,description=Disable automatic conversation summarization,default=false"`
	// Relative to the cwd
	DataDirectory string `json:"data_directory,omitempty" jsonschema:"title=Data Directory,description=Directory for storing application data,default=.crush"`
}

type PreferredModel struct {
	ModelID  string                     `json:"model_id" jsonschema:"title=Model ID,description=ID of the preferred model"`
	Provider provider.InferenceProvider `json:"provider" jsonschema:"title=Provider,description=Provider for the preferred model"`
	// ReasoningEffort overrides the default reasoning effort for this model
	ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"title=Reasoning Effort,description=Override reasoning effort for this model"`
	// MaxTokens overrides the default max tokens for this model
	MaxTokens int64 `json:"max_tokens,omitempty" jsonschema:"title=Max Tokens,description=Override max tokens for this model,minimum=1"`

	// Think indicates if the model should think, only applicable for anthropic reasoning models
	Think bool `json:"think,omitempty" jsonschema:"title=Think,description=Enable thinking for reasoning models,default=false"`
}

type PreferredModels struct {
	Large PreferredModel `json:"large,omitempty" jsonschema:"title=Large Model,description=Preferred model configuration for large model type"`
	Small PreferredModel `json:"small,omitempty" jsonschema:"title=Small Model,description=Preferred model configuration for small model type"`
}

type Config struct {
	Models PreferredModels `json:"models,omitempty" jsonschema:"title=Models,description=Preferred model configurations for large and small model types"`
	// List of configured providers
	Providers map[provider.InferenceProvider]ProviderConfig `json:"providers,omitempty" jsonschema:"title=Providers,description=LLM provider configurations"`

	// List of configured agents
	Agents map[AgentID]Agent `json:"agents,omitempty" jsonschema:"title=Agents,description=Agent configurations for different tasks"`

	// List of configured MCPs
	MCP map[string]MCP `json:"mcp,omitempty" jsonschema:"title=MCP,description=Model Context Protocol server configurations"`

	// List of configured LSPs
	LSP map[string]LSPConfig `json:"lsp,omitempty" jsonschema:"title=LSP,description=Language Server Protocol configurations"`

	// Miscellaneous options
	Options Options `json:"options,omitempty" jsonschema:"title=Options,description=General application options and settings"`
}
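
// Putting the pieces together, a small crush.json could look like the sketch
// below. The field names come from the struct tags above; the provider key
// and model IDs shown are placeholders, not guaranteed identifiers:
//
//	{
//	  "models": {
//	    "large": {"provider": "anthropic", "model_id": "some-large-model"},
//	    "small": {"provider": "anthropic", "model_id": "some-small-model"}
//	  },
//	  "providers": {
//	    "anthropic": {"api_key": "sk-ant-..."}
//	  },
//	  "mcp": {
//	    "docs": {"type": "stdio", "command": "docs-mcp"}
//	  },
//	  "lsp": {
//	    "go": {"command": "gopls"}
//	  },
//	  "options": {"data_directory": ".crush", "debug": false}
//	}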

var (
	instance *Config // The single instance of the Singleton
	cwd      string
	once     sync.Once // Ensures the initialization happens only once
)

func loadConfig(cwd string, debug bool) (*Config, error) {
	// First read the global config file
	cfgPath := ConfigPath()

	cfg := defaultConfigBasedOnEnv()
	cfg.Options.Debug = debug
	defaultLevel := slog.LevelInfo
	if cfg.Options.Debug {
		defaultLevel = slog.LevelDebug
	}
	if os.Getenv("CRUSH_DEV_DEBUG") == "true" {
		loggingFile := filepath.Join(cfg.Options.DataDirectory, "debug.log")

		// if the file does not exist, create it
		if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
			if err := os.MkdirAll(cfg.Options.DataDirectory, 0o755); err != nil {
				return cfg, fmt.Errorf("failed to create directory: %w", err)
			}
			if _, err := os.Create(loggingFile); err != nil {
				return cfg, fmt.Errorf("failed to create log file: %w", err)
			}
		}

		logFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
		if err != nil {
			return cfg, fmt.Errorf("failed to open log file: %w", err)
		}
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logFileWriter, &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	} else {
		// Configure logger
		logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
			Level: defaultLevel,
		}))
		slog.SetDefault(logger)
	}
	var globalCfg *Config
	if _, err := os.Stat(cfgPath); err != nil && !os.IsNotExist(err) {
		// some other error occurred while checking the file
		return nil, err
	} else if err == nil {
		// config file exists, read it
		file, err := os.ReadFile(cfgPath)
		if err != nil {
			return nil, err
		}
		globalCfg = &Config{}
		if err := json.Unmarshal(file, globalCfg); err != nil {
			return nil, err
		}
	} else {
		// config file does not exist; start from an empty config
		globalCfg = &Config{}
	}

	var localConfig *Config
	// Global config loaded, now read the local config file
	localConfigPath := filepath.Join(cwd, "crush.json")
	if _, err := os.Stat(localConfigPath); err != nil && !os.IsNotExist(err) {
		// some other error occurred while checking the file
		return nil, err
	} else if err == nil {
		// local config file exists, read it
		file, err := os.ReadFile(localConfigPath)
		if err != nil {
			return nil, err
		}
		localConfig = &Config{}
		if err := json.Unmarshal(file, localConfig); err != nil {
			return nil, err
		}
	}

	// merge options
	mergeOptions(cfg, globalCfg, localConfig)

	mergeProviderConfigs(cfg, globalCfg, localConfig)
	// no providers found; the app is not initialized yet
	if len(cfg.Providers) == 0 {
		return cfg, nil
	}
	preferredProvider := getPreferredProvider(cfg.Providers)
	if preferredProvider != nil {
		cfg.Models = PreferredModels{
			Large: PreferredModel{
				ModelID:  preferredProvider.DefaultLargeModel,
				Provider: preferredProvider.ID,
			},
			Small: PreferredModel{
				ModelID:  preferredProvider.DefaultSmallModel,
				Provider: preferredProvider.ID,
			},
		}
	} else {
		// No valid providers found, set empty models
		cfg.Models = PreferredModels{}
	}

	mergeModels(cfg, globalCfg, localConfig)

	agents := map[AgentID]Agent{
		AgentCoder: {
			ID:           AgentCoder,
			Name:         "Coder",
			Description:  "An agent that helps with executing coding tasks.",
			Model:        LargeModel,
			ContextPaths: cfg.Options.ContextPaths,
			// All tools allowed
		},
		AgentTask: {
			ID:           AgentTask,
			Name:         "Task",
			Description:  "An agent that helps with searching for context and finding implementation details.",
			Model:        LargeModel,
			ContextPaths: cfg.Options.ContextPaths,
			AllowedTools: []string{
				"glob",
				"grep",
				"ls",
				"sourcegraph",
				"view",
			},
			// NO MCPs or LSPs by default
			AllowedMCP: map[string][]string{},
			AllowedLSP: []string{},
		},
	}
	cfg.Agents = agents
	mergeAgents(cfg, globalCfg, localConfig)
	mergeMCPs(cfg, globalCfg, localConfig)
	mergeLSPs(cfg, globalCfg, localConfig)

	// Validate the final configuration
	if err := cfg.Validate(); err != nil {
		return cfg, fmt.Errorf("configuration validation failed: %w", err)
	}

	return cfg, nil
}

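// Init loads the configuration exactly once (guarded by sync.Once) and stores
// it in the package-level instance returned by Get. A minimal usage sketch
// from a hypothetical application entry point:
//
//	cwd, _ := os.Getwd()
//	cfg, err := config.Init(cwd, false)
//	if err != nil {
//		log.Fatal(err)
//	}
//	_ = cfg.Options.DataDirectory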
func Init(workingDir string, debug bool) (*Config, error) {
	var err error
	once.Do(func() {
		cwd = workingDir
		instance, err = loadConfig(cwd, debug)
		if err != nil {
			logging.Error("Failed to load config", "error", err)
		}
	})

	return instance, err
}

func Get() *Config {
	if instance == nil {
		// TODO: Handle this better
		panic("Config not initialized. Call Init first.")
	}
	return instance
}

func getPreferredProvider(configuredProviders map[provider.InferenceProvider]ProviderConfig) *ProviderConfig {
	providers := Providers()
	for _, p := range providers {
		if providerConfig, ok := configuredProviders[p.ID]; ok && !providerConfig.Disabled {
			return &providerConfig
		}
	}
	// if none found, return the first non-disabled configured provider
	for _, providerConfig := range configuredProviders {
		if !providerConfig.Disabled {
			return &providerConfig
		}
	}
	return nil
}

func mergeProviderConfig(p provider.InferenceProvider, base, other ProviderConfig) ProviderConfig {
	if other.APIKey != "" {
		base.APIKey = other.APIKey
	}
	// Only change these options if the provider is not a known provider
	if !slices.Contains(provider.KnownProviders(), p) {
		if other.BaseURL != "" {
			base.BaseURL = other.BaseURL
		}
		if other.ProviderType != "" {
			base.ProviderType = other.ProviderType
		}
		if len(other.ExtraHeaders) > 0 {
			if base.ExtraHeaders == nil {
				base.ExtraHeaders = make(map[string]string)
			}
			maps.Copy(base.ExtraHeaders, other.ExtraHeaders)
		}
		if len(other.ExtraParams) > 0 {
			if base.ExtraParams == nil {
				base.ExtraParams = make(map[string]string)
			}
			maps.Copy(base.ExtraParams, other.ExtraParams)
		}
	}

	if other.Disabled {
		base.Disabled = other.Disabled
	}

	if other.DefaultLargeModel != "" {
		base.DefaultLargeModel = other.DefaultLargeModel
	}
	if other.DefaultSmallModel != "" {
		base.DefaultSmallModel = other.DefaultSmallModel
	}
	// Add new models if they don't exist
	if other.Models != nil {
		for _, model := range other.Models {
			// check if the model already exists
			exists := false
			for _, existingModel := range base.Models {
				if existingModel.ID == model.ID {
					exists = true
					break
				}
			}
			if !exists {
				base.Models = append(base.Models, model)
			}
		}
	}

	return base
}

func validateProvider(p provider.InferenceProvider, providerConfig ProviderConfig) error {
	if !slices.Contains(provider.KnownProviders(), p) {
		if providerConfig.ProviderType != provider.TypeOpenAI {
			return errors.New("invalid provider type: " + string(providerConfig.ProviderType))
		}
		if providerConfig.BaseURL == "" {
			return errors.New("base URL must be set for custom providers")
		}
		if providerConfig.APIKey == "" {
			return errors.New("API key must be set for custom providers")
		}
	}
	return nil
}

func mergeModels(base, global, local *Config) {
	for _, cfg := range []*Config{global, local} {
		if cfg == nil {
			continue
		}
		if cfg.Models.Large.ModelID != "" && cfg.Models.Large.Provider != "" {
			base.Models.Large = cfg.Models.Large
		}

		if cfg.Models.Small.ModelID != "" && cfg.Models.Small.Provider != "" {
			base.Models.Small = cfg.Models.Small
		}
	}
}

func mergeOptions(base, global, local *Config) {
	for _, cfg := range []*Config{global, local} {
		if cfg == nil {
			continue
		}
		baseOptions := base.Options
		other := cfg.Options
		if len(other.ContextPaths) > 0 {
			baseOptions.ContextPaths = append(baseOptions.ContextPaths, other.ContextPaths...)
		}

		if other.TUI.CompactMode {
			baseOptions.TUI.CompactMode = other.TUI.CompactMode
		}

		if other.Debug {
			baseOptions.Debug = other.Debug
		}

		if other.DebugLSP {
			baseOptions.DebugLSP = other.DebugLSP
		}

		if other.DisableAutoSummarize {
			baseOptions.DisableAutoSummarize = other.DisableAutoSummarize
		}

		if other.DataDirectory != "" {
			baseOptions.DataDirectory = other.DataDirectory
		}
		base.Options = baseOptions
	}
}

func mergeAgents(base, global, local *Config) {
	for _, cfg := range []*Config{global, local} {
		if cfg == nil {
			continue
		}
		for agentID, newAgent := range cfg.Agents {
			if _, ok := base.Agents[agentID]; !ok {
				newAgent.ID = agentID
				if newAgent.Model == "" {
					newAgent.Model = LargeModel
				}
				if len(newAgent.ContextPaths) > 0 {
					newAgent.ContextPaths = append(base.Options.ContextPaths, newAgent.ContextPaths...)
				} else {
					newAgent.ContextPaths = base.Options.ContextPaths
				}
				base.Agents[agentID] = newAgent
			} else {
				baseAgent := base.Agents[agentID]

				if agentID == AgentCoder || agentID == AgentTask {
					if newAgent.Model != "" {
						baseAgent.Model = newAgent.Model
					}
					if newAgent.AllowedMCP != nil {
						baseAgent.AllowedMCP = newAgent.AllowedMCP
					}
					if newAgent.AllowedLSP != nil {
						baseAgent.AllowedLSP = newAgent.AllowedLSP
					}
					// Context paths are additive for known agents too
					if len(newAgent.ContextPaths) > 0 {
						baseAgent.ContextPaths = append(baseAgent.ContextPaths, newAgent.ContextPaths...)
					}
				} else {
					if newAgent.Name != "" {
						baseAgent.Name = newAgent.Name
					}
					if newAgent.Description != "" {
						baseAgent.Description = newAgent.Description
					}
					if newAgent.Model != "" {
						baseAgent.Model = newAgent.Model
					} else if baseAgent.Model == "" {
						baseAgent.Model = LargeModel
					}

					baseAgent.Disabled = newAgent.Disabled

					if newAgent.AllowedTools != nil {
						baseAgent.AllowedTools = newAgent.AllowedTools
					}
					if newAgent.AllowedMCP != nil {
						baseAgent.AllowedMCP = newAgent.AllowedMCP
					}
					if newAgent.AllowedLSP != nil {
						baseAgent.AllowedLSP = newAgent.AllowedLSP
					}
					if len(newAgent.ContextPaths) > 0 {
						baseAgent.ContextPaths = append(baseAgent.ContextPaths, newAgent.ContextPaths...)
					}
				}

				base.Agents[agentID] = baseAgent
			}
		}
	}
}

func mergeMCPs(base, global, local *Config) {
	for _, cfg := range []*Config{global, local} {
		if cfg == nil {
			continue
		}
		maps.Copy(base.MCP, cfg.MCP)
	}
}

func mergeLSPs(base, global, local *Config) {
	for _, cfg := range []*Config{global, local} {
		if cfg == nil {
			continue
		}
		maps.Copy(base.LSP, cfg.LSP)
	}
}

func mergeProviderConfigs(base, global, local *Config) {
	for _, cfg := range []*Config{global, local} {
		if cfg == nil {
			continue
		}
		for providerName, p := range cfg.Providers {
			p.ID = providerName
			if _, ok := base.Providers[providerName]; !ok {
				base.Providers[providerName] = p
			} else {
				base.Providers[providerName] = mergeProviderConfig(providerName, base.Providers[providerName], p)
			}
		}
	}

	finalProviders := make(map[provider.InferenceProvider]ProviderConfig)
	for providerName, providerConfig := range base.Providers {
		err := validateProvider(providerName, providerConfig)
		if err != nil {
			logging.Warn("Skipping provider", "name", providerName, "error", err)
			continue // Skip invalid providers
		}
		finalProviders[providerName] = providerConfig
	}
	base.Providers = finalProviders
}

func providerDefaultConfig(providerID provider.InferenceProvider) ProviderConfig {
	switch providerID {
	case provider.InferenceProviderAnthropic:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeAnthropic,
		}
	case provider.InferenceProviderOpenAI:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeOpenAI,
		}
	case provider.InferenceProviderGemini:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeGemini,
		}
	case provider.InferenceProviderBedrock:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeBedrock,
		}
	case provider.InferenceProviderAzure:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeAzure,
		}
	case provider.InferenceProviderOpenRouter:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeOpenAI,
			BaseURL:      "https://openrouter.ai/api/v1",
			ExtraHeaders: map[string]string{
				"HTTP-Referer": "crush.charm.land",
				"X-Title":      "Crush",
			},
		}
	case provider.InferenceProviderXAI:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeXAI,
			BaseURL:      "https://api.x.ai/v1",
		}
	case provider.InferenceProviderVertexAI:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeVertexAI,
		}
	default:
		return ProviderConfig{
			ID:           providerID,
			ProviderType: provider.TypeOpenAI,
		}
	}
}

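// defaultConfigBasedOnEnv builds the initial configuration purely from
// environment variables. As a sketch, enabling Vertex AI or Bedrock without a
// config file could look like this (shell syntax; the values are placeholders):
//
//	export GOOGLE_GENAI_USE_VERTEXAI=true
//	export GOOGLE_CLOUD_PROJECT=my-project
//	export GOOGLE_CLOUD_LOCATION=us-central1
//
//	export AWS_PROFILE=default
//	export AWS_REGION=us-east-1
//
// Other providers are enabled by exporting the API-key variable referenced by
// their provider definition (the "$VAR" form handled below).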
func defaultConfigBasedOnEnv() *Config {
	cfg := &Config{
		Options: Options{
			DataDirectory: defaultDataDirectory,
			ContextPaths:  defaultContextPaths,
		},
		Providers: make(map[provider.InferenceProvider]ProviderConfig),
		Agents:    make(map[AgentID]Agent),
		LSP:       make(map[string]LSPConfig),
		MCP:       make(map[string]MCP),
	}

	providers := Providers()

	for _, p := range providers {
		if strings.HasPrefix(p.APIKey, "$") {
			envVar := strings.TrimPrefix(p.APIKey, "$")
			if apiKey := os.Getenv(envVar); apiKey != "" {
				providerConfig := providerDefaultConfig(p.ID)
				providerConfig.APIKey = apiKey
				providerConfig.DefaultLargeModel = p.DefaultLargeModelID
				providerConfig.DefaultSmallModel = p.DefaultSmallModelID
				baseURL := p.APIEndpoint
				if strings.HasPrefix(baseURL, "$") {
					envVar := strings.TrimPrefix(baseURL, "$")
					baseURL = os.Getenv(envVar)
				}
				providerConfig.BaseURL = baseURL
				for _, model := range p.Models {
					configModel := Model{
						ID:                 model.ID,
						Name:               model.Name,
						CostPer1MIn:        model.CostPer1MIn,
						CostPer1MOut:       model.CostPer1MOut,
						CostPer1MInCached:  model.CostPer1MInCached,
						CostPer1MOutCached: model.CostPer1MOutCached,
						ContextWindow:      model.ContextWindow,
						DefaultMaxTokens:   model.DefaultMaxTokens,
						CanReason:          model.CanReason,
						SupportsImages:     model.SupportsImages,
					}
					// Set reasoning effort for reasoning models
					if model.HasReasoningEffort && model.DefaultReasoningEffort != "" {
						configModel.HasReasoningEffort = model.HasReasoningEffort
						configModel.ReasoningEffort = model.DefaultReasoningEffort
					}
					providerConfig.Models = append(providerConfig.Models, configModel)
				}
				cfg.Providers[p.ID] = providerConfig
			}
		}
	}
	// TODO: support local models

	if useVertexAI := os.Getenv("GOOGLE_GENAI_USE_VERTEXAI"); useVertexAI == "true" {
		providerConfig := providerDefaultConfig(provider.InferenceProviderVertexAI)
		providerConfig.ExtraParams = map[string]string{
			"project":  os.Getenv("GOOGLE_CLOUD_PROJECT"),
			"location": os.Getenv("GOOGLE_CLOUD_LOCATION"),
		}
		// Find the VertexAI provider definition to get default models
		for _, p := range providers {
			if p.ID == provider.InferenceProviderVertexAI {
				providerConfig.DefaultLargeModel = p.DefaultLargeModelID
				providerConfig.DefaultSmallModel = p.DefaultSmallModelID
				for _, model := range p.Models {
					configModel := Model{
						ID:                 model.ID,
						Name:               model.Name,
						CostPer1MIn:        model.CostPer1MIn,
						CostPer1MOut:       model.CostPer1MOut,
						CostPer1MInCached:  model.CostPer1MInCached,
						CostPer1MOutCached: model.CostPer1MOutCached,
						ContextWindow:      model.ContextWindow,
						DefaultMaxTokens:   model.DefaultMaxTokens,
						CanReason:          model.CanReason,
						SupportsImages:     model.SupportsImages,
					}
					// Set reasoning effort for reasoning models
					if model.HasReasoningEffort && model.DefaultReasoningEffort != "" {
						configModel.HasReasoningEffort = model.HasReasoningEffort
						configModel.ReasoningEffort = model.DefaultReasoningEffort
					}
					providerConfig.Models = append(providerConfig.Models, configModel)
				}
				break
			}
		}
		cfg.Providers[provider.InferenceProviderVertexAI] = providerConfig
	}

	if hasAWSCredentials() {
		providerConfig := providerDefaultConfig(provider.InferenceProviderBedrock)
		providerConfig.ExtraParams = map[string]string{
			"region": os.Getenv("AWS_DEFAULT_REGION"),
		}
		if providerConfig.ExtraParams["region"] == "" {
			providerConfig.ExtraParams["region"] = os.Getenv("AWS_REGION")
		}
		// Find the Bedrock provider definition to get default models
		for _, p := range providers {
			if p.ID == provider.InferenceProviderBedrock {
				providerConfig.DefaultLargeModel = p.DefaultLargeModelID
				providerConfig.DefaultSmallModel = p.DefaultSmallModelID
				for _, model := range p.Models {
					configModel := Model{
						ID:                 model.ID,
						Name:               model.Name,
						CostPer1MIn:        model.CostPer1MIn,
						CostPer1MOut:       model.CostPer1MOut,
						CostPer1MInCached:  model.CostPer1MInCached,
						CostPer1MOutCached: model.CostPer1MOutCached,
						ContextWindow:      model.ContextWindow,
						DefaultMaxTokens:   model.DefaultMaxTokens,
						CanReason:          model.CanReason,
						SupportsImages:     model.SupportsImages,
					}
					// Set reasoning effort for reasoning models
					if model.HasReasoningEffort && model.DefaultReasoningEffort != "" {
						configModel.HasReasoningEffort = model.HasReasoningEffort
						configModel.ReasoningEffort = model.DefaultReasoningEffort
					}
					providerConfig.Models = append(providerConfig.Models, configModel)
				}
				break
			}
		}
		cfg.Providers[provider.InferenceProviderBedrock] = providerConfig
	}
	return cfg
}

func hasAWSCredentials() bool {
	if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
		return true
	}

	if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
		return true
	}

	if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
		return true
	}

	if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
		os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
		return true
	}

	return false
}

func WorkingDirectory() string {
	return cwd
}

// TODO: Handle error state

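// GetAgentModel resolves the concrete Model an agent will use by following
// agent -> preferred model type -> provider -> model ID. A short usage sketch
// from a hypothetical call site in another package:
//
//	m := config.GetAgentModel(config.AgentCoder)
//	if m.ID == "" {
//		// not configured; fall back or report an error
//	}
//	fmt.Println(m.Name, m.ContextWindow)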
func GetAgentModel(agentID AgentID) Model {
	cfg := Get()
	agent, ok := cfg.Agents[agentID]
	if !ok {
		logging.Error("Agent not found", "agent_id", agentID)
		return Model{}
	}

	var model PreferredModel
	switch agent.Model {
	case LargeModel:
		model = cfg.Models.Large
	case SmallModel:
		model = cfg.Models.Small
	default:
		logging.Warn("Unknown model type for agent", "agent_id", agentID, "model_type", agent.Model)
		model = cfg.Models.Large // Fallback to large model
	}
	providerConfig, ok := cfg.Providers[model.Provider]
	if !ok {
		logging.Error("Provider not found for agent", "agent_id", agentID, "provider", model.Provider)
		return Model{}
	}

	for _, m := range providerConfig.Models {
		if m.ID == model.ModelID {
			return m
		}
	}

	logging.Error("Model not found for agent", "agent_id", agentID, "model_id", model.ModelID)
	return Model{}
}

// GetAgentEffectiveMaxTokens returns the effective max tokens for an agent,
// considering any overrides from the preferred model configuration
func GetAgentEffectiveMaxTokens(agentID AgentID) int64 {
	cfg := Get()
	agent, ok := cfg.Agents[agentID]
	if !ok {
		logging.Error("Agent not found", "agent_id", agentID)
		return 0
	}

	var preferredModel PreferredModel
	switch agent.Model {
	case LargeModel:
		preferredModel = cfg.Models.Large
	case SmallModel:
		preferredModel = cfg.Models.Small
	default:
		logging.Warn("Unknown model type for agent", "agent_id", agentID, "model_type", agent.Model)
		preferredModel = cfg.Models.Large // Fallback to large model
	}

	// Get the base model configuration
	baseModel := GetAgentModel(agentID)
	if baseModel.ID == "" {
		return 0
	}

	// Start with the default max tokens from the base model
	maxTokens := baseModel.DefaultMaxTokens

	// Override with preferred model max tokens if set
	if preferredModel.MaxTokens > 0 {
		maxTokens = preferredModel.MaxTokens
	}

	return maxTokens
}

func GetAgentProvider(agentID AgentID) ProviderConfig {
	cfg := Get()
	agent, ok := cfg.Agents[agentID]
	if !ok {
		logging.Error("Agent not found", "agent_id", agentID)
		return ProviderConfig{}
	}

	var model PreferredModel
	switch agent.Model {
	case LargeModel:
		model = cfg.Models.Large
	case SmallModel:
		model = cfg.Models.Small
	default:
		logging.Warn("Unknown model type for agent", "agent_id", agentID, "model_type", agent.Model)
		model = cfg.Models.Large // Fallback to large model
	}

	providerConfig, ok := cfg.Providers[model.Provider]
	if !ok {
		logging.Error("Provider not found for agent", "agent_id", agentID, "provider", model.Provider)
		return ProviderConfig{}
	}

	return providerConfig
}

func GetProviderModel(providerID provider.InferenceProvider, modelID string) Model {
	cfg := Get()
	providerConfig, ok := cfg.Providers[providerID]
	if !ok {
		logging.Error("Provider not found", "provider", providerID)
		return Model{}
	}

	for _, model := range providerConfig.Models {
		if model.ID == modelID {
			return model
		}
	}

	logging.Error("Model not found for provider", "provider", providerID, "model_id", modelID)
	return Model{}
}

func GetModel(modelType ModelType) Model {
	cfg := Get()
	var model PreferredModel
	switch modelType {
	case LargeModel:
		model = cfg.Models.Large
	case SmallModel:
		model = cfg.Models.Small
	default:
		model = cfg.Models.Large // Fallback to large model
	}
	providerConfig, ok := cfg.Providers[model.Provider]
	if !ok {
		return Model{}
	}

	for _, m := range providerConfig.Models {
		if m.ID == model.ModelID {
			return m
		}
	}
	return Model{}
}

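// UpdatePreferredModel swaps the preferred model for one model type on the
// in-memory configuration; it does not persist the change to disk. A sketch,
// where the model ID is a placeholder:
//
//	err := config.UpdatePreferredModel(config.LargeModel, config.PreferredModel{
//		Provider: provider.InferenceProviderOpenAI,
//		ModelID:  "some-model-id",
//	})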
func UpdatePreferredModel(modelType ModelType, model PreferredModel) error {
	cfg := Get()
	switch modelType {
	case LargeModel:
		cfg.Models.Large = model
	case SmallModel:
		cfg.Models.Small = model
	default:
		return fmt.Errorf("unknown model type: %s", modelType)
	}
	return nil
}

// ValidationError represents a configuration validation error
type ValidationError struct {
	Field   string
	Message string
}

func (e ValidationError) Error() string {
	return fmt.Sprintf("validation error in %s: %s", e.Field, e.Message)
}

// ValidationErrors represents multiple validation errors
type ValidationErrors []ValidationError

func (e ValidationErrors) Error() string {
	if len(e) == 0 {
		return "no validation errors"
	}
	if len(e) == 1 {
		return e[0].Error()
	}

	var messages []string
	for _, err := range e {
		messages = append(messages, err.Error())
	}
	return fmt.Sprintf("multiple validation errors: %s", strings.Join(messages, "; "))
}

// HasErrors returns true if there are any validation errors
func (e ValidationErrors) HasErrors() bool {
	return len(e) > 0
}

// Add appends a new validation error
func (e *ValidationErrors) Add(field, message string) {
	*e = append(*e, ValidationError{Field: field, Message: message})
}

// Validate performs comprehensive validation of the configuration
func (c *Config) Validate() error {
	var errors ValidationErrors

	// Validate providers
	c.validateProviders(&errors)

	// Validate models
	c.validateModels(&errors)

	// Validate agents
	c.validateAgents(&errors)

	// Validate options
	c.validateOptions(&errors)

	// Validate MCP configurations
	c.validateMCPs(&errors)

	// Validate LSP configurations
	c.validateLSPs(&errors)

	// Validate cross-references
	c.validateCrossReferences(&errors)

	// Validate completeness
	c.validateCompleteness(&errors)

	if errors.HasErrors() {
		return errors
	}

	return nil
}
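
// Callers can distinguish individual problems by unwrapping the returned error
// into ValidationErrors. A sketch of a hypothetical call site:
//
//	if err := cfg.Validate(); err != nil {
//		var verrs ValidationErrors
//		if errors.As(err, &verrs) {
//			for _, ve := range verrs {
//				logging.Warn("config issue", "field", ve.Field, "message", ve.Message)
//			}
//		}
//	}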

// validateProviders validates all provider configurations
func (c *Config) validateProviders(errors *ValidationErrors) {
	if c.Providers == nil {
		c.Providers = make(map[provider.InferenceProvider]ProviderConfig)
	}

	knownProviders := provider.KnownProviders()
	validTypes := []provider.Type{
		provider.TypeOpenAI,
		provider.TypeAnthropic,
		provider.TypeGemini,
		provider.TypeAzure,
		provider.TypeBedrock,
		provider.TypeVertexAI,
		provider.TypeXAI,
	}

	for providerID, providerConfig := range c.Providers {
		fieldPrefix := fmt.Sprintf("providers.%s", providerID)

		// Validate API key for non-disabled providers
		if !providerConfig.Disabled && providerConfig.APIKey == "" {
			// Special case for AWS Bedrock and VertexAI which may use other auth methods
			if providerID != provider.InferenceProviderBedrock && providerID != provider.InferenceProviderVertexAI {
				errors.Add(fieldPrefix+".api_key", "API key is required for non-disabled providers")
			}
		}

		// Validate provider type
		validType := slices.Contains(validTypes, providerConfig.ProviderType)
		if !validType {
			errors.Add(fieldPrefix+".provider_type", fmt.Sprintf("invalid provider type: %s", providerConfig.ProviderType))
		}

		// Validate custom providers
		isKnownProvider := slices.Contains(knownProviders, providerID)

		if !isKnownProvider {
			// Custom provider validation
			if providerConfig.BaseURL == "" {
				errors.Add(fieldPrefix+".base_url", "BaseURL is required for custom providers")
			}
			if providerConfig.ProviderType != provider.TypeOpenAI {
				errors.Add(fieldPrefix+".provider_type", "custom providers currently only support OpenAI type")
			}
		}

		// Validate models
		modelIDs := make(map[string]bool)
		for i, model := range providerConfig.Models {
			modelFieldPrefix := fmt.Sprintf("%s.models[%d]", fieldPrefix, i)

			// Check for duplicate model IDs
			if modelIDs[model.ID] {
				errors.Add(modelFieldPrefix+".id", fmt.Sprintf("duplicate model ID: %s", model.ID))
			}
			modelIDs[model.ID] = true

			// Validate required model fields
			if model.ID == "" {
				errors.Add(modelFieldPrefix+".id", "model ID is required")
			}
			if model.Name == "" {
				errors.Add(modelFieldPrefix+".name", "model name is required")
			}
			if model.ContextWindow <= 0 {
				errors.Add(modelFieldPrefix+".context_window", "context window must be positive")
			}
			if model.DefaultMaxTokens <= 0 {
				errors.Add(modelFieldPrefix+".default_max_tokens", "default max tokens must be positive")
			}
			if model.DefaultMaxTokens > model.ContextWindow {
				errors.Add(modelFieldPrefix+".default_max_tokens", "default max tokens cannot exceed context window")
			}

			// Validate cost fields
			if model.CostPer1MIn < 0 {
				errors.Add(modelFieldPrefix+".cost_per_1m_in", "cost per 1M input tokens cannot be negative")
			}
			if model.CostPer1MOut < 0 {
				errors.Add(modelFieldPrefix+".cost_per_1m_out", "cost per 1M output tokens cannot be negative")
			}
			if model.CostPer1MInCached < 0 {
				errors.Add(modelFieldPrefix+".cost_per_1m_in_cached", "cached cost per 1M input tokens cannot be negative")
			}
			if model.CostPer1MOutCached < 0 {
				errors.Add(modelFieldPrefix+".cost_per_1m_out_cached", "cached cost per 1M output tokens cannot be negative")
			}
		}

		// Validate default model references
		if providerConfig.DefaultLargeModel != "" {
			if !modelIDs[providerConfig.DefaultLargeModel] {
				errors.Add(fieldPrefix+".default_large_model", fmt.Sprintf("default large model '%s' not found in provider models", providerConfig.DefaultLargeModel))
			}
		}
		if providerConfig.DefaultSmallModel != "" {
			if !modelIDs[providerConfig.DefaultSmallModel] {
				errors.Add(fieldPrefix+".default_small_model", fmt.Sprintf("default small model '%s' not found in provider models", providerConfig.DefaultSmallModel))
			}
		}

		// Validate provider-specific requirements
		c.validateProviderSpecific(providerID, providerConfig, errors)
	}
}

// validateProviderSpecific validates provider-specific requirements
func (c *Config) validateProviderSpecific(providerID provider.InferenceProvider, providerConfig ProviderConfig, errors *ValidationErrors) {
	fieldPrefix := fmt.Sprintf("providers.%s", providerID)

	switch providerID {
	case provider.InferenceProviderVertexAI:
		if !providerConfig.Disabled {
			if providerConfig.ExtraParams == nil {
				errors.Add(fieldPrefix+".extra_params", "VertexAI requires extra_params configuration")
			} else {
				if providerConfig.ExtraParams["project"] == "" {
					errors.Add(fieldPrefix+".extra_params.project", "VertexAI requires project parameter")
				}
				if providerConfig.ExtraParams["location"] == "" {
					errors.Add(fieldPrefix+".extra_params.location", "VertexAI requires location parameter")
				}
			}
		}
	case provider.InferenceProviderBedrock:
		if !providerConfig.Disabled {
			if providerConfig.ExtraParams == nil || providerConfig.ExtraParams["region"] == "" {
				errors.Add(fieldPrefix+".extra_params.region", "Bedrock requires region parameter")
			}
			// Check for AWS credentials in environment
			if !hasAWSCredentials() {
				errors.Add(fieldPrefix, "Bedrock requires AWS credentials in environment")
			}
		}
	}
}

// validateModels validates preferred model configurations
func (c *Config) validateModels(errors *ValidationErrors) {
	// Validate large model
	if c.Models.Large.ModelID != "" || c.Models.Large.Provider != "" {
		if c.Models.Large.ModelID == "" {
			errors.Add("models.large.model_id", "large model ID is required when provider is set")
		}
		if c.Models.Large.Provider == "" {
			errors.Add("models.large.provider", "large model provider is required when model ID is set")
		}

		// Check if provider exists and is not disabled
		if providerConfig, exists := c.Providers[c.Models.Large.Provider]; exists {
			if providerConfig.Disabled {
				errors.Add("models.large.provider", "large model provider is disabled")
			}

			// Check if model exists in provider
			modelExists := false
			for _, model := range providerConfig.Models {
				if model.ID == c.Models.Large.ModelID {
					modelExists = true
					break
				}
			}
			if !modelExists {
				errors.Add("models.large.model_id", fmt.Sprintf("large model '%s' not found in provider '%s'", c.Models.Large.ModelID, c.Models.Large.Provider))
			}
		} else {
			errors.Add("models.large.provider", fmt.Sprintf("large model provider '%s' not found", c.Models.Large.Provider))
		}
	}

	// Validate small model
	if c.Models.Small.ModelID != "" || c.Models.Small.Provider != "" {
		if c.Models.Small.ModelID == "" {
			errors.Add("models.small.model_id", "small model ID is required when provider is set")
		}
		if c.Models.Small.Provider == "" {
			errors.Add("models.small.provider", "small model provider is required when model ID is set")
		}

		// Check if provider exists and is not disabled
		if providerConfig, exists := c.Providers[c.Models.Small.Provider]; exists {
			if providerConfig.Disabled {
				errors.Add("models.small.provider", "small model provider is disabled")
			}

			// Check if model exists in provider
			modelExists := false
			for _, model := range providerConfig.Models {
				if model.ID == c.Models.Small.ModelID {
					modelExists = true
					break
				}
			}
			if !modelExists {
				errors.Add("models.small.model_id", fmt.Sprintf("small model '%s' not found in provider '%s'", c.Models.Small.ModelID, c.Models.Small.Provider))
			}
		} else {
			errors.Add("models.small.provider", fmt.Sprintf("small model provider '%s' not found", c.Models.Small.Provider))
		}
	}
}

// validateAgents validates agent configurations
func (c *Config) validateAgents(errors *ValidationErrors) {
	if c.Agents == nil {
		c.Agents = make(map[AgentID]Agent)
	}

	validTools := []string{
		"bash", "edit", "fetch", "glob", "grep", "ls", "sourcegraph", "view", "write", "agent",
	}

	for agentID, agent := range c.Agents {
		fieldPrefix := fmt.Sprintf("agents.%s", agentID)

		// Validate agent ID consistency
		if agent.ID != agentID {
			errors.Add(fieldPrefix+".id", fmt.Sprintf("agent ID mismatch: expected '%s', got '%s'", agentID, agent.ID))
		}

		// Validate required fields
		if agent.ID == "" {
			errors.Add(fieldPrefix+".id", "agent ID is required")
		}
		if agent.Name == "" {
			errors.Add(fieldPrefix+".name", "agent name is required")
		}

		// Validate model type
		if agent.Model != LargeModel && agent.Model != SmallModel {
			errors.Add(fieldPrefix+".model", fmt.Sprintf("invalid model type: %s (must be 'large' or 'small')", agent.Model))
		}

		// Validate allowed tools
		if agent.AllowedTools != nil {
			for i, tool := range agent.AllowedTools {
				validTool := slices.Contains(validTools, tool)
				if !validTool {
					errors.Add(fmt.Sprintf("%s.allowed_tools[%d]", fieldPrefix, i), fmt.Sprintf("unknown tool: %s", tool))
				}
			}
		}

		// Validate MCP references
		if agent.AllowedMCP != nil {
			for mcpName := range agent.AllowedMCP {
				if _, exists := c.MCP[mcpName]; !exists {
					errors.Add(fieldPrefix+".allowed_mcp", fmt.Sprintf("referenced MCP '%s' not found", mcpName))
				}
			}
		}

		// Validate LSP references
		if agent.AllowedLSP != nil {
			for _, lspName := range agent.AllowedLSP {
				if _, exists := c.LSP[lspName]; !exists {
					errors.Add(fieldPrefix+".allowed_lsp", fmt.Sprintf("referenced LSP '%s' not found", lspName))
				}
			}
		}

		// Validate context paths (basic path validation)
		for i, contextPath := range agent.ContextPaths {
			if contextPath == "" {
				errors.Add(fmt.Sprintf("%s.context_paths[%d]", fieldPrefix, i), "context path cannot be empty")
			}
			// Check for invalid characters in path
			if strings.Contains(contextPath, "\x00") {
				errors.Add(fmt.Sprintf("%s.context_paths[%d]", fieldPrefix, i), "context path contains invalid characters")
			}
		}

		// Validate known agents maintain their core properties
		if agentID == AgentCoder {
			if agent.Name != "Coder" {
				errors.Add(fieldPrefix+".name", "coder agent name cannot be changed")
			}
			if agent.Description != "An agent that helps with executing coding tasks." {
				errors.Add(fieldPrefix+".description", "coder agent description cannot be changed")
			}
		} else if agentID == AgentTask {
			if agent.Name != "Task" {
				errors.Add(fieldPrefix+".name", "task agent name cannot be changed")
			}
			if agent.Description != "An agent that helps with searching for context and finding implementation details." {
				errors.Add(fieldPrefix+".description", "task agent description cannot be changed")
			}
			expectedTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
			if agent.AllowedTools != nil && !slices.Equal(agent.AllowedTools, expectedTools) {
				errors.Add(fieldPrefix+".allowed_tools", "task agent allowed tools cannot be changed")
			}
		}
	}
}

// validateOptions validates configuration options
func (c *Config) validateOptions(errors *ValidationErrors) {
	// Validate data directory
	if c.Options.DataDirectory == "" {
		errors.Add("options.data_directory", "data directory is required")
	}

	// Validate context paths
	for i, contextPath := range c.Options.ContextPaths {
		if contextPath == "" {
			errors.Add(fmt.Sprintf("options.context_paths[%d]", i), "context path cannot be empty")
		}
		if strings.Contains(contextPath, "\x00") {
			errors.Add(fmt.Sprintf("options.context_paths[%d]", i), "context path contains invalid characters")
		}
	}
}

// validateMCPs validates MCP configurations
func (c *Config) validateMCPs(errors *ValidationErrors) {
	if c.MCP == nil {
		c.MCP = make(map[string]MCP)
	}

	for mcpName, mcpConfig := range c.MCP {
		fieldPrefix := fmt.Sprintf("mcp.%s", mcpName)

		// Validate MCP type
		if mcpConfig.Type != MCPStdio && mcpConfig.Type != MCPSse {
			errors.Add(fieldPrefix+".type", fmt.Sprintf("invalid MCP type: %s (must be 'stdio' or 'sse')", mcpConfig.Type))
		}

		// Validate based on type
		if mcpConfig.Type == MCPStdio {
			if mcpConfig.Command == "" {
				errors.Add(fieldPrefix+".command", "command is required for stdio MCP")
			}
		} else if mcpConfig.Type == MCPSse {
			if mcpConfig.URL == "" {
				errors.Add(fieldPrefix+".url", "URL is required for SSE MCP")
			}
		}
	}
}

// validateLSPs validates LSP configurations
func (c *Config) validateLSPs(errors *ValidationErrors) {
	if c.LSP == nil {
		c.LSP = make(map[string]LSPConfig)
	}

	for lspName, lspConfig := range c.LSP {
		fieldPrefix := fmt.Sprintf("lsp.%s", lspName)

		if lspConfig.Command == "" {
			errors.Add(fieldPrefix+".command", "command is required for LSP")
		}
	}
}

// validateCrossReferences validates cross-references between different config sections
func (c *Config) validateCrossReferences(errors *ValidationErrors) {
	// Validate that agents can use their assigned model types
	for agentID, agent := range c.Agents {
		fieldPrefix := fmt.Sprintf("agents.%s", agentID)

		var preferredModel PreferredModel
		switch agent.Model {
		case LargeModel:
			preferredModel = c.Models.Large
		case SmallModel:
			preferredModel = c.Models.Small
		}

		if preferredModel.Provider != "" {
			if providerConfig, exists := c.Providers[preferredModel.Provider]; exists {
				if providerConfig.Disabled {
					errors.Add(fieldPrefix+".model", fmt.Sprintf("agent cannot use model type '%s' because provider '%s' is disabled", agent.Model, preferredModel.Provider))
				}
			}
		}
	}
}

// validateCompleteness validates that the configuration is complete and usable
func (c *Config) validateCompleteness(errors *ValidationErrors) {
	// Check for at least one valid, non-disabled provider
	hasValidProvider := false
	for _, providerConfig := range c.Providers {
		if !providerConfig.Disabled {
			hasValidProvider = true
			break
		}
	}
	if !hasValidProvider {
		errors.Add("providers", "at least one non-disabled provider is required")
	}

	// Check that default agents exist
	if _, exists := c.Agents[AgentCoder]; !exists {
		errors.Add("agents", "coder agent is required")
	}
	if _, exists := c.Agents[AgentTask]; !exists {
		errors.Add("agents", "task agent is required")
	}

	// Check that preferred models are set if providers exist
	if hasValidProvider {
		if c.Models.Large.ModelID == "" || c.Models.Large.Provider == "" {
			errors.Add("models.large", "large preferred model must be configured when providers are available")
		}
		if c.Models.Small.ModelID == "" || c.Models.Small.Provider == "" {
			errors.Add("models.small", "small preferred model must be configured when providers are available")
		}
	}
}

// JSONSchemaExtend adds custom schema properties for AgentID
func (AgentID) JSONSchemaExtend(schema *jsonschema.Schema) {
	schema.Enum = []any{
		string(AgentCoder),
		string(AgentTask),
	}
}

// JSONSchemaExtend adds custom schema properties for ModelType
func (ModelType) JSONSchemaExtend(schema *jsonschema.Schema) {
	schema.Enum = []any{
		string(LargeModel),
		string(SmallModel),
	}
}

// JSONSchemaExtend adds custom schema properties for MCPType
func (MCPType) JSONSchemaExtend(schema *jsonschema.Schema) {
	schema.Enum = []any{
		string(MCPStdio),
		string(MCPSse),
	}
}