models.go

  1package models
  2
  3import (
  4	"context"
  5	"fmt"
  6	"log/slog"
  7	"net/http"
  8	"time"
  9
 10	"shelley.exe.dev/db"
 11	"shelley.exe.dev/db/generated"
 12	"shelley.exe.dev/llm"
 13	"shelley.exe.dev/llm/ant"
 14	"shelley.exe.dev/llm/gem"
 15	"shelley.exe.dev/llm/llmhttp"
 16	"shelley.exe.dev/llm/oai"
 17	"shelley.exe.dev/loop"
 18)
 19
// Provider represents an LLM provider identity. It is used to select the
// correct API key and gateway route for a model, and is stored as a plain
// string so it round-trips through configuration and the database.
type Provider string

// The built-in providers. ProviderBuiltIn marks models that ship with
// Shelley and require no external API (see the "predictable" model in All).
const (
	ProviderOpenAI    Provider = "openai"
	ProviderAnthropic Provider = "anthropic"
	ProviderFireworks Provider = "fireworks"
	ProviderGemini    Provider = "gemini"
	ProviderBuiltIn   Provider = "builtin"
)
 30
// ModelSource describes where a model's configuration comes from.
// Values are surfaced to users verbatim (see Model.Source and serviceEntry.source).
type ModelSource string

const (
	SourceGateway ModelSource = "exe.dev gateway"
	SourceEnvVar  ModelSource = "env"    // Will be combined with env var name
	SourceCustom  ModelSource = "custom" // User-configured custom model
)
 39
// Model represents a configured LLM model in Shelley.
// Built-in models are declared in All; custom models from the database are
// represented separately (see Manager.loadCustomModels).
type Model struct {
	// ID is the user-facing identifier for this model
	ID string

	// Provider is the LLM provider (OpenAI, Anthropic, etc.)
	Provider Provider

	// Description is a human-readable description
	Description string

	// Tags is a comma-separated list of tags (e.g., "slug")
	Tags string

	// RequiredEnvVars are the environment variables required for this model.
	// The first entry is also used by Source to report "$VAR_NAME".
	RequiredEnvVars []string

	// GatewayEnabled indicates whether this model is available when using a gateway.
	// NewManager skips models with GatewayEnabled=false when a gateway is configured.
	GatewayEnabled bool

	// Factory creates an llm.Service instance for this model.
	// It returns an error when required configuration (e.g. an API key) is missing.
	Factory func(config *Config, httpc *http.Client) (llm.Service, error)
}
 63
 64// Source returns a human-readable description of where this model's configuration comes from.
 65// For example: "exe.dev gateway", "$ANTHROPIC_API_KEY", etc.
 66func (m Model) Source(cfg *Config) string {
 67	// Predictable model has no source
 68	if m.ID == "predictable" {
 69		return ""
 70	}
 71
 72	// Check if using gateway with implicit keys
 73	if cfg.Gateway != "" {
 74		// Gateway is configured - check if this model is using gateway (implicit key)
 75		switch m.Provider {
 76		case ProviderAnthropic:
 77			if cfg.AnthropicAPIKey == "implicit" {
 78				return string(SourceGateway)
 79			}
 80			return "$ANTHROPIC_API_KEY"
 81		case ProviderOpenAI:
 82			if cfg.OpenAIAPIKey == "implicit" {
 83				return string(SourceGateway)
 84			}
 85			return "$OPENAI_API_KEY"
 86		case ProviderFireworks:
 87			if cfg.FireworksAPIKey == "implicit" {
 88				return string(SourceGateway)
 89			}
 90			return "$FIREWORKS_API_KEY"
 91		case ProviderGemini:
 92			if cfg.GeminiAPIKey == "implicit" {
 93				return string(SourceGateway)
 94			}
 95			return "$GEMINI_API_KEY"
 96		}
 97	}
 98
 99	// No gateway - use env var names based on RequiredEnvVars
100	if len(m.RequiredEnvVars) > 0 {
101		return "$" + m.RequiredEnvVars[0]
102	}
103	return ""
104}
105
// Config holds the configuration needed to create LLM services.
type Config struct {
	// API keys for each provider. When a gateway is in use, the sentinel
	// value "implicit" means the key is supplied by the gateway itself
	// (see Model.Source).
	AnthropicAPIKey string
	OpenAIAPIKey    string
	GeminiAPIKey    string
	FireworksAPIKey string

	// Gateway is the base URL of the LLM gateway (optional)
	// If set, model-specific suffixes will be appended
	Gateway string

	// Logger receives request-completion logs; may be nil (see NewManager).
	Logger *slog.Logger

	// Database for recording LLM requests (optional)
	DB *db.DB
}
123
124// getAnthropicURL returns the Anthropic API URL, with gateway suffix if gateway is set
125func (c *Config) getAnthropicURL() string {
126	if c.Gateway != "" {
127		return c.Gateway + "/_/gateway/anthropic/v1/messages"
128	}
129	return "" // use default from ant package
130}
131
132// getOpenAIURL returns the OpenAI API URL, with gateway suffix if gateway is set
133func (c *Config) getOpenAIURL() string {
134	if c.Gateway != "" {
135		return c.Gateway + "/_/gateway/openai/v1"
136	}
137	return "" // use default from oai package
138}
139
140// getGeminiURL returns the Gemini API URL, with gateway suffix if gateway is set
141func (c *Config) getGeminiURL() string {
142	if c.Gateway != "" {
143		return c.Gateway + "/_/gateway/gemini/v1/models/generate"
144	}
145	return "" // use default from gem package
146}
147
148// getFireworksURL returns the Fireworks API URL, with gateway suffix if gateway is set
149func (c *Config) getFireworksURL() string {
150	if c.Gateway != "" {
151		return c.Gateway + "/_/gateway/fireworks/inference/v1"
152	}
153	return "" // use default from oai package
154}
155
// All returns all available built-in models in Shelley.
//
// Order matters: the first entry is the default model (see Default), and
// NewManager registers models in this order. Each Factory validates that its
// required API key is present and returns an error otherwise, which callers
// treat as "model not available".
func All() []Model {
	return []Model{
		{
			ID:              "claude-opus-4.6",
			Provider:        ProviderAnthropic,
			Description:     "Claude Opus 4.6 (default)",
			RequiredEnvVars: []string{"ANTHROPIC_API_KEY"},
			GatewayEnabled:  true,
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.AnthropicAPIKey == "" {
					return nil, fmt.Errorf("claude-opus-4.6 requires ANTHROPIC_API_KEY")
				}
				svc := &ant.Service{APIKey: config.AnthropicAPIKey, Model: ant.Claude46Opus, HTTPC: httpc, ThinkingLevel: llm.ThinkingLevelMedium}
				// Only override the URL when a gateway is configured;
				// otherwise the ant package default is used.
				if url := config.getAnthropicURL(); url != "" {
					svc.URL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "claude-opus-4.5",
			Provider:        ProviderAnthropic,
			Description:     "Claude Opus 4.5",
			RequiredEnvVars: []string{"ANTHROPIC_API_KEY"},
			GatewayEnabled:  true,
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.AnthropicAPIKey == "" {
					return nil, fmt.Errorf("claude-opus-4.5 requires ANTHROPIC_API_KEY")
				}
				svc := &ant.Service{APIKey: config.AnthropicAPIKey, Model: ant.Claude45Opus, HTTPC: httpc, ThinkingLevel: llm.ThinkingLevelMedium}
				if url := config.getAnthropicURL(); url != "" {
					svc.URL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "claude-sonnet-4.5",
			Provider:        ProviderAnthropic,
			Description:     "Claude Sonnet 4.5",
			RequiredEnvVars: []string{"ANTHROPIC_API_KEY"},
			GatewayEnabled:  true,
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.AnthropicAPIKey == "" {
					return nil, fmt.Errorf("claude-sonnet-4.5 requires ANTHROPIC_API_KEY")
				}
				svc := &ant.Service{APIKey: config.AnthropicAPIKey, Model: ant.Claude45Sonnet, HTTPC: httpc, ThinkingLevel: llm.ThinkingLevelMedium}
				if url := config.getAnthropicURL(); url != "" {
					svc.URL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "claude-haiku-4.5",
			Provider:        ProviderAnthropic,
			Description:     "Claude Haiku 4.5",
			RequiredEnvVars: []string{"ANTHROPIC_API_KEY"},
			GatewayEnabled:  true,
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.AnthropicAPIKey == "" {
					return nil, fmt.Errorf("claude-haiku-4.5 requires ANTHROPIC_API_KEY")
				}
				svc := &ant.Service{APIKey: config.AnthropicAPIKey, Model: ant.Claude45Haiku, HTTPC: httpc, ThinkingLevel: llm.ThinkingLevelMedium}
				if url := config.getAnthropicURL(); url != "" {
					svc.URL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "glm-4.7-fireworks",
			Provider:        ProviderFireworks,
			Description:     "GLM-4.7 on Fireworks",
			RequiredEnvVars: []string{"FIREWORKS_API_KEY"},
			GatewayEnabled:  true,
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.FireworksAPIKey == "" {
					return nil, fmt.Errorf("glm-4.7-fireworks requires FIREWORKS_API_KEY")
				}
				// Fireworks models speak the OpenAI-compatible API.
				svc := &oai.Service{Model: oai.GLM47Fireworks, APIKey: config.FireworksAPIKey, HTTPC: httpc}
				if url := config.getFireworksURL(); url != "" {
					svc.ModelURL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "gpt-5.2-codex",
			Provider:        ProviderOpenAI,
			Description:     "GPT-5.2 Codex",
			RequiredEnvVars: []string{"OPENAI_API_KEY"},
			GatewayEnabled:  true,
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.OpenAIAPIKey == "" {
					return nil, fmt.Errorf("gpt-5.2-codex requires OPENAI_API_KEY")
				}
				// Uses the Responses API service rather than the chat service.
				svc := &oai.ResponsesService{Model: oai.GPT52Codex, APIKey: config.OpenAIAPIKey, HTTPC: httpc, ThinkingLevel: llm.ThinkingLevelMedium}
				if url := config.getOpenAIURL(); url != "" {
					svc.ModelURL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "qwen3-coder-fireworks",
			Provider:        ProviderFireworks,
			Description:     "Qwen3 Coder 480B on Fireworks",
			Tags:            "slug",
			RequiredEnvVars: []string{"FIREWORKS_API_KEY"},
			GatewayEnabled:  true,
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.FireworksAPIKey == "" {
					return nil, fmt.Errorf("qwen3-coder-fireworks requires FIREWORKS_API_KEY")
				}
				svc := &oai.Service{Model: oai.Qwen3CoderFireworks, APIKey: config.FireworksAPIKey, HTTPC: httpc}
				if url := config.getFireworksURL(); url != "" {
					svc.ModelURL = url
				}
				return svc, nil
			},
		},
		{
			// Not gateway-enabled: skipped by NewManager when a gateway is set.
			ID:              "glm-4p6-fireworks",
			Provider:        ProviderFireworks,
			Description:     "GLM-4P6 on Fireworks",
			RequiredEnvVars: []string{"FIREWORKS_API_KEY"},
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.FireworksAPIKey == "" {
					return nil, fmt.Errorf("glm-4p6-fireworks requires FIREWORKS_API_KEY")
				}
				svc := &oai.Service{Model: oai.GLM4P6Fireworks, APIKey: config.FireworksAPIKey, HTTPC: httpc}
				if url := config.getFireworksURL(); url != "" {
					svc.ModelURL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "gemini-3-pro",
			Provider:        ProviderGemini,
			Description:     "Gemini 3 Pro",
			RequiredEnvVars: []string{"GEMINI_API_KEY"},
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.GeminiAPIKey == "" {
					return nil, fmt.Errorf("gemini-3-pro requires GEMINI_API_KEY")
				}
				svc := &gem.Service{APIKey: config.GeminiAPIKey, Model: "gemini-3-pro-preview", HTTPC: httpc}
				if url := config.getGeminiURL(); url != "" {
					svc.URL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "gemini-3-flash",
			Provider:        ProviderGemini,
			Description:     "Gemini 3 Flash",
			RequiredEnvVars: []string{"GEMINI_API_KEY"},
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.GeminiAPIKey == "" {
					return nil, fmt.Errorf("gemini-3-flash requires GEMINI_API_KEY")
				}
				svc := &gem.Service{APIKey: config.GeminiAPIKey, Model: "gemini-3-flash-preview", HTTPC: httpc}
				if url := config.getGeminiURL(); url != "" {
					svc.URL = url
				}
				return svc, nil
			},
		},
		{
			ID:          "predictable",
			Provider:    ProviderBuiltIn,
			Description: "Deterministic test model (no API key)",
			// Used for testing; should be harmless.
			GatewayEnabled:  true,
			RequiredEnvVars: []string{},
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				return loop.NewPredictableService(), nil
			},
		},
	}
}
340
341// ByID returns the model with the given ID, or nil if not found
342func ByID(id string) *Model {
343	for _, m := range All() {
344		if m.ID == id {
345			return &m
346		}
347	}
348	return nil
349}
350
351// IDs returns all model IDs (not including aliases)
352func IDs() []string {
353	models := All()
354	ids := make([]string, len(models))
355	for i, m := range models {
356		ids[i] = m.ID
357	}
358	return ids
359}
360
361// Default returns the default model
362func Default() Model {
363	return All()[0] // claude-opus-4.6
364}
365
// Manager manages LLM services for all configured models: built-in models
// from All plus user-defined custom models loaded from the database.
type Manager struct {
	// services maps model ID to its configured service entry.
	services   map[string]serviceEntry
	modelOrder []string // ordered list of model IDs (built-in first, then custom)
	logger     *slog.Logger
	db         *db.DB       // for custom models and LLM request recording
	httpc      *http.Client // HTTP client with recording middleware
	cfg        *Config      // retained for refreshing custom models
}
375
// serviceEntry holds a configured service together with the metadata the
// manager exposes about it (see GetService and GetModelInfo).
type serviceEntry struct {
	service     llm.Service
	provider    Provider
	modelID     string
	source      string // Human-readable source (e.g., "exe.dev gateway", "$ANTHROPIC_API_KEY")
	displayName string // For custom models, the user-provided display name
	tags        string // For custom models, user-provided tags
}
384
// ConfigInfo is an optional interface that services can implement to provide
// configuration details for logging. loggingService.Do includes these
// details in failure logs when the wrapped service implements it.
type ConfigInfo interface {
	// ConfigDetails returns human-readable configuration info (e.g., URL, model name)
	ConfigDetails() map[string]string
}
390
// loggingService wraps an llm.Service to log request completion with usage
// information.
type loggingService struct {
	service  llm.Service
	logger   *slog.Logger
	modelID  string
	provider Provider
	// db is not read by any method in this file; request recording happens
	// in the HTTP transport recorder set up by NewManager.
	db *db.DB
}
399
400// Do wraps the underlying service's Do method with logging and database recording
401func (l *loggingService) Do(ctx context.Context, request *llm.Request) (*llm.Response, error) {
402	start := time.Now()
403
404	// Add model ID and provider to context for the HTTP transport
405	ctx = llmhttp.WithModelID(ctx, l.modelID)
406	ctx = llmhttp.WithProvider(ctx, string(l.provider))
407
408	// Call the underlying service
409	response, err := l.service.Do(ctx, request)
410
411	duration := time.Since(start)
412	durationSeconds := duration.Seconds()
413
414	// Log the completion with usage information
415	if err != nil {
416		logAttrs := []any{
417			"model", l.modelID,
418			"duration_seconds", durationSeconds,
419		}
420
421		// Add configuration details if available
422		if configProvider, ok := l.service.(ConfigInfo); ok {
423			for k, v := range configProvider.ConfigDetails() {
424				logAttrs = append(logAttrs, k, v)
425			}
426		}
427
428		logAttrs = append(logAttrs, "error", err)
429		l.logger.Error("LLM request failed", logAttrs...)
430	} else {
431		// Log successful completion with usage info
432		logAttrs := []any{
433			"model", l.modelID,
434			"duration_seconds", durationSeconds,
435		}
436
437		// Add usage information if available
438		if !response.Usage.IsZero() {
439			logAttrs = append(logAttrs,
440				"input_tokens", response.Usage.InputTokens,
441				"output_tokens", response.Usage.OutputTokens,
442				"cost_usd", response.Usage.CostUSD,
443			)
444			if response.Usage.CacheCreationInputTokens > 0 {
445				logAttrs = append(logAttrs, "cache_creation_input_tokens", response.Usage.CacheCreationInputTokens)
446			}
447			if response.Usage.CacheReadInputTokens > 0 {
448				logAttrs = append(logAttrs, "cache_read_input_tokens", response.Usage.CacheReadInputTokens)
449			}
450		}
451
452		l.logger.Info("LLM request completed", logAttrs...)
453	}
454
455	return response, err
456}
457
// TokenContextWindow delegates to the underlying service's context window size.
func (l *loggingService) TokenContextWindow() int {
	return l.service.TokenContextWindow()
}
462
// MaxImageDimension delegates to the underlying service's maximum image dimension.
func (l *loggingService) MaxImageDimension() int {
	return l.service.MaxImageDimension()
}
467
468// UseSimplifiedPatch delegates to the underlying service if it supports it
469func (l *loggingService) UseSimplifiedPatch() bool {
470	if sp, ok := l.service.(llm.SimplifiedPatcher); ok {
471		return sp.UseSimplifiedPatch()
472	}
473	return false
474}
475
// NewManager creates a new Manager with all models configured.
//
// It builds an HTTP client (with a request-recording hook when a database is
// available), registers every built-in model whose factory succeeds, and
// then loads custom models from the database. Models whose factories fail
// (e.g. missing API keys) are silently skipped. The returned error is
// currently always nil; the signature allows for future failure modes.
func NewManager(cfg *Config) (*Manager, error) {
	manager := &Manager{
		services: make(map[string]serviceEntry),
		logger:   cfg.Logger,
		db:       cfg.DB,
	}

	// Create HTTP client with recording if database is available
	var httpc *http.Client
	if cfg.DB != nil {
		// recorder persists one row per LLM HTTP exchange. Optional values
		// are passed as pointers; nil means "not present".
		recorder := func(ctx context.Context, url string, requestBody, responseBody []byte, statusCode int, err error, duration time.Duration) {
			modelID := llmhttp.ModelIDFromContext(ctx)
			provider := llmhttp.ProviderFromContext(ctx)
			conversationID := llmhttp.ConversationIDFromContext(ctx)

			var convIDPtr *string
			if conversationID != "" {
				convIDPtr = &conversationID
			}

			var reqBodyPtr, respBodyPtr *string
			if len(requestBody) > 0 {
				s := string(requestBody)
				reqBodyPtr = &s
			}
			if len(responseBody) > 0 {
				s := string(responseBody)
				respBodyPtr = &s
			}

			var statusCodePtr *int64
			if statusCode != 0 {
				sc := int64(statusCode)
				statusCodePtr = &sc
			}

			var errPtr *string
			if err != nil {
				s := err.Error()
				errPtr = &s
			}

			durationMs := duration.Milliseconds()
			durationMsPtr := &durationMs

			// Insert into database (fire and forget, don't block the request)
			go func() {
				// context.Background is deliberate: the insert must outlive
				// the (possibly canceled) request context.
				_, insertErr := cfg.DB.InsertLLMRequest(context.Background(), generated.InsertLLMRequestParams{
					ConversationID: convIDPtr,
					Model:          modelID,
					Provider:       provider,
					Url:            url,
					RequestBody:    reqBodyPtr,
					ResponseBody:   respBodyPtr,
					StatusCode:     statusCodePtr,
					Error:          errPtr,
					DurationMs:     durationMsPtr,
				})
				if insertErr != nil && cfg.Logger != nil {
					cfg.Logger.Warn("Failed to record LLM request", "error", insertErr)
				}
			}()
		}
		httpc = llmhttp.NewClient(nil, recorder)
	} else {
		// Still use the custom transport for headers, just without recording
		httpc = llmhttp.NewClient(nil, nil)
	}

	// Store the HTTP client and config for use with custom models
	manager.httpc = httpc
	manager.cfg = cfg

	// Load built-in models first
	useGateway := cfg.Gateway != ""
	for _, model := range All() {
		// Skip non-gateway-enabled models when using a gateway
		if useGateway && !model.GatewayEnabled {
			continue
		}
		svc, err := model.Factory(cfg, httpc)
		if err != nil {
			// Model not available (e.g., missing API key) - skip it
			continue
		}

		manager.services[model.ID] = serviceEntry{
			service:     svc,
			provider:    model.Provider,
			modelID:     model.ID,
			source:      model.Source(cfg),
			displayName: model.ID, // built-in models use ID as display name
			tags:        model.Tags,
		}
		manager.modelOrder = append(manager.modelOrder, model.ID)
	}

	// Load custom models from database
	if err := manager.loadCustomModels(); err != nil && cfg.Logger != nil {
		cfg.Logger.Warn("Failed to load custom models", "error", err)
	}

	return manager, nil
}
581
582// loadCustomModels loads custom models from the database into the manager.
583// It adds them after built-in models in the order.
584func (m *Manager) loadCustomModels() error {
585	if m.db == nil {
586		return nil
587	}
588
589	dbModels, err := m.db.GetModels(context.Background())
590	if err != nil {
591		return err
592	}
593
594	for _, model := range dbModels {
595		// Skip if this model ID is already registered (built-in takes precedence)
596		if _, exists := m.services[model.ModelID]; exists {
597			continue
598		}
599
600		svc := m.createServiceFromModel(&model)
601		if svc == nil {
602			continue
603		}
604
605		m.services[model.ModelID] = serviceEntry{
606			service:     svc,
607			provider:    Provider(model.ProviderType),
608			modelID:     model.ModelID,
609			source:      string(SourceCustom),
610			displayName: model.DisplayName,
611			tags:        model.Tags,
612		}
613		m.modelOrder = append(m.modelOrder, model.ModelID)
614	}
615
616	return nil
617}
618
619// RefreshCustomModels reloads custom models from the database.
620// Call this after adding or removing custom models via the UI.
621func (m *Manager) RefreshCustomModels() error {
622	if m.db == nil {
623		return nil
624	}
625
626	// Remove existing custom models from services and modelOrder
627	newOrder := make([]string, 0, len(m.modelOrder))
628	for _, id := range m.modelOrder {
629		entry, ok := m.services[id]
630		if ok && entry.source != string(SourceCustom) {
631			newOrder = append(newOrder, id)
632		} else {
633			delete(m.services, id)
634		}
635	}
636	m.modelOrder = newOrder
637
638	// Reload custom models
639	return m.loadCustomModels()
640}
641
642// GetService returns the LLM service for the given model ID, wrapped with logging
643func (m *Manager) GetService(modelID string) (llm.Service, error) {
644	entry, ok := m.services[modelID]
645	if !ok {
646		return nil, fmt.Errorf("unsupported model: %s", modelID)
647	}
648
649	// Wrap with logging if we have a logger
650	if m.logger != nil {
651		return &loggingService{
652			service:  entry.service,
653			logger:   m.logger,
654			modelID:  entry.modelID,
655			provider: entry.provider,
656			db:       m.db,
657		}, nil
658	}
659	return entry.service, nil
660}
661
662// GetAvailableModels returns a list of available model IDs.
663// Returns union of built-in models (in order) followed by custom models.
664func (m *Manager) GetAvailableModels() []string {
665	// Return a copy to prevent external modification
666	result := make([]string, len(m.modelOrder))
667	copy(result, m.modelOrder)
668	return result
669}
670
671// HasModel reports whether the manager has a service for the given model ID
672func (m *Manager) HasModel(modelID string) bool {
673	_, ok := m.services[modelID]
674	return ok
675}
676
// ModelInfo contains display name, tags, and source for a model, as exposed
// by Manager.GetModelInfo.
type ModelInfo struct {
	DisplayName string
	Tags        string
	Source      string // Human-readable source (e.g., "exe.dev gateway", "$ANTHROPIC_API_KEY", "custom")
}
683
684// GetModelInfo returns the display name, tags, and source for a model
685func (m *Manager) GetModelInfo(modelID string) *ModelInfo {
686	entry, ok := m.services[modelID]
687	if !ok {
688		return nil
689	}
690	return &ModelInfo{
691		DisplayName: entry.displayName,
692		Tags:        entry.tags,
693		Source:      entry.source,
694	}
695}
696
// createServiceFromModel creates an LLM service from a database model
// configuration. It returns nil (after logging, when a logger is present)
// for unknown provider types; callers treat nil as "skip this model".
func (m *Manager) createServiceFromModel(model *generated.Model) llm.Service {
	switch model.ProviderType {
	case "anthropic":
		return &ant.Service{
			APIKey:        model.ApiKey,
			URL:           model.Endpoint,
			Model:         model.ModelName,
			HTTPC:         m.httpc,
			ThinkingLevel: llm.ThinkingLevelMedium,
		}
	case "openai":
		// OpenAI-compatible chat API (also used by third-party endpoints).
		return &oai.Service{
			APIKey:   model.ApiKey,
			ModelURL: model.Endpoint,
			Model: oai.Model{
				ModelName: model.ModelName,
				URL:       model.Endpoint,
			},
			MaxTokens: int(model.MaxTokens),
			HTTPC:     m.httpc,
		}
	case "openai-responses":
		// OpenAI Responses API variant of the above.
		return &oai.ResponsesService{
			APIKey:   model.ApiKey,
			ModelURL: model.Endpoint,
			Model: oai.Model{
				ModelName: model.ModelName,
				URL:       model.Endpoint,
			},
			MaxTokens:     int(model.MaxTokens),
			HTTPC:         m.httpc,
			ThinkingLevel: llm.ThinkingLevelMedium,
		}
	case "gemini":
		return &gem.Service{
			APIKey: model.ApiKey,
			URL:    model.Endpoint,
			Model:  model.ModelName,
			HTTPC:  m.httpc,
		}
	default:
		if m.logger != nil {
			m.logger.Error("Unknown provider type for model", "model_id", model.ModelID, "provider_type", model.ProviderType)
		}
		return nil
	}
}