package models

import (
	"context"
	"fmt"
	"log/slog"
	"net/http"
	"time"

	"shelley.exe.dev/db"
	"shelley.exe.dev/db/generated"
	"shelley.exe.dev/llm"
	"shelley.exe.dev/llm/ant"
	"shelley.exe.dev/llm/gem"
	"shelley.exe.dev/llm/llmhttp"
	"shelley.exe.dev/llm/oai"
	"shelley.exe.dev/loop"
)

// Provider represents an LLM provider
type Provider string

const (
	ProviderOpenAI    Provider = "openai"
	ProviderAnthropic Provider = "anthropic"
	ProviderFireworks Provider = "fireworks"
	ProviderGemini    Provider = "gemini"
	ProviderBuiltIn   Provider = "builtin"
)

// ModelSource describes where a model's configuration comes from
type ModelSource string

const (
	SourceGateway ModelSource = "exe.dev gateway"
	SourceEnvVar  ModelSource = "env"    // Will be combined with env var name
	SourceCustom  ModelSource = "custom" // User-configured custom model
)

// Model represents a configured LLM model in Shelley
type Model struct {
	// ID is the user-facing identifier for this model
	ID string

	// Provider is the LLM provider (OpenAI, Anthropic, etc.)
	Provider Provider

	// Description is a human-readable description
	Description string

	// Tags is a comma-separated list of tags (e.g., "slug")
	Tags string

	// RequiredEnvVars are the environment variables required for this model
	RequiredEnvVars []string

	// GatewayEnabled indicates whether this model is available when using a gateway
	GatewayEnabled bool

	// Factory creates an llm.Service instance for this model
	Factory func(config *Config, httpc *http.Client) (llm.Service, error)
}

// Source returns a human-readable description of where this model's configuration comes from.
// For example: "exe.dev gateway", "$ANTHROPIC_API_KEY", etc.
func (m Model) Source(cfg *Config) string {
	// Predictable model has no source
	if m.ID == "predictable" {
		return ""
	}

	// Check if using gateway with implicit keys
	if cfg.Gateway != "" {
		// Gateway is configured - check if this model is using gateway (implicit key)
		switch m.Provider {
		case ProviderAnthropic:
			if cfg.AnthropicAPIKey == "implicit" {
				return string(SourceGateway)
			}
			return "$ANTHROPIC_API_KEY"
		case ProviderOpenAI:
			if cfg.OpenAIAPIKey == "implicit" {
				return string(SourceGateway)
			}
			return "$OPENAI_API_KEY"
		case ProviderFireworks:
			if cfg.FireworksAPIKey == "implicit" {
				return string(SourceGateway)
			}
			return "$FIREWORKS_API_KEY"
		case ProviderGemini:
			if cfg.GeminiAPIKey == "implicit" {
				return string(SourceGateway)
			}
			return "$GEMINI_API_KEY"
		}
	}

	// No gateway - use env var names based on RequiredEnvVars
	if len(m.RequiredEnvVars) > 0 {
		return "$" + m.RequiredEnvVars[0]
	}
	return ""
}

// Config holds the configuration needed to create LLM services
type Config struct {
	// API keys for each provider
	AnthropicAPIKey string
	OpenAIAPIKey    string
	GeminiAPIKey    string
	FireworksAPIKey string

	// Gateway is the base URL of the LLM gateway (optional)
	// If set, model-specific suffixes will be appended
	Gateway string

	Logger *slog.Logger

	// Database for recording LLM requests (optional)
	DB *db.DB
}
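
// Illustrative sketch (not part of the API): a caller outside this package might
// populate Config from the environment like this. The "os" usage and env var names
// are assumptions that mirror the RequiredEnvVars declared in All(); Logger and DB
// are optional.
//
//	cfg := &models.Config{
//		AnthropicAPIKey: os.Getenv("ANTHROPIC_API_KEY"),
//		OpenAIAPIKey:    os.Getenv("OPENAI_API_KEY"),
//		Logger:          slog.Default(),
//	}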

// getAnthropicURL returns the Anthropic API URL, with gateway suffix if gateway is set
func (c *Config) getAnthropicURL() string {
	if c.Gateway != "" {
		return c.Gateway + "/_/gateway/anthropic/v1/messages"
	}
	return "" // use default from ant package
}

// getOpenAIURL returns the OpenAI API URL, with gateway suffix if gateway is set
func (c *Config) getOpenAIURL() string {
	if c.Gateway != "" {
		return c.Gateway + "/_/gateway/openai/v1"
	}
	return "" // use default from oai package
}

// getGeminiURL returns the Gemini API URL, with gateway suffix if gateway is set
func (c *Config) getGeminiURL() string {
	if c.Gateway != "" {
		return c.Gateway + "/_/gateway/gemini/v1/models/generate"
	}
	return "" // use default from gem package
}

// getFireworksURL returns the Fireworks API URL, with gateway suffix if gateway is set
func (c *Config) getFireworksURL() string {
	if c.Gateway != "" {
		return c.Gateway + "/_/gateway/fireworks/inference/v1"
	}
	return "" // use default from oai package
}
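
// For example, with a hypothetical Gateway of "https://gw.example.com", the getters
// above produce URLs such as "https://gw.example.com/_/gateway/anthropic/v1/messages";
// with Gateway unset, each provider package falls back to its own default endpoint.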

// All returns all available models in Shelley
func All() []Model {
	return []Model{
		{
			ID:              "claude-opus-4.6",
			Provider:        ProviderAnthropic,
			Description:     "Claude Opus 4.6 (default)",
			RequiredEnvVars: []string{"ANTHROPIC_API_KEY"},
			GatewayEnabled:  true,
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.AnthropicAPIKey == "" {
					return nil, fmt.Errorf("claude-opus-4.6 requires ANTHROPIC_API_KEY")
				}
				svc := &ant.Service{APIKey: config.AnthropicAPIKey, Model: ant.Claude46Opus, HTTPC: httpc, ThinkingLevel: llm.ThinkingLevelMedium}
				if url := config.getAnthropicURL(); url != "" {
					svc.URL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "claude-opus-4.5",
			Provider:        ProviderAnthropic,
			Description:     "Claude Opus 4.5",
			RequiredEnvVars: []string{"ANTHROPIC_API_KEY"},
			GatewayEnabled:  true,
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.AnthropicAPIKey == "" {
					return nil, fmt.Errorf("claude-opus-4.5 requires ANTHROPIC_API_KEY")
				}
				svc := &ant.Service{APIKey: config.AnthropicAPIKey, Model: ant.Claude45Opus, HTTPC: httpc, ThinkingLevel: llm.ThinkingLevelMedium}
				if url := config.getAnthropicURL(); url != "" {
					svc.URL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "claude-sonnet-4.5",
			Provider:        ProviderAnthropic,
			Description:     "Claude Sonnet 4.5",
			RequiredEnvVars: []string{"ANTHROPIC_API_KEY"},
			GatewayEnabled:  true,
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.AnthropicAPIKey == "" {
					return nil, fmt.Errorf("claude-sonnet-4.5 requires ANTHROPIC_API_KEY")
				}
				svc := &ant.Service{APIKey: config.AnthropicAPIKey, Model: ant.Claude45Sonnet, HTTPC: httpc, ThinkingLevel: llm.ThinkingLevelMedium}
				if url := config.getAnthropicURL(); url != "" {
					svc.URL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "claude-haiku-4.5",
			Provider:        ProviderAnthropic,
			Description:     "Claude Haiku 4.5",
			RequiredEnvVars: []string{"ANTHROPIC_API_KEY"},
			GatewayEnabled:  true,
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.AnthropicAPIKey == "" {
					return nil, fmt.Errorf("claude-haiku-4.5 requires ANTHROPIC_API_KEY")
				}
				svc := &ant.Service{APIKey: config.AnthropicAPIKey, Model: ant.Claude45Haiku, HTTPC: httpc, ThinkingLevel: llm.ThinkingLevelMedium}
				if url := config.getAnthropicURL(); url != "" {
					svc.URL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "glm-4.7-fireworks",
			Provider:        ProviderFireworks,
			Description:     "GLM-4.7 on Fireworks",
			RequiredEnvVars: []string{"FIREWORKS_API_KEY"},
			GatewayEnabled:  true,
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.FireworksAPIKey == "" {
					return nil, fmt.Errorf("glm-4.7-fireworks requires FIREWORKS_API_KEY")
				}
				svc := &oai.Service{Model: oai.GLM47Fireworks, APIKey: config.FireworksAPIKey, HTTPC: httpc}
				if url := config.getFireworksURL(); url != "" {
					svc.ModelURL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "gpt-5.3-codex",
			Provider:        ProviderOpenAI,
			Description:     "GPT-5.3 Codex",
			RequiredEnvVars: []string{"OPENAI_API_KEY"},
			GatewayEnabled:  true,
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.OpenAIAPIKey == "" {
					return nil, fmt.Errorf("gpt-5.3-codex requires OPENAI_API_KEY")
				}
				svc := &oai.ResponsesService{Model: oai.GPT53Codex, APIKey: config.OpenAIAPIKey, HTTPC: httpc, ThinkingLevel: llm.ThinkingLevelMedium}
				if url := config.getOpenAIURL(); url != "" {
					svc.ModelURL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "gpt-5.2-codex",
			Provider:        ProviderOpenAI,
			Description:     "GPT-5.2 Codex",
			RequiredEnvVars: []string{"OPENAI_API_KEY"},
			GatewayEnabled:  true,
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.OpenAIAPIKey == "" {
					return nil, fmt.Errorf("gpt-5.2-codex requires OPENAI_API_KEY")
				}
				svc := &oai.ResponsesService{Model: oai.GPT52Codex, APIKey: config.OpenAIAPIKey, HTTPC: httpc, ThinkingLevel: llm.ThinkingLevelMedium}
				if url := config.getOpenAIURL(); url != "" {
					svc.ModelURL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "qwen3-coder-fireworks",
			Provider:        ProviderFireworks,
			Description:     "Qwen3 Coder 480B on Fireworks",
			Tags:            "slug",
			RequiredEnvVars: []string{"FIREWORKS_API_KEY"},
			GatewayEnabled:  true,
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.FireworksAPIKey == "" {
					return nil, fmt.Errorf("qwen3-coder-fireworks requires FIREWORKS_API_KEY")
				}
				svc := &oai.Service{Model: oai.Qwen3CoderFireworks, APIKey: config.FireworksAPIKey, HTTPC: httpc}
				if url := config.getFireworksURL(); url != "" {
					svc.ModelURL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "glm-4p6-fireworks",
			Provider:        ProviderFireworks,
			Description:     "GLM-4P6 on Fireworks",
			RequiredEnvVars: []string{"FIREWORKS_API_KEY"},
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.FireworksAPIKey == "" {
					return nil, fmt.Errorf("glm-4p6-fireworks requires FIREWORKS_API_KEY")
				}
				svc := &oai.Service{Model: oai.GLM4P6Fireworks, APIKey: config.FireworksAPIKey, HTTPC: httpc}
				if url := config.getFireworksURL(); url != "" {
					svc.ModelURL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "gemini-3-pro",
			Provider:        ProviderGemini,
			Description:     "Gemini 3 Pro",
			RequiredEnvVars: []string{"GEMINI_API_KEY"},
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.GeminiAPIKey == "" {
					return nil, fmt.Errorf("gemini-3-pro requires GEMINI_API_KEY")
				}
				svc := &gem.Service{APIKey: config.GeminiAPIKey, Model: "gemini-3-pro-preview", HTTPC: httpc}
				if url := config.getGeminiURL(); url != "" {
					svc.URL = url
				}
				return svc, nil
			},
		},
		{
			ID:              "gemini-3-flash",
			Provider:        ProviderGemini,
			Description:     "Gemini 3 Flash",
			RequiredEnvVars: []string{"GEMINI_API_KEY"},
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				if config.GeminiAPIKey == "" {
					return nil, fmt.Errorf("gemini-3-flash requires GEMINI_API_KEY")
				}
				svc := &gem.Service{APIKey: config.GeminiAPIKey, Model: "gemini-3-flash-preview", HTTPC: httpc}
				if url := config.getGeminiURL(); url != "" {
					svc.URL = url
				}
				return svc, nil
			},
		},
		{
			ID:          "predictable",
			Provider:    ProviderBuiltIn,
			Description: "Deterministic test model (no API key)",
			// Used for testing; should be harmless.
			GatewayEnabled:  true,
			RequiredEnvVars: []string{},
			Factory: func(config *Config, httpc *http.Client) (llm.Service, error) {
				return loop.NewPredictableService(), nil
			},
		},
	}
}

// ByID returns the model with the given ID, or nil if not found
func ByID(id string) *Model {
	for _, m := range All() {
		if m.ID == id {
			return &m
		}
	}
	return nil
}

// IDs returns all model IDs (not including aliases)
func IDs() []string {
	models := All()
	ids := make([]string, len(models))
	for i, m := range models {
		ids[i] = m.ID
	}
	return ids
}

// Default returns the default model
func Default() Model {
	return All()[0] // claude-opus-4.6
}

// Manager manages LLM services for all configured models
type Manager struct {
	services   map[string]serviceEntry
	modelOrder []string // ordered list of model IDs (built-in first, then custom)
	logger     *slog.Logger
	db         *db.DB       // for custom models and LLM request recording
	httpc      *http.Client // HTTP client with recording middleware
	cfg        *Config      // retained for refreshing custom models
}

type serviceEntry struct {
	service     llm.Service
	provider    Provider
	modelID     string
	source      string // Human-readable source (e.g., "exe.dev gateway", "$ANTHROPIC_API_KEY")
	displayName string // For custom models, the user-provided display name
	tags        string // For custom models, user-provided tags
}

// ConfigInfo is an optional interface that services can implement to provide configuration details for logging
type ConfigInfo interface {
	// ConfigDetails returns human-readable configuration info (e.g., URL, model name)
	ConfigDetails() map[string]string
}

// loggingService wraps an llm.Service to log request completion with usage information
type loggingService struct {
	service  llm.Service
	logger   *slog.Logger
	modelID  string
	provider Provider
	db       *db.DB
}

// Do wraps the underlying service's Do method with logging and database recording
func (l *loggingService) Do(ctx context.Context, request *llm.Request) (*llm.Response, error) {
	start := time.Now()

	// Add model ID and provider to context for the HTTP transport
	ctx = llmhttp.WithModelID(ctx, l.modelID)
	ctx = llmhttp.WithProvider(ctx, string(l.provider))

	// Call the underlying service
	response, err := l.service.Do(ctx, request)

	duration := time.Since(start)
	durationSeconds := duration.Seconds()

	// Log the completion with usage information
	if err != nil {
		logAttrs := []any{
			"model", l.modelID,
			"duration_seconds", durationSeconds,
		}

		// Add configuration details if available
		if configProvider, ok := l.service.(ConfigInfo); ok {
			for k, v := range configProvider.ConfigDetails() {
				logAttrs = append(logAttrs, k, v)
			}
		}

		logAttrs = append(logAttrs, "error", err)
		l.logger.Error("LLM request failed", logAttrs...)
	} else {
		// Log successful completion with usage info
		logAttrs := []any{
			"model", l.modelID,
			"duration_seconds", durationSeconds,
		}

		// Add usage information if available
		if !response.Usage.IsZero() {
			logAttrs = append(logAttrs,
				"input_tokens", response.Usage.InputTokens,
				"output_tokens", response.Usage.OutputTokens,
				"cost_usd", response.Usage.CostUSD,
			)
			if response.Usage.CacheCreationInputTokens > 0 {
				logAttrs = append(logAttrs, "cache_creation_input_tokens", response.Usage.CacheCreationInputTokens)
			}
			if response.Usage.CacheReadInputTokens > 0 {
				logAttrs = append(logAttrs, "cache_read_input_tokens", response.Usage.CacheReadInputTokens)
			}
		}

		l.logger.Info("LLM request completed", logAttrs...)
	}

	return response, err
}

// TokenContextWindow delegates to the underlying service
func (l *loggingService) TokenContextWindow() int {
	return l.service.TokenContextWindow()
}

// MaxImageDimension delegates to the underlying service
func (l *loggingService) MaxImageDimension() int {
	return l.service.MaxImageDimension()
}

// UseSimplifiedPatch delegates to the underlying service if it supports it
func (l *loggingService) UseSimplifiedPatch() bool {
	if sp, ok := l.service.(llm.SimplifiedPatcher); ok {
		return sp.UseSimplifiedPatch()
	}
	return false
}

// NewManager creates a new Manager with all models configured
func NewManager(cfg *Config) (*Manager, error) {
	manager := &Manager{
		services: make(map[string]serviceEntry),
		logger:   cfg.Logger,
		db:       cfg.DB,
	}

	// Create HTTP client with recording if database is available
	var httpc *http.Client
	if cfg.DB != nil {
		recorder := func(ctx context.Context, url string, requestBody, responseBody []byte, statusCode int, err error, duration time.Duration) {
			modelID := llmhttp.ModelIDFromContext(ctx)
			provider := llmhttp.ProviderFromContext(ctx)
			conversationID := llmhttp.ConversationIDFromContext(ctx)

			var convIDPtr *string
			if conversationID != "" {
				convIDPtr = &conversationID
			}

			var reqBodyPtr, respBodyPtr *string
			if len(requestBody) > 0 {
				s := string(requestBody)
				reqBodyPtr = &s
			}
			if len(responseBody) > 0 {
				s := string(responseBody)
				respBodyPtr = &s
			}

			var statusCodePtr *int64
			if statusCode != 0 {
				sc := int64(statusCode)
				statusCodePtr = &sc
			}

			var errPtr *string
			if err != nil {
				s := err.Error()
				errPtr = &s
			}

			durationMs := duration.Milliseconds()
			durationMsPtr := &durationMs

			// Insert into database (fire and forget, don't block the request)
			go func() {
				_, insertErr := cfg.DB.InsertLLMRequest(context.Background(), generated.InsertLLMRequestParams{
					ConversationID: convIDPtr,
					Model:          modelID,
					Provider:       provider,
					Url:            url,
					RequestBody:    reqBodyPtr,
					ResponseBody:   respBodyPtr,
					StatusCode:     statusCodePtr,
					Error:          errPtr,
					DurationMs:     durationMsPtr,
				})
				if insertErr != nil && cfg.Logger != nil {
					cfg.Logger.Warn("Failed to record LLM request", "error", insertErr)
				}
			}()
		}
		httpc = llmhttp.NewClient(nil, recorder)
	} else {
		// Still use the custom transport for headers, just without recording
		httpc = llmhttp.NewClient(nil, nil)
	}

	// Store the HTTP client and config for use with custom models
	manager.httpc = httpc
	manager.cfg = cfg

	// Load built-in models first
	useGateway := cfg.Gateway != ""
	for _, model := range All() {
		// Skip non-gateway-enabled models when using a gateway
		if useGateway && !model.GatewayEnabled {
			continue
		}
		svc, err := model.Factory(cfg, httpc)
		if err != nil {
			// Model not available (e.g., missing API key) - skip it
			continue
		}

		manager.services[model.ID] = serviceEntry{
			service:     svc,
			provider:    model.Provider,
			modelID:     model.ID,
			source:      model.Source(cfg),
			displayName: model.ID, // built-in models use ID as display name
			tags:        model.Tags,
		}
		manager.modelOrder = append(manager.modelOrder, model.ID)
	}

	// Load custom models from database
	if err := manager.loadCustomModels(); err != nil && cfg.Logger != nil {
		cfg.Logger.Warn("Failed to load custom models", "error", err)
	}

	return manager, nil
}
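
// Illustrative usage sketch (not part of this package): a caller would typically
// build a Manager once at startup and resolve services per request. Error handling
// is elided; NewManager silently skips models whose API keys are missing, so
// GetService can still fail for an unconfigured model ID.
//
//	mgr, err := models.NewManager(cfg)
//	if err != nil { /* handle */ }
//	svc, err := mgr.GetService(models.Default().ID)
//	if err != nil { /* handle */ }
//	resp, err := svc.Do(ctx, req)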

// loadCustomModels loads custom models from the database into the manager,
// appending them after the built-in models in the model order.
func (m *Manager) loadCustomModels() error {
	if m.db == nil {
		return nil
	}

	dbModels, err := m.db.GetModels(context.Background())
	if err != nil {
		return err
	}

	for _, model := range dbModels {
		// Skip if this model ID is already registered (built-in takes precedence)
		if _, exists := m.services[model.ModelID]; exists {
			continue
		}

		svc := m.createServiceFromModel(&model)
		if svc == nil {
			continue
		}

		m.services[model.ModelID] = serviceEntry{
			service:     svc,
			provider:    Provider(model.ProviderType),
			modelID:     model.ModelID,
			source:      string(SourceCustom),
			displayName: model.DisplayName,
			tags:        model.Tags,
		}
		m.modelOrder = append(m.modelOrder, model.ModelID)
	}

	return nil
}

// RefreshCustomModels reloads custom models from the database.
// Call this after adding or removing custom models via the UI.
func (m *Manager) RefreshCustomModels() error {
	if m.db == nil {
		return nil
	}

	// Remove existing custom models from services and modelOrder
	newOrder := make([]string, 0, len(m.modelOrder))
	for _, id := range m.modelOrder {
		entry, ok := m.services[id]
		if ok && entry.source != string(SourceCustom) {
			newOrder = append(newOrder, id)
		} else {
			delete(m.services, id)
		}
	}
	m.modelOrder = newOrder

	// Reload custom models
	return m.loadCustomModels()
}

// GetService returns the LLM service for the given model ID, wrapped with logging
func (m *Manager) GetService(modelID string) (llm.Service, error) {
	entry, ok := m.services[modelID]
	if !ok {
		return nil, fmt.Errorf("unsupported model: %s", modelID)
	}

	// Wrap with logging if we have a logger
	if m.logger != nil {
		return &loggingService{
			service:  entry.service,
			logger:   m.logger,
			modelID:  entry.modelID,
			provider: entry.provider,
			db:       m.db,
		}, nil
	}
	return entry.service, nil
}

// GetAvailableModels returns a list of available model IDs:
// built-in models (in order) followed by custom models.
func (m *Manager) GetAvailableModels() []string {
	// Return a copy to prevent external modification
	result := make([]string, len(m.modelOrder))
	copy(result, m.modelOrder)
	return result
}

// HasModel reports whether the manager has a service for the given model ID
func (m *Manager) HasModel(modelID string) bool {
	_, ok := m.services[modelID]
	return ok
}

// ModelInfo contains display name, tags, and source for a model
type ModelInfo struct {
	DisplayName string
	Tags        string
	Source      string // Human-readable source (e.g., "exe.dev gateway", "$ANTHROPIC_API_KEY", "custom")
}

// GetModelInfo returns the display name, tags, and source for a model
func (m *Manager) GetModelInfo(modelID string) *ModelInfo {
	entry, ok := m.services[modelID]
	if !ok {
		return nil
	}
	return &ModelInfo{
		DisplayName: entry.displayName,
		Tags:        entry.tags,
		Source:      entry.source,
	}
}

// createServiceFromModel creates an LLM service from a database model configuration
func (m *Manager) createServiceFromModel(model *generated.Model) llm.Service {
	switch model.ProviderType {
	case "anthropic":
		return &ant.Service{
			APIKey:        model.ApiKey,
			URL:           model.Endpoint,
			Model:         model.ModelName,
			HTTPC:         m.httpc,
			ThinkingLevel: llm.ThinkingLevelMedium,
		}
	case "openai":
		return &oai.Service{
			APIKey:   model.ApiKey,
			ModelURL: model.Endpoint,
			Model: oai.Model{
				ModelName: model.ModelName,
				URL:       model.Endpoint,
			},
			MaxTokens: int(model.MaxTokens),
			HTTPC:     m.httpc,
		}
	case "openai-responses":
		return &oai.ResponsesService{
			APIKey:   model.ApiKey,
			ModelURL: model.Endpoint,
			Model: oai.Model{
				ModelName: model.ModelName,
				URL:       model.Endpoint,
			},
			MaxTokens:     int(model.MaxTokens),
			HTTPC:         m.httpc,
			ThinkingLevel: llm.ThinkingLevelMedium,
		}
	case "gemini":
		return &gem.Service{
			APIKey: model.ApiKey,
			URL:    model.Endpoint,
			Model:  model.ModelName,
			HTTPC:  m.httpc,
		}
	default:
		if m.logger != nil {
			m.logger.Error("Unknown provider type for model", "model_id", model.ModelID, "provider_type", model.ProviderType)
		}
		return nil
	}
}