1package provider
2
3import (
4 "context"
5 "fmt"
6
7 "github.com/opencode-ai/opencode/internal/llm/models"
8 "github.com/opencode-ai/opencode/internal/llm/tools"
9 "github.com/opencode-ai/opencode/internal/message"
10)
11
// EventType identifies the kind of event emitted on the channel returned by
// Provider.StreamResponse.
type EventType string

// maxRetries bounds retry attempts for provider requests.
// NOTE(review): not referenced in this portion of the file — presumably used
// by the individual provider clients; confirm before removing.
const maxRetries = 8

// Streaming event types. Content/tool-use/thinking deltas arrive
// incrementally; EventComplete carries the final response and EventError a
// terminal failure.
const (
	EventContentStart  EventType = "content_start"
	EventToolUseStart  EventType = "tool_use_start"
	EventToolUseDelta  EventType = "tool_use_delta"
	EventToolUseStop   EventType = "tool_use_stop"
	EventContentDelta  EventType = "content_delta"
	EventThinkingDelta EventType = "thinking_delta"
	EventContentStop   EventType = "content_stop"
	EventComplete      EventType = "complete"
	EventError         EventType = "error"
	EventWarning       EventType = "warning"
)
28
// TokenUsage reports token accounting for a single provider request.
type TokenUsage struct {
	InputTokens         int64
	OutputTokens        int64
	CacheCreationTokens int64 // presumably tokens written to a prompt cache — confirm against provider clients
	CacheReadTokens     int64 // presumably tokens served from a prompt cache — confirm against provider clients
}
35
// ProviderResponse is the complete result of a non-streaming request
// (Provider.SendMessages): the generated text, any tool calls the model
// requested, token usage, and why generation stopped.
type ProviderResponse struct {
	Content      string
	ToolCalls    []message.ToolCall
	Usage        TokenUsage
	FinishReason message.FinishReason
}
42
// ProviderEvent is a single event in a streaming response. Which of the
// payload fields is populated depends on Type (e.g. Response on complete
// events, Error on error events) — the producers live in the per-provider
// clients, not in this file.
type ProviderEvent struct {
	Type EventType

	Content  string
	Thinking string
	Response *ProviderResponse
	ToolCall *message.ToolCall
	Error    error
}
// Provider is the public interface consumed by the rest of the application.
// Implementations wrap a provider-specific client (see baseProvider).
type Provider interface {
	// SendMessages performs a blocking request and returns the full response.
	SendMessages(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (*ProviderResponse, error)

	// StreamResponse streams the response as a sequence of ProviderEvents.
	StreamResponse(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent

	// Model reports the model this provider was configured with.
	Model() models.Model
}
59
// providerClientOptions collects configuration shared by all provider
// clients, plus per-backend option slices applied only by the matching
// client implementation.
type providerClientOptions struct {
	apiKey        string
	model         models.Model
	maxTokens     int64
	systemMessage string

	anthropicOptions []AnthropicOption
	openaiOptions    []OpenAIOption
	geminiOptions    []GeminiOption
	bedrockOptions   []BedrockOption
}
71
// ProviderClientOption mutates providerClientOptions during construction;
// see the With* constructors below.
type ProviderClientOption func(*providerClientOptions)

// ProviderClient is the low-level, backend-specific transport implemented by
// each provider client (Anthropic, OpenAI, Gemini, Bedrock, Azure).
type ProviderClient interface {
	send(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (*ProviderResponse, error)
	stream(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent
}
78
// baseProvider adapts any ProviderClient to the public Provider interface,
// adding shared behavior (message cleaning) in front of the client.
type baseProvider[C ProviderClient] struct {
	options providerClientOptions
	client  C
}
83
84func NewProvider(providerName models.ModelProvider, opts ...ProviderClientOption) (Provider, error) {
85 clientOptions := providerClientOptions{}
86 for _, o := range opts {
87 o(&clientOptions)
88 }
89 switch providerName {
90 case models.ProviderAnthropic:
91 return &baseProvider[AnthropicClient]{
92 options: clientOptions,
93 client: newAnthropicClient(clientOptions),
94 }, nil
95 case models.ProviderOpenAI:
96 return &baseProvider[OpenAIClient]{
97 options: clientOptions,
98 client: newOpenAIClient(clientOptions),
99 }, nil
100 case models.ProviderGemini:
101 return &baseProvider[GeminiClient]{
102 options: clientOptions,
103 client: newGeminiClient(clientOptions),
104 }, nil
105 case models.ProviderBedrock:
106 return &baseProvider[BedrockClient]{
107 options: clientOptions,
108 client: newBedrockClient(clientOptions),
109 }, nil
110 case models.ProviderGROQ:
111 clientOptions.openaiOptions = append(clientOptions.openaiOptions,
112 WithOpenAIBaseURL("https://api.groq.com/openai/v1"),
113 )
114 return &baseProvider[OpenAIClient]{
115 options: clientOptions,
116 client: newOpenAIClient(clientOptions),
117 }, nil
118 case models.ProviderAzure:
119 return &baseProvider[AzureClient]{
120 options: clientOptions,
121 client: newAzureClient(clientOptions),
122 }, nil
123 case models.ProviderMock:
124 // TODO: implement mock client for test
125 panic("not implemented")
126 }
127 return nil, fmt.Errorf("provider not supported: %s", providerName)
128}
129
130func (p *baseProvider[C]) cleanMessages(messages []message.Message) (cleaned []message.Message) {
131 for _, msg := range messages {
132 // The message has no content
133 if len(msg.Parts) == 0 {
134 continue
135 }
136 cleaned = append(cleaned, msg)
137 }
138 return
139}
140
141func (p *baseProvider[C]) SendMessages(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (*ProviderResponse, error) {
142 messages = p.cleanMessages(messages)
143 return p.client.send(ctx, messages, tools)
144}
145
// Model reports the model this provider was configured with (via WithModel).
func (p *baseProvider[C]) Model() models.Model {
	return p.options.model
}
149
150func (p *baseProvider[C]) StreamResponse(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent {
151 messages = p.cleanMessages(messages)
152 return p.client.stream(ctx, messages, tools)
153}
154
155func WithAPIKey(apiKey string) ProviderClientOption {
156 return func(options *providerClientOptions) {
157 options.apiKey = apiKey
158 }
159}
160
161func WithModel(model models.Model) ProviderClientOption {
162 return func(options *providerClientOptions) {
163 options.model = model
164 }
165}
166
167func WithMaxTokens(maxTokens int64) ProviderClientOption {
168 return func(options *providerClientOptions) {
169 options.maxTokens = maxTokens
170 }
171}
172
173func WithSystemMessage(systemMessage string) ProviderClientOption {
174 return func(options *providerClientOptions) {
175 options.systemMessage = systemMessage
176 }
177}
178
179func WithAnthropicOptions(anthropicOptions ...AnthropicOption) ProviderClientOption {
180 return func(options *providerClientOptions) {
181 options.anthropicOptions = anthropicOptions
182 }
183}
184
185func WithOpenAIOptions(openaiOptions ...OpenAIOption) ProviderClientOption {
186 return func(options *providerClientOptions) {
187 options.openaiOptions = openaiOptions
188 }
189}
190
191func WithGeminiOptions(geminiOptions ...GeminiOption) ProviderClientOption {
192 return func(options *providerClientOptions) {
193 options.geminiOptions = geminiOptions
194 }
195}
196
197func WithBedrockOptions(bedrockOptions ...BedrockOption) ProviderClientOption {
198 return func(options *providerClientOptions) {
199 options.bedrockOptions = bedrockOptions
200 }
201}