package provider

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"strings"
	"time"

	"github.com/google/uuid"
	"github.com/opencode-ai/opencode/internal/config"
	"github.com/opencode-ai/opencode/internal/llm/tools"
	"github.com/opencode-ai/opencode/internal/logging"
	"github.com/opencode-ai/opencode/internal/message"
	"google.golang.org/genai"
)

type geminiOptions struct {
	disableCache bool
}

type GeminiOption func(*geminiOptions)

type geminiClient struct {
	providerOptions providerClientOptions
	options         geminiOptions
	client          *genai.Client
}

type GeminiClient ProviderClient

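// newGeminiClient builds a GeminiClient backed by the google.golang.org/genai SDK,
// applying any provider-specific options. It returns nil if the underlying client
// cannot be created.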
func newGeminiClient(opts providerClientOptions) GeminiClient {
	geminiOpts := geminiOptions{}
	for _, o := range opts.geminiOptions {
		o(&geminiOpts)
	}

	client, err := genai.NewClient(context.Background(), &genai.ClientConfig{APIKey: opts.apiKey, Backend: genai.BackendGeminiAPI})
	if err != nil {
		logging.Error("Failed to create Gemini client", "error", err)
		return nil
	}

	return &geminiClient{
		providerOptions: opts,
		options:         geminiOpts,
		client:          client,
	}
}

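// convertMessages maps internal messages onto genai.Content values, translating
// user text and attachments, assistant text and tool calls, and tool results into
// the roles and part types the Gemini API expects.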
func (g *geminiClient) convertMessages(messages []message.Message) []*genai.Content {
	var history []*genai.Content
	for _, msg := range messages {
		switch msg.Role {
		case message.User:
			var parts []*genai.Part
			parts = append(parts, &genai.Part{Text: msg.Content().String()})
			for _, binaryContent := range msg.BinaryContent() {
				// Pass the full MIME type (e.g. "image/png"); the subtype alone is not a valid MIME type.
				parts = append(parts, &genai.Part{InlineData: &genai.Blob{
					MIMEType: binaryContent.MIMEType,
					Data:     binaryContent.Data,
				}})
			}
			history = append(history, &genai.Content{
				Parts: parts,
				Role:  "user",
			})
		case message.Assistant:
			content := &genai.Content{
				Role:  "model",
				Parts: []*genai.Part{},
			}

			if msg.Content().String() != "" {
				content.Parts = append(content.Parts, &genai.Part{Text: msg.Content().String()})
			}

			if len(msg.ToolCalls()) > 0 {
				for _, call := range msg.ToolCalls() {
					args, _ := parseJsonToMap(call.Input)
					content.Parts = append(content.Parts, &genai.Part{
						FunctionCall: &genai.FunctionCall{
							Name: call.Name,
							Args: args,
						},
					})
				}
			}

			history = append(history, content)

		case message.Tool:
			for _, result := range msg.ToolResults() {
				response := map[string]interface{}{"result": result.Content}
				parsed, err := parseJsonToMap(result.Content)
				if err == nil {
					response = parsed
				}

				// Find the assistant tool call that produced this result so the
				// response can be attributed to the right function name.
				var toolCall message.ToolCall
				for _, m := range messages {
					if m.Role == message.Assistant {
						for _, call := range m.ToolCalls() {
							if call.ID == result.ToolCallID {
								toolCall = call
								break
							}
						}
					}
				}

				history = append(history, &genai.Content{
					Parts: []*genai.Part{
						{
							FunctionResponse: &genai.FunctionResponse{
								Name:     toolCall.Name,
								Response: response,
							},
						},
					},
					Role: "function",
				})
			}
		}
	}

	return history
}

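// convertTools converts the internal tool definitions into a single genai.Tool
// containing one FunctionDeclaration per tool.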
func (g *geminiClient) convertTools(tools []tools.BaseTool) []*genai.Tool {
	geminiTool := &genai.Tool{}
	geminiTool.FunctionDeclarations = make([]*genai.FunctionDeclaration, 0, len(tools))

	for _, tool := range tools {
		info := tool.Info()
		declaration := &genai.FunctionDeclaration{
			Name:        info.Name,
			Description: info.Description,
			Parameters: &genai.Schema{
				Type:       genai.TypeObject,
				Properties: convertSchemaProperties(info.Parameters),
				Required:   info.Required,
			},
		}

		geminiTool.FunctionDeclarations = append(geminiTool.FunctionDeclarations, declaration)
	}

	return []*genai.Tool{geminiTool}
}

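// finishReason maps a Gemini finish reason onto the provider-agnostic
// message.FinishReason values.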
func (g *geminiClient) finishReason(reason genai.FinishReason) message.FinishReason {
	switch reason {
	case genai.FinishReasonStop:
		return message.FinishReasonEndTurn
	case genai.FinishReasonMaxTokens:
		return message.FinishReasonMaxTokens
	default:
		return message.FinishReasonUnknown
	}
}

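// send performs a single blocking generation request: it replays the conversation
// as chat history, sends the latest message, and retries on rate-limit errors
// before returning the aggregated response.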
func (g *geminiClient) send(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (*ProviderResponse, error) {
	// Convert messages
	geminiMessages := g.convertMessages(messages)

	cfg := config.Get()
	if cfg.Debug {
		jsonData, _ := json.Marshal(geminiMessages)
		logging.Debug("Prepared messages", "messages", string(jsonData))
	}

	history := geminiMessages[:len(geminiMessages)-1] // All but the last message
	lastMsg := geminiMessages[len(geminiMessages)-1]
	chat, err := g.client.Chats.Create(ctx, g.providerOptions.model.APIModel, &genai.GenerateContentConfig{
		MaxOutputTokens: int32(g.providerOptions.maxTokens),
		SystemInstruction: &genai.Content{
			Parts: []*genai.Part{{Text: g.providerOptions.systemMessage}},
		},
		Tools: g.convertTools(tools),
	}, history)
	if err != nil {
		return nil, err
	}

	attempts := 0
	for {
		attempts++
		var toolCalls []message.ToolCall

		var lastMsgParts []genai.Part
		for _, part := range lastMsg.Parts {
			lastMsgParts = append(lastMsgParts, *part)
		}
		resp, err := chat.SendMessage(ctx, lastMsgParts...)
		// If there is an error, see whether the call can be retried
		if err != nil {
			retry, after, retryErr := g.shouldRetry(attempts, err)
			if retryErr != nil {
				return nil, retryErr
			}
			if retry {
				logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
				select {
				case <-ctx.Done():
					return nil, ctx.Err()
				case <-time.After(time.Duration(after) * time.Millisecond):
					continue
				}
			}
			return nil, err
		}

		content := ""

		if len(resp.Candidates) > 0 && resp.Candidates[0].Content != nil {
			for _, part := range resp.Candidates[0].Content.Parts {
				switch {
				case part.Text != "":
					// Concatenate text parts so multi-part responses are not truncated.
					content += part.Text
				case part.FunctionCall != nil:
					id := "call_" + uuid.New().String()
					args, _ := json.Marshal(part.FunctionCall.Args)
					toolCalls = append(toolCalls, message.ToolCall{
						ID:       id,
						Name:     part.FunctionCall.Name,
						Input:    string(args),
						Type:     "function",
						Finished: true,
					})
				}
			}
		}
		finishReason := message.FinishReasonEndTurn
		if len(resp.Candidates) > 0 {
			finishReason = g.finishReason(resp.Candidates[0].FinishReason)
		}
		if len(toolCalls) > 0 {
			finishReason = message.FinishReasonToolUse
		}

		return &ProviderResponse{
			Content:      content,
			ToolCalls:    toolCalls,
			Usage:        g.usage(resp),
			FinishReason: finishReason,
		}, nil
	}
}

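// stream performs a streaming generation request and emits ProviderEvents on the
// returned channel: content deltas as they arrive, then a completion event with the
// aggregated response. Rate-limit errors are retried by restarting the stream.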
func (g *geminiClient) stream(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent {
	// Convert messages
	geminiMessages := g.convertMessages(messages)

	cfg := config.Get()
	if cfg.Debug {
		jsonData, _ := json.Marshal(geminiMessages)
		logging.Debug("Prepared messages", "messages", string(jsonData))
	}

	history := geminiMessages[:len(geminiMessages)-1] // All but the last message
	lastMsg := geminiMessages[len(geminiMessages)-1]
	chat, chatErr := g.client.Chats.Create(ctx, g.providerOptions.model.APIModel, &genai.GenerateContentConfig{
		MaxOutputTokens: int32(g.providerOptions.maxTokens),
		SystemInstruction: &genai.Content{
			Parts: []*genai.Part{{Text: g.providerOptions.systemMessage}},
		},
		Tools: g.convertTools(tools),
	}, history)

	attempts := 0
	eventChan := make(chan ProviderEvent)

	go func() {
		defer close(eventChan)

		// Surface chat-creation failures on the event channel instead of failing later.
		if chatErr != nil {
			eventChan <- ProviderEvent{Type: EventError, Error: chatErr}
			return
		}

	streamLoop:
		for {
			attempts++

			currentContent := ""
			toolCalls := []message.ToolCall{}
			var finalResp *genai.GenerateContentResponse

			eventChan <- ProviderEvent{Type: EventContentStart}

			var lastMsgParts []genai.Part
			for _, part := range lastMsg.Parts {
				lastMsgParts = append(lastMsgParts, *part)
			}
			for resp, err := range chat.SendMessageStream(ctx, lastMsgParts...) {
				if err != nil {
					retry, after, retryErr := g.shouldRetry(attempts, err)
					if retryErr != nil {
						eventChan <- ProviderEvent{Type: EventError, Error: retryErr}
						return
					}
					if retry {
						logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
						select {
						case <-ctx.Done():
							if ctx.Err() != nil {
								eventChan <- ProviderEvent{Type: EventError, Error: ctx.Err()}
							}
							return
						case <-time.After(time.Duration(after) * time.Millisecond):
							// Restart the whole stream attempt; falling through here
							// would dereference a nil response below.
							continue streamLoop
						}
					}
					eventChan <- ProviderEvent{Type: EventError, Error: err}
					return
				}

				finalResp = resp

				if len(resp.Candidates) > 0 && resp.Candidates[0].Content != nil {
					for _, part := range resp.Candidates[0].Content.Parts {
						switch {
						case part.Text != "":
							delta := part.Text
							if delta != "" {
								eventChan <- ProviderEvent{
									Type:    EventContentDelta,
									Content: delta,
								}
								currentContent += delta
							}
						case part.FunctionCall != nil:
							id := "call_" + uuid.New().String()
							args, _ := json.Marshal(part.FunctionCall.Args)
							newCall := message.ToolCall{
								ID:       id,
								Name:     part.FunctionCall.Name,
								Input:    string(args),
								Type:     "function",
								Finished: true,
							}

							// Avoid emitting the same tool call twice across stream chunks.
							isNew := true
							for _, existing := range toolCalls {
								if existing.Name == newCall.Name && existing.Input == newCall.Input {
									isNew = false
									break
								}
							}

							if isNew {
								toolCalls = append(toolCalls, newCall)
							}
						}
					}
				}
			}

			eventChan <- ProviderEvent{Type: EventContentStop}

			if finalResp != nil {
				finishReason := message.FinishReasonEndTurn
				if len(finalResp.Candidates) > 0 {
					finishReason = g.finishReason(finalResp.Candidates[0].FinishReason)
				}
				if len(toolCalls) > 0 {
					finishReason = message.FinishReasonToolUse
				}
				eventChan <- ProviderEvent{
					Type: EventComplete,
					Response: &ProviderResponse{
						Content:      currentContent,
						ToolCalls:    toolCalls,
						Usage:        g.usage(finalResp),
						FinishReason: finishReason,
					},
				}
				return
			}
		}
	}()

	return eventChan
}

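// shouldRetry reports whether a failed request looks like a rate-limit error worth
// retrying, and if so how many milliseconds to back off before the next attempt.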
func (g *geminiClient) shouldRetry(attempts int, err error) (bool, int64, error) {
	// Give up once the retry budget is exhausted
	if attempts > maxRetries {
		return false, 0, fmt.Errorf("maximum retry attempts reached for rate limit: %d retries", maxRetries)
	}

	// Gemini doesn't expose a typed rate-limit error, so inspect the error message
	// for rate-limit indicators
	if errors.Is(err, io.EOF) {
		return false, 0, err
	}

	errMsg := err.Error()
	isRateLimit := false

	// Check for common rate limit error messages
	if contains(errMsg, "rate limit", "quota exceeded", "too many requests") {
		isRateLimit = true
	}

	if !isRateLimit {
		return false, 0, err
	}

	// Exponential backoff with a fixed 20% margin added on top
	backoffMs := 2000 * (1 << (attempts - 1))
	jitterMs := int(float64(backoffMs) * 0.2)
	retryMs := backoffMs + jitterMs

	return true, int64(retryMs), nil
}

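// toolCalls extracts function calls from a response and converts them into
// message.ToolCall values with generated IDs.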
func (g *geminiClient) toolCalls(resp *genai.GenerateContentResponse) []message.ToolCall {
	var toolCalls []message.ToolCall

	if len(resp.Candidates) > 0 && resp.Candidates[0].Content != nil {
		for _, part := range resp.Candidates[0].Content.Parts {
			if part.FunctionCall != nil {
				id := "call_" + uuid.New().String()
				args, _ := json.Marshal(part.FunctionCall.Args)
				toolCalls = append(toolCalls, message.ToolCall{
					ID:    id,
					Name:  part.FunctionCall.Name,
					Input: string(args),
					Type:  "function",
				})
			}
		}
	}

	return toolCalls
}

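// usage converts Gemini usage metadata into the provider-agnostic TokenUsage.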
func (g *geminiClient) usage(resp *genai.GenerateContentResponse) TokenUsage {
	if resp == nil || resp.UsageMetadata == nil {
		return TokenUsage{}
	}

	return TokenUsage{
		InputTokens:         int64(resp.UsageMetadata.PromptTokenCount),
		OutputTokens:        int64(resp.UsageMetadata.CandidatesTokenCount),
		CacheCreationTokens: 0, // Not directly provided by Gemini
		CacheReadTokens:     int64(resp.UsageMetadata.CachedContentTokenCount),
	}
}

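// WithGeminiDisableCache disables prompt caching for the Gemini provider.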
func WithGeminiDisableCache() GeminiOption {
	return func(options *geminiOptions) {
		options.disableCache = true
	}
}

// Helper functions
func parseJsonToMap(jsonStr string) (map[string]interface{}, error) {
	var result map[string]interface{}
	err := json.Unmarshal([]byte(jsonStr), &result)
	return result, err
}

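// convertSchemaProperties converts a JSON-schema-style properties map into
// genai.Schema definitions, one per parameter.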
func convertSchemaProperties(parameters map[string]interface{}) map[string]*genai.Schema {
	properties := make(map[string]*genai.Schema)

	for name, param := range parameters {
		properties[name] = convertToSchema(param)
	}

	return properties
}

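// convertToSchema converts a single JSON-schema parameter definition into a
// genai.Schema, recursing into array items and object properties. Unknown or
// missing types default to string.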
func convertToSchema(param interface{}) *genai.Schema {
	schema := &genai.Schema{Type: genai.TypeString}

	paramMap, ok := param.(map[string]interface{})
	if !ok {
		return schema
	}

	if desc, ok := paramMap["description"].(string); ok {
		schema.Description = desc
	}

	typeVal, hasType := paramMap["type"]
	if !hasType {
		return schema
	}

	typeStr, ok := typeVal.(string)
	if !ok {
		return schema
	}

	schema.Type = mapJSONTypeToGenAI(typeStr)

	switch typeStr {
	case "array":
		schema.Items = processArrayItems(paramMap)
	case "object":
		if props, ok := paramMap["properties"].(map[string]interface{}); ok {
			schema.Properties = convertSchemaProperties(props)
		}
	}

	return schema
}

func processArrayItems(paramMap map[string]interface{}) *genai.Schema {
	items, ok := paramMap["items"].(map[string]interface{})
	if !ok {
		return nil
	}

	return convertToSchema(items)
}

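// mapJSONTypeToGenAI maps a JSON Schema type name onto the corresponding genai.Type.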
func mapJSONTypeToGenAI(jsonType string) genai.Type {
	switch jsonType {
	case "string":
		return genai.TypeString
	case "number":
		return genai.TypeNumber
	case "integer":
		return genai.TypeInteger
	case "boolean":
		return genai.TypeBoolean
	case "array":
		return genai.TypeArray
	case "object":
		return genai.TypeObject
	default:
		return genai.TypeString // Default to string for unknown types
	}
}

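// contains reports whether s contains any of the given substrings, case-insensitively.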
func contains(s string, substrs ...string) bool {
	for _, substr := range substrs {
		if strings.Contains(strings.ToLower(s), strings.ToLower(substr)) {
			return true
		}
	}
	return false
}