chore: update openai sdk to v3 (#168)

Created by Andrey Nering

Change summary

go.mod                                         |  2 +-
go.sum                                         |  4 ++--
providers/azure/azure.go                       |  4 ++--
providers/openai/call_useragent.go             |  2 +-
providers/openai/error.go                      |  2 +-
providers/openai/language_model.go             |  6 +++---
providers/openai/language_model_hooks.go       |  6 +++---
providers/openai/openai.go                     |  4 ++--
providers/openai/openai_test.go                |  2 +-
providers/openai/provider_options.go           |  2 +-
providers/openai/responses_language_model.go   | 12 ++++++------
providers/openaicompat/language_model_hooks.go |  6 +++---
providers/openaicompat/openaicompat.go         |  2 +-
providers/openrouter/language_model_hooks.go   |  4 ++--
providers/openrouter/openrouter.go             |  2 +-
providers/vercel/language_model_hooks.go       |  4 ++--
providers/vercel/vercel.go                     |  2 +-
17 files changed, 33 insertions(+), 33 deletions(-)

Detailed changes

go.mod 🔗

@@ -16,7 +16,7 @@ require (
 	github.com/google/uuid v1.6.0
 	github.com/joho/godotenv v1.5.1
 	github.com/kaptinlin/jsonschema v0.7.5
-	github.com/openai/openai-go/v2 v2.7.1
+	github.com/openai/openai-go/v3 v3.26.0
 	github.com/stretchr/testify v1.11.1
 	golang.org/x/oauth2 v0.36.0
 	google.golang.org/genai v1.49.0

go.sum 🔗

@@ -201,8 +201,8 @@ github.com/onsi/ginkgo/v2 v2.23.4 h1:ktYTpKJAVZnDT4VjxSbiBenUjmlL/5QkBEocaWXiQus
 github.com/onsi/ginkgo/v2 v2.23.4/go.mod h1:Bt66ApGPBFzHyR+JO10Zbt0Gsp4uWxu5mIOTusL46e8=
 github.com/onsi/gomega v1.37.0 h1:CdEG8g0S133B4OswTDC/5XPSzE1OeP29QOioj2PID2Y=
 github.com/onsi/gomega v1.37.0/go.mod h1:8D9+Txp43QWKhM24yyOBEdpkzN8FvJyAwecBgsU4KU0=
-github.com/openai/openai-go/v2 v2.7.1 h1:/tfvTJhfv7hTSL8mWwc5VL4WLLSDL5yn9VqVykdu9r8=
-github.com/openai/openai-go/v2 v2.7.1/go.mod h1:jrJs23apqJKKbT+pqtFgNKpRju/KP9zpUTZhz3GElQE=
+github.com/openai/openai-go/v3 v3.26.0 h1:bRt6H/ozMNt/dDkN4gobnLqaEGrRGBzmbVs0xxJEnQE=
+github.com/openai/openai-go/v3 v3.26.0/go.mod h1:cdufnVK14cWcT9qA1rRtrXx4FTRsgbDPW7Ia7SS5cZo=
 github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
 github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
 github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=

providers/azure/azure.go 🔗

@@ -8,8 +8,8 @@ import (
 
 	"charm.land/fantasy"
 	"charm.land/fantasy/providers/openai"
-	"github.com/openai/openai-go/v2/azure"
-	"github.com/openai/openai-go/v2/option"
+	"github.com/openai/openai-go/v3/azure"
+	"github.com/openai/openai-go/v3/option"
 )
 
 type options struct {

providers/openai/call_useragent.go 🔗

@@ -3,7 +3,7 @@ package openai
 import (
 	"charm.land/fantasy"
 	"charm.land/fantasy/providers/internal/httpheaders"
-	"github.com/openai/openai-go/v2/option"
+	"github.com/openai/openai-go/v3/option"
 )
 
 // callUARequestOptions returns per-request options that override the

providers/openai/error.go 🔗

@@ -10,7 +10,7 @@ import (
 	"strings"
 
 	"charm.land/fantasy"
-	"github.com/openai/openai-go/v2"
+	"github.com/openai/openai-go/v3"
 )
 
 var openaiContextPattern = regexp.MustCompile(`maximum context length is (\d+) tokens.*?(?:resulted in|requested) (\d+) tokens`)

providers/openai/language_model.go 🔗

@@ -14,9 +14,9 @@ import (
 	"charm.land/fantasy/schema"
 	xjson "github.com/charmbracelet/x/json"
 	"github.com/google/uuid"
-	"github.com/openai/openai-go/v2"
-	"github.com/openai/openai-go/v2/packages/param"
-	"github.com/openai/openai-go/v2/shared"
+	"github.com/openai/openai-go/v3"
+	"github.com/openai/openai-go/v3/packages/param"
+	"github.com/openai/openai-go/v3/shared"
 )
 
 type languageModel struct {

providers/openai/language_model_hooks.go 🔗

@@ -6,9 +6,9 @@ import (
 	"strings"
 
 	"charm.land/fantasy"
-	"github.com/openai/openai-go/v2"
-	"github.com/openai/openai-go/v2/packages/param"
-	"github.com/openai/openai-go/v2/shared"
+	"github.com/openai/openai-go/v3"
+	"github.com/openai/openai-go/v3/packages/param"
+	"github.com/openai/openai-go/v3/shared"
 )
 
 // LanguageModelPrepareCallFunc is a function that prepares the call for the language model.

providers/openai/openai.go 🔗

@@ -8,8 +8,8 @@ import (
 
 	"charm.land/fantasy"
 	"charm.land/fantasy/providers/internal/httpheaders"
-	"github.com/openai/openai-go/v2"
-	"github.com/openai/openai-go/v2/option"
+	"github.com/openai/openai-go/v3"
+	"github.com/openai/openai-go/v3/option"
 )
 
 const (

providers/openai/openai_test.go 🔗

@@ -11,7 +11,7 @@ import (
 	"testing"
 
 	"charm.land/fantasy"
-	"github.com/openai/openai-go/v2/packages/param"
+	"github.com/openai/openai-go/v3/packages/param"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )

providers/openai/provider_options.go 🔗

@@ -5,7 +5,7 @@ import (
 	"encoding/json"
 
 	"charm.land/fantasy"
-	"github.com/openai/openai-go/v2"
+	"github.com/openai/openai-go/v3"
 )
 
 // ReasoningEffort represents the reasoning effort level for OpenAI models.

providers/openai/responses_language_model.go 🔗

@@ -12,10 +12,10 @@ import (
 	"charm.land/fantasy/object"
 	"charm.land/fantasy/schema"
 	"github.com/google/uuid"
-	"github.com/openai/openai-go/v2"
-	"github.com/openai/openai-go/v2/packages/param"
-	"github.com/openai/openai-go/v2/responses"
-	"github.com/openai/openai-go/v2/shared"
+	"github.com/openai/openai-go/v3"
+	"github.com/openai/openai-go/v3/packages/param"
+	"github.com/openai/openai-go/v3/responses"
+	"github.com/openai/openai-go/v3/shared"
 )
 
 const topLogprobsMax = 20
@@ -730,7 +730,7 @@ func (o responsesLanguageModel) Generate(ctx context.Context, call fantasy.Call)
 				ProviderExecuted: false,
 				ToolCallID:       outputItem.CallID,
 				ToolName:         outputItem.Name,
-				Input:            outputItem.Arguments,
+				Input:            outputItem.Arguments.OfString,
 			})
 
 		case "reasoning":
@@ -899,7 +899,7 @@ func (o responsesLanguageModel) Stream(ctx context.Context, call fantasy.Call) (
 							Type:          fantasy.StreamPartTypeToolCall,
 							ID:            done.Item.CallID,
 							ToolCallName:  done.Item.Name,
-							ToolCallInput: done.Item.Arguments,
+							ToolCallInput: done.Item.Arguments.OfString,
 						}) {
 							return
 						}

providers/openaicompat/language_model_hooks.go 🔗

@@ -8,9 +8,9 @@ import (
 
 	"charm.land/fantasy"
 	"charm.land/fantasy/providers/openai"
-	openaisdk "github.com/openai/openai-go/v2"
-	"github.com/openai/openai-go/v2/packages/param"
-	"github.com/openai/openai-go/v2/shared"
+	openaisdk "github.com/openai/openai-go/v3"
+	"github.com/openai/openai-go/v3/packages/param"
+	"github.com/openai/openai-go/v3/shared"
 )
 
 const reasoningStartedCtx = "reasoning_started"

providers/openaicompat/openaicompat.go 🔗

@@ -4,7 +4,7 @@ package openaicompat
 import (
 	"charm.land/fantasy"
 	"charm.land/fantasy/providers/openai"
-	"github.com/openai/openai-go/v2/option"
+	"github.com/openai/openai-go/v3/option"
 )
 
 type options struct {

providers/openrouter/language_model_hooks.go 🔗

@@ -11,8 +11,8 @@ import (
 	"charm.land/fantasy/providers/anthropic"
 	"charm.land/fantasy/providers/google"
 	"charm.land/fantasy/providers/openai"
-	openaisdk "github.com/openai/openai-go/v2"
-	"github.com/openai/openai-go/v2/packages/param"
+	openaisdk "github.com/openai/openai-go/v3"
+	"github.com/openai/openai-go/v3/packages/param"
 )
 
 const reasoningStartedCtx = "reasoning_started"

providers/openrouter/openrouter.go 🔗

@@ -6,7 +6,7 @@ import (
 
 	"charm.land/fantasy"
 	"charm.land/fantasy/providers/openai"
-	"github.com/openai/openai-go/v2/option"
+	"github.com/openai/openai-go/v3/option"
 )
 
 type options struct {

providers/vercel/language_model_hooks.go 🔗

@@ -11,8 +11,8 @@ import (
 	"charm.land/fantasy/providers/anthropic"
 	"charm.land/fantasy/providers/google"
 	openaipkg "charm.land/fantasy/providers/openai"
-	openaisdk "github.com/openai/openai-go/v2"
-	"github.com/openai/openai-go/v2/packages/param"
+	openaisdk "github.com/openai/openai-go/v3"
+	"github.com/openai/openai-go/v3/packages/param"
 )
 
 const reasoningStartedCtx = "reasoning_started"

providers/vercel/vercel.go 🔗

@@ -4,7 +4,7 @@ package vercel
 import (
 	"charm.land/fantasy"
 	"charm.land/fantasy/providers/openai"
-	"github.com/openai/openai-go/v2/option"
+	"github.com/openai/openai-go/v3/option"
 )
 
 type options struct {