@@ -815,7 +815,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
result, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -847,7 +847,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
result, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -871,7 +871,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
_, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -911,7 +911,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
result, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -937,7 +937,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
result, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -973,7 +973,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
result, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -997,7 +997,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
result, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1021,7 +1021,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
_, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1053,7 +1053,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
_, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1097,7 +1097,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("o1-mini")
+ model, _ := provider.LanguageModel("o1-mini")
_, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1137,7 +1137,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-4o")
+ model, _ := provider.LanguageModel("gpt-4o")
_, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1177,7 +1177,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
_, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1249,7 +1249,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
result, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1307,7 +1307,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
result, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1349,7 +1349,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-4o-mini")
+ model, _ := provider.LanguageModel("gpt-4o-mini")
result, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1384,7 +1384,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-4o-mini")
+ model, _ := provider.LanguageModel("gpt-4o-mini")
result, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1411,7 +1411,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("o1-preview")
+ model, _ := provider.LanguageModel("o1-preview")
result, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1459,7 +1459,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("o1-preview")
+ model, _ := provider.LanguageModel("o1-preview")
_, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1503,7 +1503,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("o1-preview")
+ model, _ := provider.LanguageModel("o1-preview")
result, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1530,7 +1530,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("o1-preview")
+ model, _ := provider.LanguageModel("o1-preview")
_, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1570,7 +1570,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
_, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1616,7 +1616,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
_, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1656,7 +1656,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
_, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1700,7 +1700,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
_, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1740,7 +1740,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
_, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1778,7 +1778,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-4o-search-preview")
+ model, _ := provider.LanguageModel("gpt-4o-search-preview")
result, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1812,7 +1812,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("o3-mini")
+ model, _ := provider.LanguageModel("o3-mini")
_, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1850,7 +1850,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-4o-mini")
+ model, _ := provider.LanguageModel("gpt-4o-mini")
result, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1885,7 +1885,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-4o-mini")
+ model, _ := provider.LanguageModel("gpt-4o-mini")
_, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -1923,7 +1923,7 @@ func TestDoGenerate(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
result, err := model.Generate(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -2232,7 +2232,7 @@ func TestDoStream(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
stream, err := model.Stream(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -2288,7 +2288,7 @@ func TestDoStream(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
stream, err := model.Stream(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -2374,7 +2374,7 @@ func TestDoStream(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
stream, err := model.Stream(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -2413,7 +2413,7 @@ func TestDoStream(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
stream, err := model.Stream(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -2454,7 +2454,7 @@ func TestDoStream(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
_, err := model.Stream(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -2502,7 +2502,7 @@ func TestDoStream(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
stream, err := model.Stream(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -2552,7 +2552,7 @@ func TestDoStream(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
stream, err := model.Stream(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -2595,7 +2595,7 @@ func TestDoStream(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
_, err := model.Stream(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -2639,7 +2639,7 @@ func TestDoStream(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-3.5-turbo")
+ model, _ := provider.LanguageModel("gpt-3.5-turbo")
_, err := model.Stream(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -2687,7 +2687,7 @@ func TestDoStream(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("o3-mini")
+ model, _ := provider.LanguageModel("o3-mini")
_, err := model.Stream(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -2731,7 +2731,7 @@ func TestDoStream(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("gpt-4o-mini")
+ model, _ := provider.LanguageModel("gpt-4o-mini")
_, err := model.Stream(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -2776,7 +2776,7 @@ func TestDoStream(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("o1-preview")
+ model, _ := provider.LanguageModel("o1-preview")
stream, err := model.Stream(context.Background(), ai.Call{
Prompt: testPrompt,
@@ -2822,7 +2822,7 @@ func TestDoStream(t *testing.T) {
WithOpenAIApiKey("test-api-key"),
WithOpenAIBaseURL(server.server.URL),
)
- model := provider.LanguageModel("o1-preview")
+ model, _ := provider.LanguageModel("o1-preview")
stream, err := model.Stream(context.Background(), ai.Call{
Prompt: testPrompt,
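
Every hunk above makes the same mechanical change: `provider.LanguageModel(id)` now returns a second error value, which these tests discard with the blank identifier. The following is a minimal, self-contained sketch of that call-site pattern; the `provider` and `languageModel` types and the error condition below are illustrative stand-ins, not the repository's real types or behavior.

```go
// Illustrative sketch only: the real provider, options, and ai.Call live in
// the repository; these stand-ins just mirror the two-value return that the
// diff above adapts to.
package main

import (
	"errors"
	"fmt"
)

// languageModel stands in for the provider's model handle.
type languageModel struct{ id string }

// provider stands in for the OpenAI provider constructed in the tests.
type provider struct{ apiKey string }

// LanguageModel now returns an error alongside the model, which is the
// signature change every hunk above accounts for. The empty-id check is an
// assumed failure mode, used here only to exercise the error path.
func (p provider) LanguageModel(id string) (languageModel, error) {
	if id == "" {
		return languageModel{}, errors.New("model id must not be empty")
	}
	return languageModel{id: id}, nil
}

func main() {
	p := provider{apiKey: "test-api-key"}

	// Pattern used throughout the diff: discard the error with the blank
	// identifier when the model id is known to be valid.
	model, _ := p.LanguageModel("gpt-3.5-turbo")
	fmt.Println(model.id)

	// Alternative for call sites that prefer to surface the error explicitly.
	if _, err := p.LanguageModel(""); err != nil {
		fmt.Println("constructing the model failed:", err)
	}
}
```

In tests that want construction failures surfaced rather than silently ignored, the discarded value can instead be checked before the model is used, e.g. with `require.NoError(t, err)` from testify.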