refactor(openai): rename default functions and export them

Created by Andrey Nering

Change summary

openai/language_model.go       | 14 +++++++-------
openai/language_model_hooks.go | 12 ++++++------
2 files changed, 13 insertions(+), 13 deletions(-)

Detailed changes

openai/language_model.go 🔗

@@ -87,12 +87,12 @@ func newLanguageModel(modelID string, provider string, client openai.Client, opt
 		modelID:                    modelID,
 		provider:                   provider,
 		client:                     client,
-		generateIDFunc:             defaultGenerateID,
-		prepareCallFunc:            defaultPrepareLanguageModelCall,
-		mapFinishReasonFunc:        defaultMapFinishReason,
-		usageFunc:                  defaultUsage,
-		streamUsageFunc:            defaultStreamUsage,
-		streamProviderMetadataFunc: defaultStreamProviderMetadataFunc,
+		generateIDFunc:             DefaultGenerateID,
+		prepareCallFunc:            DefaultPrepareCallFunc,
+		mapFinishReasonFunc:        DefaultMapFinishReasonFunc,
+		usageFunc:                  DefaultUsageFunc,
+		streamUsageFunc:            DefaultStreamUsageFunc,
+		streamProviderMetadataFunc: DefaultStreamProviderMetadataFunc,
 	}
 
 	for _, o := range opts {
@@ -303,7 +303,7 @@ func (o languageModel) Generate(ctx context.Context, call ai.Call) (*ai.Response
 	return &ai.Response{
 		Content:      content,
 		Usage:        usage,
-		FinishReason: defaultMapFinishReason(choice),
+		FinishReason: DefaultMapFinishReasonFunc(choice),
 		ProviderMetadata: ai.ProviderMetadata{
 			Name: providerMetadata,
 		},

openai/language_model_hooks.go 🔗

@@ -21,11 +21,11 @@ type (
 	LanguageModelStreamProviderMetadataFunc = func(choice openai.ChatCompletionChoice, metadata ai.ProviderMetadata) ai.ProviderMetadata
 )
 
-func defaultGenerateID() string {
+func DefaultGenerateID() string {
 	return uuid.NewString()
 }
 
-func defaultPrepareLanguageModelCall(model ai.LanguageModel, params *openai.ChatCompletionNewParams, call ai.Call) ([]ai.CallWarning, error) {
+func DefaultPrepareCallFunc(model ai.LanguageModel, params *openai.ChatCompletionNewParams, call ai.Call) ([]ai.CallWarning, error) {
 	if call.ProviderOptions == nil {
 		return nil, nil
 	}
@@ -162,7 +162,7 @@ func defaultPrepareLanguageModelCall(model ai.LanguageModel, params *openai.Chat
 	return warnings, nil
 }
 
-func defaultMapFinishReason(choice openai.ChatCompletionChoice) ai.FinishReason {
+func DefaultMapFinishReasonFunc(choice openai.ChatCompletionChoice) ai.FinishReason {
 	finishReason := choice.FinishReason
 	switch finishReason {
 	case "stop":
@@ -178,7 +178,7 @@ func defaultMapFinishReason(choice openai.ChatCompletionChoice) ai.FinishReason
 	}
 }
 
-func defaultUsage(response openai.ChatCompletion) (ai.Usage, ai.ProviderOptionsData) {
+func DefaultUsageFunc(response openai.ChatCompletion) (ai.Usage, ai.ProviderOptionsData) {
 	if len(response.Choices) == 0 {
 		return ai.Usage{}, nil
 	}
@@ -211,7 +211,7 @@ func defaultUsage(response openai.ChatCompletion) (ai.Usage, ai.ProviderOptionsD
 	}, providerMetadata
 }
 
-func defaultStreamUsage(chunk openai.ChatCompletionChunk, ctx map[string]any, metadata ai.ProviderMetadata) (ai.Usage, ai.ProviderMetadata) {
+func DefaultStreamUsageFunc(chunk openai.ChatCompletionChunk, ctx map[string]any, metadata ai.ProviderMetadata) (ai.Usage, ai.ProviderMetadata) {
 	if chunk.Usage.TotalTokens == 0 {
 		return ai.Usage{}, nil
 	}
@@ -250,7 +250,7 @@ func defaultStreamUsage(chunk openai.ChatCompletionChunk, ctx map[string]any, me
 	}
 }
 
-func defaultStreamProviderMetadataFunc(choice openai.ChatCompletionChoice, metadata ai.ProviderMetadata) ai.ProviderMetadata {
+func DefaultStreamProviderMetadataFunc(choice openai.ChatCompletionChoice, metadata ai.ProviderMetadata) ai.ProviderMetadata {
 	streamProviderMetadata, ok := metadata[Name]
 	if !ok {
 		streamProviderMetadata = &ProviderMetadata{}