// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

package openai

import (
	"context"
	"errors"
	"fmt"
	"net/http"
	"net/url"

	"github.com/openai/openai-go/internal/apijson"
	"github.com/openai/openai-go/internal/apiquery"
	"github.com/openai/openai-go/internal/requestconfig"
	"github.com/openai/openai-go/option"
	"github.com/openai/openai-go/packages/pagination"
	"github.com/openai/openai-go/packages/param"
	"github.com/openai/openai-go/packages/resp"
	"github.com/openai/openai-go/shared"
	"github.com/openai/openai-go/shared/constant"
)

// BatchService contains methods and other services that help with interacting
// with the OpenAI API.
//
// Note, unlike clients, this service does not read variables from the
// environment automatically. You should not instantiate this service directly;
// use the [NewBatchService] method instead.
type BatchService struct {
	Options []option.RequestOption
}

// NewBatchService generates a new service that applies the given options to each
// request. These options are applied after the parent client's options (if there
// is one), and before any request-specific options.
func NewBatchService(opts ...option.RequestOption) (r BatchService) {
	r = BatchService{}
	r.Options = opts
	return
}

// Creates and executes a batch from an uploaded file of requests
func (r *BatchService) New(ctx context.Context, body BatchNewParams, opts ...option.RequestOption) (res *Batch, err error) {
	opts = append(r.Options[:], opts...)
	path := "batches"
	err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...)
	return
}
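
// Illustrative usage sketch, not part of the generated API surface: a minimal
// call to New from within this package. User code would normally reach this
// method through the Batches field of a configured client; the service value
// and the input file ID below are assumptions made for the sake of the example.
func exampleBatchNew(ctx context.Context, svc BatchService) (*Batch, error) {
	return svc.New(ctx, BatchNewParams{
		// The input file is assumed to be an already-uploaded JSONL file with
		// purpose `batch`; this ID is a placeholder.
		InputFileID:      "file-abc123",
		Endpoint:         BatchNewParamsEndpointV1ChatCompletions,
		CompletionWindow: BatchNewParamsCompletionWindow24h,
	})
}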

// Retrieves a batch.
func (r *BatchService) Get(ctx context.Context, batchID string, opts ...option.RequestOption) (res *Batch, err error) {
	opts = append(r.Options[:], opts...)
	if batchID == "" {
		err = errors.New("missing required batch_id parameter")
		return
	}
	path := fmt.Sprintf("batches/%s", batchID)
	err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...)
	return
}
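
// Illustrative usage sketch, not part of the generated API surface: fetch a
// batch by ID and report its current status.
func exampleBatchGet(ctx context.Context, svc BatchService, batchID string) error {
	batch, err := svc.Get(ctx, batchID)
	if err != nil {
		return err
	}
	// Status holds one of the BatchStatus constants defined below.
	fmt.Printf("batch %s is %s\n", batch.ID, batch.Status)
	return nil
}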

// List your organization's batches.
func (r *BatchService) List(ctx context.Context, query BatchListParams, opts ...option.RequestOption) (res *pagination.CursorPage[Batch], err error) {
	var raw *http.Response
	opts = append(r.Options[:], opts...)
	opts = append([]option.RequestOption{option.WithResponseInto(&raw)}, opts...)
	path := "batches"
	cfg, err := requestconfig.NewRequestConfig(ctx, http.MethodGet, path, query, &res, opts...)
	if err != nil {
		return nil, err
	}
	err = cfg.Execute()
	if err != nil {
		return nil, err
	}
	res.SetPageConfig(cfg, raw)
	return res, nil
}
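
// Illustrative usage sketch, not part of the generated API surface: list the
// first page of batches and walk its Data slice. For automatic pagination, see
// ListAutoPaging below.
func exampleBatchList(ctx context.Context, svc BatchService) error {
	page, err := svc.List(ctx, BatchListParams{})
	if err != nil {
		return err
	}
	for _, batch := range page.Data {
		fmt.Println(batch.ID, batch.Status)
	}
	return nil
}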

// List your organization's batches.
func (r *BatchService) ListAutoPaging(ctx context.Context, query BatchListParams, opts ...option.RequestOption) *pagination.CursorPageAutoPager[Batch] {
	return pagination.NewCursorPageAutoPager(r.List(ctx, query, opts...))
}
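
// Illustrative usage sketch, not part of the generated API surface: iterate
// over every batch with the auto-pager, which fetches additional pages as Next
// advances.
func exampleBatchListAutoPaging(ctx context.Context, svc BatchService) error {
	iter := svc.ListAutoPaging(ctx, BatchListParams{})
	for iter.Next() {
		batch := iter.Current()
		fmt.Println(batch.ID, batch.Status)
	}
	// Err reports any error encountered while fetching pages.
	return iter.Err()
}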

// Cancels an in-progress batch. The batch will be in status `cancelling` for up to
// 10 minutes, before changing to `cancelled`, where it will have partial results
// (if any) available in the output file.
func (r *BatchService) Cancel(ctx context.Context, batchID string, opts ...option.RequestOption) (res *Batch, err error) {
	opts = append(r.Options[:], opts...)
	if batchID == "" {
		err = errors.New("missing required batch_id parameter")
		return
	}
	path := fmt.Sprintf("batches/%s/cancel", batchID)
	err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, nil, &res, opts...)
	return
}
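
// Illustrative usage sketch, not part of the generated API surface: request
// cancellation and return the reported status, which is typically `cancelling`
// until the batch settles into `cancelled`.
func exampleBatchCancel(ctx context.Context, svc BatchService, batchID string) (BatchStatus, error) {
	batch, err := svc.Cancel(ctx, batchID)
	if err != nil {
		return "", err
	}
	return batch.Status, nil
}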

type Batch struct {
	ID string `json:"id,required"`
	// The time frame within which the batch should be processed.
	CompletionWindow string `json:"completion_window,required"`
	// The Unix timestamp (in seconds) for when the batch was created.
	CreatedAt int64 `json:"created_at,required"`
	// The OpenAI API endpoint used by the batch.
	Endpoint string `json:"endpoint,required"`
	// The ID of the input file for the batch.
	InputFileID string `json:"input_file_id,required"`
	// The object type, which is always `batch`.
	Object constant.Batch `json:"object,required"`
	// The current status of the batch.
	//
	// Any of "validating", "failed", "in_progress", "finalizing", "completed",
	// "expired", "cancelling", "cancelled".
	Status BatchStatus `json:"status,required"`
	// The Unix timestamp (in seconds) for when the batch was cancelled.
	CancelledAt int64 `json:"cancelled_at"`
	// The Unix timestamp (in seconds) for when the batch started cancelling.
	CancellingAt int64 `json:"cancelling_at"`
	// The Unix timestamp (in seconds) for when the batch was completed.
	CompletedAt int64 `json:"completed_at"`
	// The ID of the file containing the outputs of requests with errors.
	ErrorFileID string `json:"error_file_id"`
	Errors BatchErrors `json:"errors"`
	// The Unix timestamp (in seconds) for when the batch expired.
	ExpiredAt int64 `json:"expired_at"`
	// The Unix timestamp (in seconds) for when the batch will expire.
	ExpiresAt int64 `json:"expires_at"`
	// The Unix timestamp (in seconds) for when the batch failed.
	FailedAt int64 `json:"failed_at"`
	// The Unix timestamp (in seconds) for when the batch started finalizing.
	FinalizingAt int64 `json:"finalizing_at"`
	// The Unix timestamp (in seconds) for when the batch started processing.
	InProgressAt int64 `json:"in_progress_at"`
	// Set of 16 key-value pairs that can be attached to an object. This can be useful
	// for storing additional information about the object in a structured format, and
	// querying for objects via API or the dashboard.
	//
	// Keys are strings with a maximum length of 64 characters. Values are strings with
	// a maximum length of 512 characters.
	Metadata shared.Metadata `json:"metadata,nullable"`
	// The ID of the file containing the outputs of successfully executed requests.
	OutputFileID string `json:"output_file_id"`
	// The request counts for different statuses within the batch.
	RequestCounts BatchRequestCounts `json:"request_counts"`
	// Metadata for the response; check the presence of optional fields with the
	// [resp.Field.IsPresent] method.
	JSON struct {
		ID resp.Field
		CompletionWindow resp.Field
		CreatedAt resp.Field
		Endpoint resp.Field
		InputFileID resp.Field
		Object resp.Field
		Status resp.Field
		CancelledAt resp.Field
		CancellingAt resp.Field
		CompletedAt resp.Field
		ErrorFileID resp.Field
		Errors resp.Field
		ExpiredAt resp.Field
		ExpiresAt resp.Field
		FailedAt resp.Field
		FinalizingAt resp.Field
		InProgressAt resp.Field
		Metadata resp.Field
		OutputFileID resp.Field
		RequestCounts resp.Field
		ExtraFields map[string]resp.Field
		raw string
	} `json:"-"`
}

// Returns the unmodified JSON received from the API
func (r Batch) RawJSON() string { return r.JSON.raw }
func (r *Batch) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
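
// Illustrative usage sketch, not part of the generated API surface: the JSON
// metadata struct distinguishes omitted optional fields from present ones, and
// RawJSON returns the exact payload received from the API.
func exampleBatchFieldPresence(batch Batch) {
	if batch.JSON.OutputFileID.IsPresent() {
		fmt.Println("output file:", batch.OutputFileID)
	} else {
		fmt.Println("no output file yet; raw response:", batch.RawJSON())
	}
}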

// The current status of the batch.
type BatchStatus string

const (
	BatchStatusValidating BatchStatus = "validating"
	BatchStatusFailed     BatchStatus = "failed"
	BatchStatusInProgress BatchStatus = "in_progress"
	BatchStatusFinalizing BatchStatus = "finalizing"
	BatchStatusCompleted  BatchStatus = "completed"
	BatchStatusExpired    BatchStatus = "expired"
	BatchStatusCancelling BatchStatus = "cancelling"
	BatchStatusCancelled  BatchStatus = "cancelled"
)
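
// Illustrative usage sketch, not part of the generated API surface: a switch
// over the status constants that separates terminal from in-flight states.
func exampleBatchIsTerminal(status BatchStatus) bool {
	switch status {
	case BatchStatusCompleted, BatchStatusFailed, BatchStatusExpired, BatchStatusCancelled:
		return true
	case BatchStatusValidating, BatchStatusInProgress, BatchStatusFinalizing, BatchStatusCancelling:
		return false
	default:
		// Unknown values may appear if the API adds statuses; treat them as in-flight.
		return false
	}
}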

type BatchErrors struct {
	Data []BatchError `json:"data"`
	// The object type, which is always `list`.
	Object string `json:"object"`
	// Metadata for the response; check the presence of optional fields with the
	// [resp.Field.IsPresent] method.
	JSON struct {
		Data resp.Field
		Object resp.Field
		ExtraFields map[string]resp.Field
		raw string
	} `json:"-"`
}

// Returns the unmodified JSON received from the API
func (r BatchErrors) RawJSON() string { return r.JSON.raw }
func (r *BatchErrors) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}

type BatchError struct {
	// An error code identifying the error type.
	Code string `json:"code"`
	// The line number of the input file where the error occurred, if applicable.
	Line int64 `json:"line,nullable"`
	// A human-readable message providing more details about the error.
	Message string `json:"message"`
	// The name of the parameter that caused the error, if applicable.
	Param string `json:"param,nullable"`
	// Metadata for the response; check the presence of optional fields with the
	// [resp.Field.IsPresent] method.
	JSON struct {
		Code resp.Field
		Line resp.Field
		Message resp.Field
		Param resp.Field
		ExtraFields map[string]resp.Field
		raw string
	} `json:"-"`
}

// Returns the unmodified JSON received from the API
func (r BatchError) RawJSON() string { return r.JSON.raw }
func (r *BatchError) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
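
// Illustrative usage sketch, not part of the generated API surface: walk the
// per-request errors attached to a batch. Line and Param are only set when
// applicable.
func exampleBatchErrorsWalk(batch Batch) {
	for _, e := range batch.Errors.Data {
		fmt.Printf("error %s: %s (line %d, param %q)\n", e.Code, e.Message, e.Line, e.Param)
	}
}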

// The request counts for different statuses within the batch.
type BatchRequestCounts struct {
	// Number of requests that have been completed successfully.
	Completed int64 `json:"completed,required"`
	// Number of requests that have failed.
	Failed int64 `json:"failed,required"`
	// Total number of requests in the batch.
	Total int64 `json:"total,required"`
	// Metadata for the response; check the presence of optional fields with the
	// [resp.Field.IsPresent] method.
	JSON struct {
		Completed resp.Field
		Failed resp.Field
		Total resp.Field
		ExtraFields map[string]resp.Field
		raw string
	} `json:"-"`
}

// Returns the unmodified JSON received from the API
func (r BatchRequestCounts) RawJSON() string { return r.JSON.raw }
func (r *BatchRequestCounts) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
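
// Illustrative usage sketch, not part of the generated API surface: derive a
// completion fraction from the request counts, guarding against an empty batch.
func exampleBatchProgress(counts BatchRequestCounts) float64 {
	if counts.Total == 0 {
		return 0
	}
	return float64(counts.Completed+counts.Failed) / float64(counts.Total)
}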

type BatchNewParams struct {
	// The time frame within which the batch should be processed. Currently only `24h`
	// is supported.
	//
	// Any of "24h".
	CompletionWindow BatchNewParamsCompletionWindow `json:"completion_window,omitzero,required"`
	// The endpoint to be used for all requests in the batch. Currently
	// `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions`
	// are supported. Note that `/v1/embeddings` batches are also restricted to a
	// maximum of 50,000 embedding inputs across all requests in the batch.
	//
	// Any of "/v1/responses", "/v1/chat/completions", "/v1/embeddings",
	// "/v1/completions".
	Endpoint BatchNewParamsEndpoint `json:"endpoint,omitzero,required"`
	// The ID of an uploaded file that contains requests for the new batch.
	//
	// See [upload file](https://platform.openai.com/docs/api-reference/files/create)
	// for how to upload a file.
	//
	// Your input file must be formatted as a
	// [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input),
	// and must be uploaded with the purpose `batch`. The file can contain up to 50,000
	// requests, and can be up to 200 MB in size.
	InputFileID string `json:"input_file_id,required"`
	// Set of 16 key-value pairs that can be attached to an object. This can be useful
	// for storing additional information about the object in a structured format, and
	// querying for objects via API or the dashboard.
	//
	// Keys are strings with a maximum length of 64 characters. Values are strings with
	// a maximum length of 512 characters.
	Metadata shared.MetadataParam `json:"metadata,omitzero"`
	paramObj
}

// IsPresent returns true if the field's value is not omitted and not the JSON
// "null". To check if this field is omitted, use [param.IsOmitted].
func (f BatchNewParams) IsPresent() bool { return !param.IsOmitted(f) && !f.IsNull() }

func (r BatchNewParams) MarshalJSON() (data []byte, err error) {
	type shadow BatchNewParams
	return param.MarshalObject(r, (*shadow)(&r))
}

// The time frame within which the batch should be processed. Currently only `24h`
// is supported.
type BatchNewParamsCompletionWindow string

const (
	BatchNewParamsCompletionWindow24h BatchNewParamsCompletionWindow = "24h"
)

// The endpoint to be used for all requests in the batch. Currently
// `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions`
// are supported. Note that `/v1/embeddings` batches are also restricted to a
// maximum of 50,000 embedding inputs across all requests in the batch.
type BatchNewParamsEndpoint string

const (
	BatchNewParamsEndpointV1Responses       BatchNewParamsEndpoint = "/v1/responses"
	BatchNewParamsEndpointV1ChatCompletions BatchNewParamsEndpoint = "/v1/chat/completions"
	BatchNewParamsEndpointV1Embeddings      BatchNewParamsEndpoint = "/v1/embeddings"
	BatchNewParamsEndpointV1Completions     BatchNewParamsEndpoint = "/v1/completions"
)
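
// Illustrative usage sketch, not part of the generated API surface: build
// request params with optional metadata and serialize them with the MarshalJSON
// defined above. This assumes shared.MetadataParam is a string-keyed map of
// string values, as the Metadata field documentation describes.
func exampleBatchNewParamsJSON() ([]byte, error) {
	params := BatchNewParams{
		InputFileID:      "file-abc123", // placeholder file ID
		Endpoint:         BatchNewParamsEndpointV1Embeddings,
		CompletionWindow: BatchNewParamsCompletionWindow24h,
		Metadata:         shared.MetadataParam{"project": "nightly-eval"},
	}
	return params.MarshalJSON()
}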

type BatchListParams struct {
	// A cursor for use in pagination. `after` is an object ID that defines your place
	// in the list. For instance, if you make a list request and receive 100 objects,
	// ending with obj_foo, your subsequent call can include after=obj_foo in order to
	// fetch the next page of the list.
	After param.Opt[string] `query:"after,omitzero" json:"-"`
	// A limit on the number of objects to be returned. Limit can range between 1 and
	// 100, and the default is 20.
	Limit param.Opt[int64] `query:"limit,omitzero" json:"-"`
	paramObj
}

// IsPresent returns true if the field's value is not omitted and not the JSON
// "null". To check if this field is omitted, use [param.IsOmitted].
func (f BatchListParams) IsPresent() bool { return !param.IsOmitted(f) && !f.IsNull() }

// URLQuery serializes [BatchListParams]'s query parameters as `url.Values`.
func (r BatchListParams) URLQuery() (v url.Values) {
	return apiquery.MarshalWithSettings(r, apiquery.QuerySettings{
		ArrayFormat:  apiquery.ArrayQueryFormatBrackets,
		NestedFormat: apiquery.NestedQueryFormatBrackets,
	})
}
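
// Illustrative usage sketch, not part of the generated API surface: serialize
// list params to a query string with the URLQuery method above. This assumes
// the package-level String and Int helpers for populating param.Opt fields.
func exampleBatchListQuery() string {
	params := BatchListParams{
		After: String("batch_abc123"), // placeholder cursor
		Limit: Int(20),
	}
	return params.URLQuery().Encode()
}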