1// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2
3package openai
4
5import (
6 "context"
7 "errors"
8 "fmt"
9 "net/http"
10 "net/url"
11
12 "github.com/openai/openai-go/internal/apijson"
13 "github.com/openai/openai-go/internal/apiquery"
14 "github.com/openai/openai-go/internal/requestconfig"
15 "github.com/openai/openai-go/option"
16 "github.com/openai/openai-go/packages/pagination"
17 "github.com/openai/openai-go/packages/param"
18 "github.com/openai/openai-go/packages/respjson"
19 "github.com/openai/openai-go/shared"
20 "github.com/openai/openai-go/shared/constant"
21)
22
// BatchService contains methods and other services that help with interacting with
// the openai API.
//
// Note, unlike clients, this service does not read variables from the environment
// automatically. You should not instantiate this service directly, and instead use
// the [NewBatchService] method instead.
type BatchService struct {
	// Options holds the request options that are applied to every request made
	// through this service, before any per-call options.
	Options []option.RequestOption
}
32
33// NewBatchService generates a new service that applies the given options to each
34// request. These options are applied after the parent client's options (if there
35// is one), and before any request-specific options.
36func NewBatchService(opts ...option.RequestOption) (r BatchService) {
37 r = BatchService{}
38 r.Options = opts
39 return
40}
41
42// Creates and executes a batch from an uploaded file of requests
43func (r *BatchService) New(ctx context.Context, body BatchNewParams, opts ...option.RequestOption) (res *Batch, err error) {
44 opts = append(r.Options[:], opts...)
45 path := "batches"
46 err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...)
47 return
48}
49
50// Retrieves a batch.
51func (r *BatchService) Get(ctx context.Context, batchID string, opts ...option.RequestOption) (res *Batch, err error) {
52 opts = append(r.Options[:], opts...)
53 if batchID == "" {
54 err = errors.New("missing required batch_id parameter")
55 return
56 }
57 path := fmt.Sprintf("batches/%s", batchID)
58 err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...)
59 return
60}
61
62// List your organization's batches.
63func (r *BatchService) List(ctx context.Context, query BatchListParams, opts ...option.RequestOption) (res *pagination.CursorPage[Batch], err error) {
64 var raw *http.Response
65 opts = append(r.Options[:], opts...)
66 opts = append([]option.RequestOption{option.WithResponseInto(&raw)}, opts...)
67 path := "batches"
68 cfg, err := requestconfig.NewRequestConfig(ctx, http.MethodGet, path, query, &res, opts...)
69 if err != nil {
70 return nil, err
71 }
72 err = cfg.Execute()
73 if err != nil {
74 return nil, err
75 }
76 res.SetPageConfig(cfg, raw)
77 return res, nil
78}
79
80// List your organization's batches.
81func (r *BatchService) ListAutoPaging(ctx context.Context, query BatchListParams, opts ...option.RequestOption) *pagination.CursorPageAutoPager[Batch] {
82 return pagination.NewCursorPageAutoPager(r.List(ctx, query, opts...))
83}
84
85// Cancels an in-progress batch. The batch will be in status `cancelling` for up to
86// 10 minutes, before changing to `cancelled`, where it will have partial results
87// (if any) available in the output file.
88func (r *BatchService) Cancel(ctx context.Context, batchID string, opts ...option.RequestOption) (res *Batch, err error) {
89 opts = append(r.Options[:], opts...)
90 if batchID == "" {
91 err = errors.New("missing required batch_id parameter")
92 return
93 }
94 path := fmt.Sprintf("batches/%s/cancel", batchID)
95 err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, nil, &res, opts...)
96 return
97}
98
// Batch describes a batch job and its processing state, as returned by the
// batches endpoints.
type Batch struct {
	// The ID of the batch.
	ID string `json:"id,required"`
	// The time frame within which the batch should be processed.
	CompletionWindow string `json:"completion_window,required"`
	// The Unix timestamp (in seconds) for when the batch was created.
	CreatedAt int64 `json:"created_at,required"`
	// The OpenAI API endpoint used by the batch.
	Endpoint string `json:"endpoint,required"`
	// The ID of the input file for the batch.
	InputFileID string `json:"input_file_id,required"`
	// The object type, which is always `batch`.
	Object constant.Batch `json:"object,required"`
	// The current status of the batch.
	//
	// Any of "validating", "failed", "in_progress", "finalizing", "completed",
	// "expired", "cancelling", "cancelled".
	Status BatchStatus `json:"status,required"`
	// The Unix timestamp (in seconds) for when the batch was cancelled.
	CancelledAt int64 `json:"cancelled_at"`
	// The Unix timestamp (in seconds) for when the batch started cancelling.
	CancellingAt int64 `json:"cancelling_at"`
	// The Unix timestamp (in seconds) for when the batch was completed.
	CompletedAt int64 `json:"completed_at"`
	// The ID of the file containing the outputs of requests with errors.
	ErrorFileID string `json:"error_file_id"`
	// Errors reported for the batch, if any; see [BatchErrors].
	Errors BatchErrors `json:"errors"`
	// The Unix timestamp (in seconds) for when the batch expired.
	ExpiredAt int64 `json:"expired_at"`
	// The Unix timestamp (in seconds) for when the batch will expire.
	ExpiresAt int64 `json:"expires_at"`
	// The Unix timestamp (in seconds) for when the batch failed.
	FailedAt int64 `json:"failed_at"`
	// The Unix timestamp (in seconds) for when the batch started finalizing.
	FinalizingAt int64 `json:"finalizing_at"`
	// The Unix timestamp (in seconds) for when the batch started processing.
	InProgressAt int64 `json:"in_progress_at"`
	// Set of 16 key-value pairs that can be attached to an object. This can be useful
	// for storing additional information about the object in a structured format, and
	// querying for objects via API or the dashboard.
	//
	// Keys are strings with a maximum length of 64 characters. Values are strings with
	// a maximum length of 512 characters.
	Metadata shared.Metadata `json:"metadata,nullable"`
	// The ID of the file containing the outputs of successfully executed requests.
	OutputFileID string `json:"output_file_id"`
	// The request counts for different statuses within the batch.
	RequestCounts BatchRequestCounts `json:"request_counts"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		ID               respjson.Field
		CompletionWindow respjson.Field
		CreatedAt        respjson.Field
		Endpoint         respjson.Field
		InputFileID      respjson.Field
		Object           respjson.Field
		Status           respjson.Field
		CancelledAt      respjson.Field
		CancellingAt     respjson.Field
		CompletedAt      respjson.Field
		ErrorFileID      respjson.Field
		Errors           respjson.Field
		ExpiredAt        respjson.Field
		ExpiresAt        respjson.Field
		FailedAt         respjson.Field
		FinalizingAt     respjson.Field
		InProgressAt     respjson.Field
		Metadata         respjson.Field
		ExtraFields      map[string]respjson.Field
		// raw holds the exact JSON bytes received from the API; exposed via RawJSON.
		raw string
	} `json:"-"`
}
172
173// Returns the unmodified JSON received from the API
174func (r Batch) RawJSON() string { return r.JSON.raw }
175func (r *Batch) UnmarshalJSON(data []byte) error {
176 return apijson.UnmarshalRoot(data, r)
177}
178
// BatchStatus is the current status of a batch.
type BatchStatus string

// Values accepted for [BatchStatus].
const (
	BatchStatusValidating BatchStatus = "validating"
	BatchStatusFailed     BatchStatus = "failed"
	BatchStatusInProgress BatchStatus = "in_progress"
	BatchStatusFinalizing BatchStatus = "finalizing"
	BatchStatusCompleted  BatchStatus = "completed"
	BatchStatusExpired    BatchStatus = "expired"
	BatchStatusCancelling BatchStatus = "cancelling"
	BatchStatusCancelled  BatchStatus = "cancelled"
)
192
// BatchErrors is the list of errors attached to a [Batch].
type BatchErrors struct {
	// The individual error entries, if any.
	Data []BatchError `json:"data"`
	// The object type, which is always `list`.
	Object string `json:"object"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Data        respjson.Field
		Object      respjson.Field
		ExtraFields map[string]respjson.Field
		raw         string
	} `json:"-"`
}
205
206// Returns the unmodified JSON received from the API
207func (r BatchErrors) RawJSON() string { return r.JSON.raw }
208func (r *BatchErrors) UnmarshalJSON(data []byte) error {
209 return apijson.UnmarshalRoot(data, r)
210}
211
// BatchError is a single error entry within [BatchErrors].
type BatchError struct {
	// An error code identifying the error type.
	Code string `json:"code"`
	// The line number of the input file where the error occurred, if applicable.
	Line int64 `json:"line,nullable"`
	// A human-readable message providing more details about the error.
	Message string `json:"message"`
	// The name of the parameter that caused the error, if applicable.
	Param string `json:"param,nullable"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Code        respjson.Field
		Line        respjson.Field
		Message     respjson.Field
		Param       respjson.Field
		ExtraFields map[string]respjson.Field
		raw         string
	} `json:"-"`
}
231
232// Returns the unmodified JSON received from the API
233func (r BatchError) RawJSON() string { return r.JSON.raw }
234func (r *BatchError) UnmarshalJSON(data []byte) error {
235 return apijson.UnmarshalRoot(data, r)
236}
237
// BatchRequestCounts is the request counts for different statuses within the
// batch.
type BatchRequestCounts struct {
	// Number of requests that have been completed successfully.
	Completed int64 `json:"completed,required"`
	// Number of requests that have failed.
	Failed int64 `json:"failed,required"`
	// Total number of requests in the batch.
	Total int64 `json:"total,required"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Completed   respjson.Field
		Failed      respjson.Field
		Total       respjson.Field
		ExtraFields map[string]respjson.Field
		raw         string
	} `json:"-"`
}
255
256// Returns the unmodified JSON received from the API
257func (r BatchRequestCounts) RawJSON() string { return r.JSON.raw }
258func (r *BatchRequestCounts) UnmarshalJSON(data []byte) error {
259 return apijson.UnmarshalRoot(data, r)
260}
261
// BatchNewParams is the request body for [BatchService.New].
type BatchNewParams struct {
	// The time frame within which the batch should be processed. Currently only `24h`
	// is supported.
	//
	// Any of "24h".
	CompletionWindow BatchNewParamsCompletionWindow `json:"completion_window,omitzero,required"`
	// The endpoint to be used for all requests in the batch. Currently
	// `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions`
	// are supported. Note that `/v1/embeddings` batches are also restricted to a
	// maximum of 50,000 embedding inputs across all requests in the batch.
	//
	// Any of "/v1/responses", "/v1/chat/completions", "/v1/embeddings",
	// "/v1/completions".
	Endpoint BatchNewParamsEndpoint `json:"endpoint,omitzero,required"`
	// The ID of an uploaded file that contains requests for the new batch.
	//
	// See [upload file](https://platform.openai.com/docs/api-reference/files/create)
	// for how to upload a file.
	//
	// Your input file must be formatted as a
	// [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input),
	// and must be uploaded with the purpose `batch`. The file can contain up to 50,000
	// requests, and can be up to 200 MB in size.
	InputFileID string `json:"input_file_id,required"`
	// Set of 16 key-value pairs that can be attached to an object. This can be useful
	// for storing additional information about the object in a structured format, and
	// querying for objects via API or the dashboard.
	//
	// Keys are strings with a maximum length of 64 characters. Values are strings with
	// a maximum length of 512 characters.
	Metadata shared.Metadata `json:"metadata,omitzero"`
	paramObj
}
295
296func (r BatchNewParams) MarshalJSON() (data []byte, err error) {
297 type shadow BatchNewParams
298 return param.MarshalObject(r, (*shadow)(&r))
299}
300func (r *BatchNewParams) UnmarshalJSON(data []byte) error {
301 return apijson.UnmarshalRoot(data, r)
302}
303
// BatchNewParamsCompletionWindow is the time frame within which the batch should
// be processed. Currently only `24h` is supported.
type BatchNewParamsCompletionWindow string

// Values accepted for [BatchNewParamsCompletionWindow].
const (
	BatchNewParamsCompletionWindow24h BatchNewParamsCompletionWindow = "24h"
)
311
// BatchNewParamsEndpoint is the endpoint to be used for all requests in the
// batch. Currently `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and
// `/v1/completions` are supported. Note that `/v1/embeddings` batches are also
// restricted to a maximum of 50,000 embedding inputs across all requests in the
// batch.
type BatchNewParamsEndpoint string

// Values accepted for [BatchNewParamsEndpoint].
const (
	BatchNewParamsEndpointV1Responses       BatchNewParamsEndpoint = "/v1/responses"
	BatchNewParamsEndpointV1ChatCompletions BatchNewParamsEndpoint = "/v1/chat/completions"
	BatchNewParamsEndpointV1Embeddings      BatchNewParamsEndpoint = "/v1/embeddings"
	BatchNewParamsEndpointV1Completions     BatchNewParamsEndpoint = "/v1/completions"
)
324
// BatchListParams holds the query parameters for [BatchService.List].
type BatchListParams struct {
	// A cursor for use in pagination. `after` is an object ID that defines your place
	// in the list. For instance, if you make a list request and receive 100 objects,
	// ending with obj_foo, your subsequent call can include after=obj_foo in order to
	// fetch the next page of the list.
	After param.Opt[string] `query:"after,omitzero" json:"-"`
	// A limit on the number of objects to be returned. Limit can range between 1 and
	// 100, and the default is 20.
	Limit param.Opt[int64] `query:"limit,omitzero" json:"-"`
	paramObj
}
336
337// URLQuery serializes [BatchListParams]'s query parameters as `url.Values`.
338func (r BatchListParams) URLQuery() (v url.Values, err error) {
339 return apiquery.MarshalWithSettings(r, apiquery.QuerySettings{
340 ArrayFormat: apiquery.ArrayQueryFormatBrackets,
341 NestedFormat: apiquery.NestedQueryFormatBrackets,
342 })
343}