1// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2
3package openai
4
5import (
6 "context"
7 "encoding/json"
8 "errors"
9 "fmt"
10 "net/http"
11 "net/url"
12
13 "github.com/openai/openai-go/internal/apijson"
14 "github.com/openai/openai-go/internal/apiquery"
15 "github.com/openai/openai-go/internal/requestconfig"
16 "github.com/openai/openai-go/option"
17 "github.com/openai/openai-go/packages/pagination"
18 "github.com/openai/openai-go/packages/param"
19 "github.com/openai/openai-go/packages/respjson"
20 "github.com/openai/openai-go/shared"
21 "github.com/openai/openai-go/shared/constant"
22)
23
// FineTuningJobService contains methods and other services that help with
// interacting with the openai API.
//
// Note, unlike clients, this service does not read variables from the environment
// automatically. You should not instantiate this service directly; use the
// [NewFineTuningJobService] method instead.
type FineTuningJobService struct {
	// Options holds the request options applied to every request made by this
	// service, before any request-specific options.
	Options []option.RequestOption
	// Checkpoints provides access to the fine-tuning job checkpoints API.
	Checkpoints FineTuningJobCheckpointService
}
34
35// NewFineTuningJobService generates a new service that applies the given options
36// to each request. These options are applied after the parent client's options (if
37// there is one), and before any request-specific options.
38func NewFineTuningJobService(opts ...option.RequestOption) (r FineTuningJobService) {
39 r = FineTuningJobService{}
40 r.Options = opts
41 r.Checkpoints = NewFineTuningJobCheckpointService(opts...)
42 return
43}
44
45// Creates a fine-tuning job which begins the process of creating a new model from
46// a given dataset.
47//
48// Response includes details of the enqueued job including job status and the name
49// of the fine-tuned models once complete.
50//
51// [Learn more about fine-tuning](https://platform.openai.com/docs/guides/model-optimization)
52func (r *FineTuningJobService) New(ctx context.Context, body FineTuningJobNewParams, opts ...option.RequestOption) (res *FineTuningJob, err error) {
53 opts = append(r.Options[:], opts...)
54 path := "fine_tuning/jobs"
55 err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...)
56 return
57}
58
59// Get info about a fine-tuning job.
60//
61// [Learn more about fine-tuning](https://platform.openai.com/docs/guides/model-optimization)
62func (r *FineTuningJobService) Get(ctx context.Context, fineTuningJobID string, opts ...option.RequestOption) (res *FineTuningJob, err error) {
63 opts = append(r.Options[:], opts...)
64 if fineTuningJobID == "" {
65 err = errors.New("missing required fine_tuning_job_id parameter")
66 return
67 }
68 path := fmt.Sprintf("fine_tuning/jobs/%s", fineTuningJobID)
69 err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...)
70 return
71}
72
73// List your organization's fine-tuning jobs
74func (r *FineTuningJobService) List(ctx context.Context, query FineTuningJobListParams, opts ...option.RequestOption) (res *pagination.CursorPage[FineTuningJob], err error) {
75 var raw *http.Response
76 opts = append(r.Options[:], opts...)
77 opts = append([]option.RequestOption{option.WithResponseInto(&raw)}, opts...)
78 path := "fine_tuning/jobs"
79 cfg, err := requestconfig.NewRequestConfig(ctx, http.MethodGet, path, query, &res, opts...)
80 if err != nil {
81 return nil, err
82 }
83 err = cfg.Execute()
84 if err != nil {
85 return nil, err
86 }
87 res.SetPageConfig(cfg, raw)
88 return res, nil
89}
90
91// List your organization's fine-tuning jobs
92func (r *FineTuningJobService) ListAutoPaging(ctx context.Context, query FineTuningJobListParams, opts ...option.RequestOption) *pagination.CursorPageAutoPager[FineTuningJob] {
93 return pagination.NewCursorPageAutoPager(r.List(ctx, query, opts...))
94}
95
96// Immediately cancel a fine-tune job.
97func (r *FineTuningJobService) Cancel(ctx context.Context, fineTuningJobID string, opts ...option.RequestOption) (res *FineTuningJob, err error) {
98 opts = append(r.Options[:], opts...)
99 if fineTuningJobID == "" {
100 err = errors.New("missing required fine_tuning_job_id parameter")
101 return
102 }
103 path := fmt.Sprintf("fine_tuning/jobs/%s/cancel", fineTuningJobID)
104 err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, nil, &res, opts...)
105 return
106}
107
108// Get status updates for a fine-tuning job.
109func (r *FineTuningJobService) ListEvents(ctx context.Context, fineTuningJobID string, query FineTuningJobListEventsParams, opts ...option.RequestOption) (res *pagination.CursorPage[FineTuningJobEvent], err error) {
110 var raw *http.Response
111 opts = append(r.Options[:], opts...)
112 opts = append([]option.RequestOption{option.WithResponseInto(&raw)}, opts...)
113 if fineTuningJobID == "" {
114 err = errors.New("missing required fine_tuning_job_id parameter")
115 return
116 }
117 path := fmt.Sprintf("fine_tuning/jobs/%s/events", fineTuningJobID)
118 cfg, err := requestconfig.NewRequestConfig(ctx, http.MethodGet, path, query, &res, opts...)
119 if err != nil {
120 return nil, err
121 }
122 err = cfg.Execute()
123 if err != nil {
124 return nil, err
125 }
126 res.SetPageConfig(cfg, raw)
127 return res, nil
128}
129
130// Get status updates for a fine-tuning job.
131func (r *FineTuningJobService) ListEventsAutoPaging(ctx context.Context, fineTuningJobID string, query FineTuningJobListEventsParams, opts ...option.RequestOption) *pagination.CursorPageAutoPager[FineTuningJobEvent] {
132 return pagination.NewCursorPageAutoPager(r.ListEvents(ctx, fineTuningJobID, query, opts...))
133}
134
135// Pause a fine-tune job.
136func (r *FineTuningJobService) Pause(ctx context.Context, fineTuningJobID string, opts ...option.RequestOption) (res *FineTuningJob, err error) {
137 opts = append(r.Options[:], opts...)
138 if fineTuningJobID == "" {
139 err = errors.New("missing required fine_tuning_job_id parameter")
140 return
141 }
142 path := fmt.Sprintf("fine_tuning/jobs/%s/pause", fineTuningJobID)
143 err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, nil, &res, opts...)
144 return
145}
146
147// Resume a fine-tune job.
148func (r *FineTuningJobService) Resume(ctx context.Context, fineTuningJobID string, opts ...option.RequestOption) (res *FineTuningJob, err error) {
149 opts = append(r.Options[:], opts...)
150 if fineTuningJobID == "" {
151 err = errors.New("missing required fine_tuning_job_id parameter")
152 return
153 }
154 path := fmt.Sprintf("fine_tuning/jobs/%s/resume", fineTuningJobID)
155 err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, nil, &res, opts...)
156 return
157}
158
// The `fine_tuning.job` object represents a fine-tuning job that has been created
// through the API.
type FineTuningJob struct {
	// The object identifier, which can be referenced in the API endpoints.
	ID string `json:"id,required"`
	// The Unix timestamp (in seconds) for when the fine-tuning job was created.
	CreatedAt int64 `json:"created_at,required"`
	// For fine-tuning jobs that have `failed`, this will contain more information on
	// the cause of the failure.
	Error FineTuningJobError `json:"error,required"`
	// The name of the fine-tuned model that is being created. The value will be null
	// if the fine-tuning job is still running.
	FineTunedModel string `json:"fine_tuned_model,required"`
	// The Unix timestamp (in seconds) for when the fine-tuning job was finished. The
	// value will be null if the fine-tuning job is still running.
	FinishedAt int64 `json:"finished_at,required"`
	// The hyperparameters used for the fine-tuning job. This value will only be
	// returned when running `supervised` jobs.
	Hyperparameters FineTuningJobHyperparameters `json:"hyperparameters,required"`
	// The base model that is being fine-tuned.
	Model string `json:"model,required"`
	// The object type, which is always "fine_tuning.job".
	Object constant.FineTuningJob `json:"object,required"`
	// The organization that owns the fine-tuning job.
	OrganizationID string `json:"organization_id,required"`
	// The compiled results file ID(s) for the fine-tuning job. You can retrieve the
	// results with the
	// [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents).
	ResultFiles []string `json:"result_files,required"`
	// The seed used for the fine-tuning job.
	Seed int64 `json:"seed,required"`
	// The current status of the fine-tuning job, which can be either
	// `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`.
	//
	// Any of "validating_files", "queued", "running", "succeeded", "failed",
	// "cancelled".
	Status FineTuningJobStatus `json:"status,required"`
	// The total number of billable tokens processed by this fine-tuning job. The value
	// will be null if the fine-tuning job is still running.
	TrainedTokens int64 `json:"trained_tokens,required"`
	// The file ID used for training. You can retrieve the training data with the
	// [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents).
	TrainingFile string `json:"training_file,required"`
	// The file ID used for validation. You can retrieve the validation results with
	// the
	// [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents).
	ValidationFile string `json:"validation_file,required"`
	// The Unix timestamp (in seconds) for when the fine-tuning job is estimated to
	// finish. The value will be null if the fine-tuning job is not running.
	EstimatedFinish int64 `json:"estimated_finish,nullable"`
	// A list of integrations to enable for this fine-tuning job.
	Integrations []FineTuningJobWandbIntegrationObject `json:"integrations,nullable"`
	// Set of 16 key-value pairs that can be attached to an object. This can be useful
	// for storing additional information about the object in a structured format, and
	// querying for objects via API or the dashboard.
	//
	// Keys are strings with a maximum length of 64 characters. Values are strings with
	// a maximum length of 512 characters.
	Metadata shared.Metadata `json:"metadata,nullable"`
	// The method used for fine-tuning.
	Method FineTuningJobMethod `json:"method"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		ID              respjson.Field
		CreatedAt       respjson.Field
		Error           respjson.Field
		FineTunedModel  respjson.Field
		FinishedAt      respjson.Field
		Hyperparameters respjson.Field
		Model           respjson.Field
		Object          respjson.Field
		OrganizationID  respjson.Field
		ResultFiles     respjson.Field
		Seed            respjson.Field
		Status          respjson.Field
		TrainedTokens   respjson.Field
		TrainingFile    respjson.Field
		ValidationFile  respjson.Field
		EstimatedFinish respjson.Field
		Integrations    respjson.Field
		Metadata        respjson.Field
		Method          respjson.Field
		ExtraFields     map[string]respjson.Field
		// raw holds the unmodified JSON payload; expose it via [FineTuningJob.RawJSON].
		raw string
	} `json:"-"`
}

// RawJSON returns the unmodified JSON received from the API.
func (r FineTuningJob) RawJSON() string { return r.JSON.raw }

// UnmarshalJSON decodes the API response, recording per-field metadata in JSON.
func (r *FineTuningJob) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
251
// For fine-tuning jobs that have `failed`, this will contain more information on
// the cause of the failure.
type FineTuningJobError struct {
	// A machine-readable error code.
	Code string `json:"code,required"`
	// A human-readable error message.
	Message string `json:"message,required"`
	// The parameter that was invalid, usually `training_file` or `validation_file`.
	// This field will be null if the failure was not parameter-specific.
	Param string `json:"param,required"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Code        respjson.Field
		Message     respjson.Field
		Param       respjson.Field
		ExtraFields map[string]respjson.Field
		raw         string
	} `json:"-"`
}

// RawJSON returns the unmodified JSON received from the API.
func (r FineTuningJobError) RawJSON() string { return r.JSON.raw }

// UnmarshalJSON decodes the API response, recording per-field metadata in JSON.
func (r *FineTuningJobError) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
277
// The hyperparameters used for the fine-tuning job. This value will only be
// returned when running `supervised` jobs.
type FineTuningJobHyperparameters struct {
	// Number of examples in each batch. A larger batch size means that model
	// parameters are updated less frequently, but with lower variance.
	BatchSize FineTuningJobHyperparametersBatchSizeUnion `json:"batch_size,nullable"`
	// Scaling factor for the learning rate. A smaller learning rate may be useful to
	// avoid overfitting.
	LearningRateMultiplier FineTuningJobHyperparametersLearningRateMultiplierUnion `json:"learning_rate_multiplier"`
	// The number of epochs to train the model for. An epoch refers to one full cycle
	// through the training dataset.
	NEpochs FineTuningJobHyperparametersNEpochsUnion `json:"n_epochs"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		BatchSize              respjson.Field
		LearningRateMultiplier respjson.Field
		NEpochs                respjson.Field
		ExtraFields            map[string]respjson.Field
		raw                    string
	} `json:"-"`
}

// RawJSON returns the unmodified JSON received from the API.
func (r FineTuningJobHyperparameters) RawJSON() string { return r.JSON.raw }

// UnmarshalJSON decodes the API response, recording per-field metadata in JSON.
func (r *FineTuningJobHyperparameters) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
305
// FineTuningJobHyperparametersBatchSizeUnion contains all possible properties and
// values from [constant.Auto], [int64].
//
// Use the methods beginning with 'As' to cast the union to one of its variants.
//
// If the underlying value is not a json object, one of the following properties
// will be valid: OfAuto or OfInt.
type FineTuningJobHyperparametersBatchSizeUnion struct {
	// This field will be present if the value is a [constant.Auto] instead of an
	// object.
	OfAuto constant.Auto `json:",inline"`
	// This field will be present if the value is an [int64] instead of an object.
	OfInt int64 `json:",inline"`
	JSON struct {
		OfAuto respjson.Field
		OfInt  respjson.Field
		raw    string
	} `json:"-"`
}

// AsAuto re-decodes the raw JSON as a [constant.Auto] variant.
func (u FineTuningJobHyperparametersBatchSizeUnion) AsAuto() (v constant.Auto) {
	apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
	return
}

// AsInt re-decodes the raw JSON as an [int64] variant.
func (u FineTuningJobHyperparametersBatchSizeUnion) AsInt() (v int64) {
	apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
	return
}

// RawJSON returns the unmodified JSON received from the API.
func (u FineTuningJobHyperparametersBatchSizeUnion) RawJSON() string { return u.JSON.raw }

// UnmarshalJSON decodes the API response into whichever variant matches.
func (r *FineTuningJobHyperparametersBatchSizeUnion) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
342
// FineTuningJobHyperparametersLearningRateMultiplierUnion contains all possible
// properties and values from [constant.Auto], [float64].
//
// Use the methods beginning with 'As' to cast the union to one of its variants.
//
// If the underlying value is not a json object, one of the following properties
// will be valid: OfAuto or OfFloat.
type FineTuningJobHyperparametersLearningRateMultiplierUnion struct {
	// This field will be present if the value is a [constant.Auto] instead of an
	// object.
	OfAuto constant.Auto `json:",inline"`
	// This field will be present if the value is a [float64] instead of an object.
	OfFloat float64 `json:",inline"`
	JSON struct {
		OfAuto  respjson.Field
		OfFloat respjson.Field
		raw     string
	} `json:"-"`
}

// AsAuto re-decodes the raw JSON as a [constant.Auto] variant.
func (u FineTuningJobHyperparametersLearningRateMultiplierUnion) AsAuto() (v constant.Auto) {
	apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
	return
}

// AsFloat re-decodes the raw JSON as a [float64] variant.
func (u FineTuningJobHyperparametersLearningRateMultiplierUnion) AsFloat() (v float64) {
	apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
	return
}

// RawJSON returns the unmodified JSON received from the API.
func (u FineTuningJobHyperparametersLearningRateMultiplierUnion) RawJSON() string { return u.JSON.raw }

// UnmarshalJSON decodes the API response into whichever variant matches.
func (r *FineTuningJobHyperparametersLearningRateMultiplierUnion) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
379
// FineTuningJobHyperparametersNEpochsUnion contains all possible properties and
// values from [constant.Auto], [int64].
//
// Use the methods beginning with 'As' to cast the union to one of its variants.
//
// If the underlying value is not a json object, one of the following properties
// will be valid: OfAuto or OfInt.
type FineTuningJobHyperparametersNEpochsUnion struct {
	// This field will be present if the value is a [constant.Auto] instead of an
	// object.
	OfAuto constant.Auto `json:",inline"`
	// This field will be present if the value is an [int64] instead of an object.
	OfInt int64 `json:",inline"`
	JSON struct {
		OfAuto respjson.Field
		OfInt  respjson.Field
		raw    string
	} `json:"-"`
}

// AsAuto re-decodes the raw JSON as a [constant.Auto] variant.
func (u FineTuningJobHyperparametersNEpochsUnion) AsAuto() (v constant.Auto) {
	apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
	return
}

// AsInt re-decodes the raw JSON as an [int64] variant.
func (u FineTuningJobHyperparametersNEpochsUnion) AsInt() (v int64) {
	apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
	return
}

// RawJSON returns the unmodified JSON received from the API.
func (u FineTuningJobHyperparametersNEpochsUnion) RawJSON() string { return u.JSON.raw }

// UnmarshalJSON decodes the API response into whichever variant matches.
func (r *FineTuningJobHyperparametersNEpochsUnion) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
416
// The current status of the fine-tuning job, which can be either
// `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`.
type FineTuningJobStatus string

// Possible values of [FineTuningJobStatus].
const (
	FineTuningJobStatusValidatingFiles FineTuningJobStatus = "validating_files"
	FineTuningJobStatusQueued          FineTuningJobStatus = "queued"
	FineTuningJobStatusRunning         FineTuningJobStatus = "running"
	FineTuningJobStatusSucceeded       FineTuningJobStatus = "succeeded"
	FineTuningJobStatusFailed          FineTuningJobStatus = "failed"
	FineTuningJobStatusCancelled       FineTuningJobStatus = "cancelled"
)
429
// The method used for fine-tuning.
type FineTuningJobMethod struct {
	// The type of method. Is either `supervised`, `dpo`, or `reinforcement`.
	//
	// Any of "supervised", "dpo", "reinforcement".
	Type string `json:"type,required"`
	// Configuration for the DPO fine-tuning method.
	Dpo DpoMethod `json:"dpo"`
	// Configuration for the reinforcement fine-tuning method.
	Reinforcement ReinforcementMethod `json:"reinforcement"`
	// Configuration for the supervised fine-tuning method.
	Supervised SupervisedMethod `json:"supervised"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Type          respjson.Field
		Dpo           respjson.Field
		Reinforcement respjson.Field
		Supervised    respjson.Field
		ExtraFields   map[string]respjson.Field
		raw           string
	} `json:"-"`
}

// RawJSON returns the unmodified JSON received from the API.
func (r FineTuningJobMethod) RawJSON() string { return r.JSON.raw }

// UnmarshalJSON decodes the API response, recording per-field metadata in JSON.
func (r *FineTuningJobMethod) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
458
// Fine-tuning job event object.
type FineTuningJobEvent struct {
	// The object identifier.
	ID string `json:"id,required"`
	// The Unix timestamp (in seconds) for when the fine-tuning job was created.
	CreatedAt int64 `json:"created_at,required"`
	// The log level of the event.
	//
	// Any of "info", "warn", "error".
	Level FineTuningJobEventLevel `json:"level,required"`
	// The message of the event.
	Message string `json:"message,required"`
	// The object type, which is always "fine_tuning.job.event".
	Object constant.FineTuningJobEvent `json:"object,required"`
	// The data associated with the event.
	Data any `json:"data"`
	// The type of event.
	//
	// Any of "message", "metrics".
	Type FineTuningJobEventType `json:"type"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		ID          respjson.Field
		CreatedAt   respjson.Field
		Level       respjson.Field
		Message     respjson.Field
		Object      respjson.Field
		Data        respjson.Field
		Type        respjson.Field
		ExtraFields map[string]respjson.Field
		raw         string
	} `json:"-"`
}

// RawJSON returns the unmodified JSON received from the API.
func (r FineTuningJobEvent) RawJSON() string { return r.JSON.raw }

// UnmarshalJSON decodes the API response, recording per-field metadata in JSON.
func (r *FineTuningJobEvent) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
498
// The log level of the event.
type FineTuningJobEventLevel string

// Possible values of [FineTuningJobEventLevel].
const (
	FineTuningJobEventLevelInfo  FineTuningJobEventLevel = "info"
	FineTuningJobEventLevelWarn  FineTuningJobEventLevel = "warn"
	FineTuningJobEventLevelError FineTuningJobEventLevel = "error"
)
507
// The type of event.
type FineTuningJobEventType string

// Possible values of [FineTuningJobEventType].
const (
	FineTuningJobEventTypeMessage FineTuningJobEventType = "message"
	FineTuningJobEventTypeMetrics FineTuningJobEventType = "metrics"
)
515
// The settings for your integration with Weights and Biases. This payload
// specifies the project that metrics will be sent to. Optionally, you can set an
// explicit display name for your run, add tags to your run, and set a default
// entity (team, username, etc) to be associated with your run.
type FineTuningJobWandbIntegration struct {
	// The name of the project that the new run will be created under.
	Project string `json:"project,required"`
	// The entity to use for the run. This allows you to set the team or username of
	// the WandB user that you would like associated with the run. If not set, the
	// default entity for the registered WandB API key is used.
	Entity string `json:"entity,nullable"`
	// A display name to set for the run. If not set, we will use the Job ID as the
	// name.
	Name string `json:"name,nullable"`
	// A list of tags to be attached to the newly created run. These tags are passed
	// through directly to WandB. Some default tags are generated by OpenAI:
	// "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}".
	Tags []string `json:"tags"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Project     respjson.Field
		Entity      respjson.Field
		Name        respjson.Field
		Tags        respjson.Field
		ExtraFields map[string]respjson.Field
		raw         string
	} `json:"-"`
}

// RawJSON returns the unmodified JSON received from the API.
func (r FineTuningJobWandbIntegration) RawJSON() string { return r.JSON.raw }

// UnmarshalJSON decodes the API response, recording per-field metadata in JSON.
func (r *FineTuningJobWandbIntegration) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
550
// FineTuningJobWandbIntegrationObject is a single integration entry enabled on a
// fine-tuning job; currently only the Weights and Biases ("wandb") type exists.
type FineTuningJobWandbIntegrationObject struct {
	// The type of the integration being enabled for the fine-tuning job.
	Type constant.Wandb `json:"type,required"`
	// The settings for your integration with Weights and Biases. This payload
	// specifies the project that metrics will be sent to. Optionally, you can set an
	// explicit display name for your run, add tags to your run, and set a default
	// entity (team, username, etc) to be associated with your run.
	Wandb FineTuningJobWandbIntegration `json:"wandb,required"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Type        respjson.Field
		Wandb       respjson.Field
		ExtraFields map[string]respjson.Field
		raw         string
	} `json:"-"`
}

// RawJSON returns the unmodified JSON received from the API.
func (r FineTuningJobWandbIntegrationObject) RawJSON() string { return r.JSON.raw }

// UnmarshalJSON decodes the API response, recording per-field metadata in JSON.
func (r *FineTuningJobWandbIntegrationObject) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
573
// FineTuningJobNewParams is the request body for [FineTuningJobService.New].
type FineTuningJobNewParams struct {
	// The name of the model to fine-tune. You can select one of the
	// [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned).
	Model FineTuningJobNewParamsModel `json:"model,omitzero,required"`
	// The ID of an uploaded file that contains training data.
	//
	// See [upload file](https://platform.openai.com/docs/api-reference/files/create)
	// for how to upload a file.
	//
	// Your dataset must be formatted as a JSONL file. Additionally, you must upload
	// your file with the purpose `fine-tune`.
	//
	// The contents of the file should differ depending on if the model uses the
	// [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input),
	// [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input)
	// format, or if the fine-tuning method uses the
	// [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input)
	// format.
	//
	// See the
	// [fine-tuning guide](https://platform.openai.com/docs/guides/model-optimization)
	// for more details.
	TrainingFile string `json:"training_file,required"`
	// The seed controls the reproducibility of the job. Passing in the same seed and
	// job parameters should produce the same results, but may differ in rare cases. If
	// a seed is not specified, one will be generated for you.
	Seed param.Opt[int64] `json:"seed,omitzero"`
	// A string of up to 64 characters that will be added to your fine-tuned model
	// name.
	//
	// For example, a `suffix` of "custom-model-name" would produce a model name like
	// `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`.
	Suffix param.Opt[string] `json:"suffix,omitzero"`
	// The ID of an uploaded file that contains validation data.
	//
	// If you provide this file, the data is used to generate validation metrics
	// periodically during fine-tuning. These metrics can be viewed in the fine-tuning
	// results file. The same data should not be present in both train and validation
	// files.
	//
	// Your dataset must be formatted as a JSONL file. You must upload your file with
	// the purpose `fine-tune`.
	//
	// See the
	// [fine-tuning guide](https://platform.openai.com/docs/guides/model-optimization)
	// for more details.
	ValidationFile param.Opt[string] `json:"validation_file,omitzero"`
	// A list of integrations to enable for your fine-tuning job.
	Integrations []FineTuningJobNewParamsIntegration `json:"integrations,omitzero"`
	// Set of 16 key-value pairs that can be attached to an object. This can be useful
	// for storing additional information about the object in a structured format, and
	// querying for objects via API or the dashboard.
	//
	// Keys are strings with a maximum length of 64 characters. Values are strings with
	// a maximum length of 512 characters.
	Metadata shared.Metadata `json:"metadata,omitzero"`
	// The hyperparameters used for the fine-tuning job. This value is now deprecated
	// in favor of `method`, and should be passed in under the `method` parameter.
	Hyperparameters FineTuningJobNewParamsHyperparameters `json:"hyperparameters,omitzero"`
	// The method used for fine-tuning.
	Method FineTuningJobNewParamsMethod `json:"method,omitzero"`
	paramObj
}

// MarshalJSON serializes the params, honoring omitzero/required semantics.
func (r FineTuningJobNewParams) MarshalJSON() (data []byte, err error) {
	type shadow FineTuningJobNewParams
	return param.MarshalObject(r, (*shadow)(&r))
}

// UnmarshalJSON restores params previously produced by MarshalJSON.
func (r *FineTuningJobNewParams) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
645
// The name of the model to fine-tune. You can select one of the
// [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned).
type FineTuningJobNewParamsModel string

// Possible values of [FineTuningJobNewParamsModel].
const (
	FineTuningJobNewParamsModelBabbage002  FineTuningJobNewParamsModel = "babbage-002"
	FineTuningJobNewParamsModelDavinci002  FineTuningJobNewParamsModel = "davinci-002"
	FineTuningJobNewParamsModelGPT3_5Turbo FineTuningJobNewParamsModel = "gpt-3.5-turbo"
	FineTuningJobNewParamsModelGPT4oMini   FineTuningJobNewParamsModel = "gpt-4o-mini"
)
656
// The hyperparameters used for the fine-tuning job. This value is now deprecated
// in favor of `method`, and should be passed in under the `method` parameter.
//
// Deprecated: pass hyperparameters under the `method` parameter instead.
type FineTuningJobNewParamsHyperparameters struct {
	// Number of examples in each batch. A larger batch size means that model
	// parameters are updated less frequently, but with lower variance.
	BatchSize FineTuningJobNewParamsHyperparametersBatchSizeUnion `json:"batch_size,omitzero"`
	// Scaling factor for the learning rate. A smaller learning rate may be useful to
	// avoid overfitting.
	LearningRateMultiplier FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion `json:"learning_rate_multiplier,omitzero"`
	// The number of epochs to train the model for. An epoch refers to one full cycle
	// through the training dataset.
	NEpochs FineTuningJobNewParamsHyperparametersNEpochsUnion `json:"n_epochs,omitzero"`
	paramObj
}

// MarshalJSON serializes the params, honoring omitzero semantics.
func (r FineTuningJobNewParamsHyperparameters) MarshalJSON() (data []byte, err error) {
	type shadow FineTuningJobNewParamsHyperparameters
	return param.MarshalObject(r, (*shadow)(&r))
}

// UnmarshalJSON restores params previously produced by MarshalJSON.
func (r *FineTuningJobNewParamsHyperparameters) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
681
// Only one field can be non-zero.
//
// Use [param.IsOmitted] to confirm if a field is set.
type FineTuningJobNewParamsHyperparametersBatchSizeUnion struct {
	// Construct this variant with constant.ValueOf[constant.Auto]().
	OfAuto constant.Auto `json:",omitzero,inline"`
	OfInt  param.Opt[int64] `json:",omitzero,inline"`
	paramUnion
}

// MarshalJSON serializes whichever variant of the union is set.
func (u FineTuningJobNewParamsHyperparametersBatchSizeUnion) MarshalJSON() ([]byte, error) {
	return param.MarshalUnion(u, u.OfAuto, u.OfInt)
}

// UnmarshalJSON decodes data into whichever variant matches.
func (u *FineTuningJobNewParamsHyperparametersBatchSizeUnion) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, u)
}

// asAny returns a pointer to the variant that is set, or nil when neither is.
func (u *FineTuningJobNewParamsHyperparametersBatchSizeUnion) asAny() any {
	if !param.IsOmitted(u.OfAuto) {
		return &u.OfAuto
	} else if !param.IsOmitted(u.OfInt) {
		return &u.OfInt.Value
	}
	return nil
}
707
// Only one field can be non-zero.
//
// Use [param.IsOmitted] to confirm if a field is set.
type FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion struct {
	// Construct this variant with constant.ValueOf[constant.Auto]().
	OfAuto  constant.Auto `json:",omitzero,inline"`
	OfFloat param.Opt[float64] `json:",omitzero,inline"`
	paramUnion
}

// MarshalJSON serializes whichever variant of the union is set.
func (u FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion) MarshalJSON() ([]byte, error) {
	return param.MarshalUnion(u, u.OfAuto, u.OfFloat)
}

// UnmarshalJSON decodes data into whichever variant matches.
func (u *FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, u)
}

// asAny returns a pointer to the variant that is set, or nil when neither is.
func (u *FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion) asAny() any {
	if !param.IsOmitted(u.OfAuto) {
		return &u.OfAuto
	} else if !param.IsOmitted(u.OfFloat) {
		return &u.OfFloat.Value
	}
	return nil
}
733
// FineTuningJobNewParamsHyperparametersNEpochsUnion is the n_epochs
// hyperparameter: either the literal string "auto" or an explicit integer.
// Only one field can be non-zero.
//
// Use [param.IsOmitted] to confirm if a field is set.
type FineTuningJobNewParamsHyperparametersNEpochsUnion struct {
	// Construct this variant with constant.ValueOf[constant.Auto]()
	OfAuto constant.Auto `json:",omitzero,inline"`
	OfInt  param.Opt[int64] `json:",omitzero,inline"`
	// paramUnion embeds shared union bookkeeping (declared elsewhere in this package).
	paramUnion
}
743
// MarshalJSON serializes the union via param.MarshalUnion, emitting whichever
// of the listed variants (OfAuto, OfInt) is set.
func (u FineTuningJobNewParamsHyperparametersNEpochsUnion) MarshalJSON() ([]byte, error) {
	return param.MarshalUnion(u, u.OfAuto, u.OfInt)
}
// UnmarshalJSON decodes raw JSON into the union in place.
func (u *FineTuningJobNewParamsHyperparametersNEpochsUnion) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, u)
}
750
751func (u *FineTuningJobNewParamsHyperparametersNEpochsUnion) asAny() any {
752 if !param.IsOmitted(u.OfAuto) {
753 return &u.OfAuto
754 } else if !param.IsOmitted(u.OfInt) {
755 return &u.OfInt.Value
756 }
757 return nil
758}
759
// FineTuningJobNewParamsIntegration describes one integration to enable for a
// fine-tuning job. The properties Type, Wandb are required.
type FineTuningJobNewParamsIntegration struct {
	// The settings for your integration with Weights and Biases. This payload
	// specifies the project that metrics will be sent to. Optionally, you can set an
	// explicit display name for your run, add tags to your run, and set a default
	// entity (team, username, etc) to be associated with your run.
	Wandb FineTuningJobNewParamsIntegrationWandb `json:"wandb,omitzero,required"`
	// The type of integration to enable. Currently, only "wandb" (Weights and Biases)
	// is supported.
	//
	// This field can be elided, and will marshal its zero value as "wandb".
	Type constant.Wandb `json:"type,required"`
	// paramObj embeds shared object bookkeeping (declared elsewhere in this package).
	paramObj
}
774
775func (r FineTuningJobNewParamsIntegration) MarshalJSON() (data []byte, err error) {
776 type shadow FineTuningJobNewParamsIntegration
777 return param.MarshalObject(r, (*shadow)(&r))
778}
// UnmarshalJSON decodes raw JSON into the integration params object in place.
func (r *FineTuningJobNewParamsIntegration) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
782
// The settings for your integration with Weights and Biases. This payload
// specifies the project that metrics will be sent to. Optionally, you can set an
// explicit display name for your run, add tags to your run, and set a default
// entity (team, username, etc) to be associated with your run.
//
// The property Project is required.
type FineTuningJobNewParamsIntegrationWandb struct {
	// The name of the project that the new run will be created under.
	Project string `json:"project,required"`
	// The entity to use for the run. This allows you to set the team or username of
	// the WandB user that you would like associated with the run. If not set, the
	// default entity for the registered WandB API key is used.
	Entity param.Opt[string] `json:"entity,omitzero"`
	// A display name to set for the run. If not set, we will use the Job ID as the
	// name.
	Name param.Opt[string] `json:"name,omitzero"`
	// A list of tags to be attached to the newly created run. These tags are passed
	// through directly to WandB. Some default tags are generated by OpenAI:
	// "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}".
	Tags []string `json:"tags,omitzero"`
	// paramObj embeds shared object bookkeeping (declared elsewhere in this package).
	paramObj
}
805
806func (r FineTuningJobNewParamsIntegrationWandb) MarshalJSON() (data []byte, err error) {
807 type shadow FineTuningJobNewParamsIntegrationWandb
808 return param.MarshalObject(r, (*shadow)(&r))
809}
// UnmarshalJSON decodes raw JSON into the Weights and Biases settings object in
// place.
func (r *FineTuningJobNewParamsIntegrationWandb) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
813
// The method used for fine-tuning.
//
// The property Type is required.
type FineTuningJobNewParamsMethod struct {
	// The type of method. Is either `supervised`, `dpo`, or `reinforcement`.
	//
	// Any of "supervised", "dpo", "reinforcement".
	Type string `json:"type,omitzero,required"`
	// Configuration for the DPO fine-tuning method.
	Dpo DpoMethodParam `json:"dpo,omitzero"`
	// Configuration for the reinforcement fine-tuning method.
	Reinforcement ReinforcementMethodParam `json:"reinforcement,omitzero"`
	// Configuration for the supervised fine-tuning method.
	Supervised SupervisedMethodParam `json:"supervised,omitzero"`
	// paramObj embeds shared object bookkeeping (declared elsewhere in this package).
	paramObj
}
830
831func (r FineTuningJobNewParamsMethod) MarshalJSON() (data []byte, err error) {
832 type shadow FineTuningJobNewParamsMethod
833 return param.MarshalObject(r, (*shadow)(&r))
834}
// UnmarshalJSON decodes raw JSON into the fine-tuning method object in place.
func (r *FineTuningJobNewParamsMethod) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}
838
// Register the accepted values for FineTuningJobNewParamsMethod's "type"
// field so decoding can validate it against the allowed enum members.
func init() {
	apijson.RegisterFieldValidator[FineTuningJobNewParamsMethod](
		"type", "supervised", "dpo", "reinforcement",
	)
}
844
// FineTuningJobListParams holds the query parameters for listing fine-tuning
// jobs. Fields tagged `json:"-"` are sent only as URL query parameters.
type FineTuningJobListParams struct {
	// Identifier for the last job from the previous pagination request.
	After param.Opt[string] `query:"after,omitzero" json:"-"`
	// Number of fine-tuning jobs to retrieve.
	Limit param.Opt[int64] `query:"limit,omitzero" json:"-"`
	// Optional metadata filter. To filter, use the syntax `metadata[k]=v`.
	// Alternatively, set `metadata=null` to indicate no metadata.
	Metadata map[string]string `query:"metadata,omitzero" json:"-"`
	// paramObj embeds shared object bookkeeping (declared elsewhere in this package).
	paramObj
}
855
856// URLQuery serializes [FineTuningJobListParams]'s query parameters as
857// `url.Values`.
858func (r FineTuningJobListParams) URLQuery() (v url.Values, err error) {
859 return apiquery.MarshalWithSettings(r, apiquery.QuerySettings{
860 ArrayFormat: apiquery.ArrayQueryFormatBrackets,
861 NestedFormat: apiquery.NestedQueryFormatBrackets,
862 })
863}
864
// FineTuningJobListEventsParams holds the query parameters for listing a
// fine-tuning job's events. Fields tagged `json:"-"` are sent only as URL
// query parameters.
type FineTuningJobListEventsParams struct {
	// Identifier for the last event from the previous pagination request.
	After param.Opt[string] `query:"after,omitzero" json:"-"`
	// Number of events to retrieve.
	Limit param.Opt[int64] `query:"limit,omitzero" json:"-"`
	// paramObj embeds shared object bookkeeping (declared elsewhere in this package).
	paramObj
}
872
873// URLQuery serializes [FineTuningJobListEventsParams]'s query parameters as
874// `url.Values`.
875func (r FineTuningJobListEventsParams) URLQuery() (v url.Values, err error) {
876 return apiquery.MarshalWithSettings(r, apiquery.QuerySettings{
877 ArrayFormat: apiquery.ArrayQueryFormatBrackets,
878 NestedFormat: apiquery.NestedQueryFormatBrackets,
879 })
880}