1// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2
3package openai
4
5import (
6 "context"
7 "encoding/json"
8 "errors"
9 "fmt"
10 "net/http"
11 "net/url"
12
13 "github.com/openai/openai-go/internal/apijson"
14 "github.com/openai/openai-go/internal/apiquery"
15 "github.com/openai/openai-go/internal/requestconfig"
16 "github.com/openai/openai-go/option"
17 "github.com/openai/openai-go/packages/pagination"
18 "github.com/openai/openai-go/packages/param"
19 "github.com/openai/openai-go/packages/resp"
20 "github.com/openai/openai-go/shared"
21 "github.com/openai/openai-go/shared/constant"
22)
23
24// FineTuningJobService contains methods and other services that help with
25// interacting with the openai API.
26//
// Note, unlike clients, this service does not read variables from the environment
// automatically. You should not instantiate this service directly; use
// [NewFineTuningJobService] instead.
30type FineTuningJobService struct {
31 Options []option.RequestOption
32 Checkpoints FineTuningJobCheckpointService
33}
34
35// NewFineTuningJobService generates a new service that applies the given options
36// to each request. These options are applied after the parent client's options (if
37// there is one), and before any request-specific options.
38func NewFineTuningJobService(opts ...option.RequestOption) (r FineTuningJobService) {
39 r = FineTuningJobService{}
40 r.Options = opts
41 r.Checkpoints = NewFineTuningJobCheckpointService(opts...)
42 return
43}
44
45// Creates a fine-tuning job which begins the process of creating a new model from
46// a given dataset.
47//
// Response includes details of the enqueued job, including job status and the
// name of the fine-tuned models once complete.
50//
51// [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning)
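//
// A minimal usage sketch (not exhaustive): it assumes a configured client value
// (for example from [NewClient]) whose job service is reachable at
// client.FineTuning.Jobs, a context.Context in ctx, and an already-uploaded
// training file; the model name and file ID below are placeholders.
//
//	job, err := client.FineTuning.Jobs.New(ctx, openai.FineTuningJobNewParams{
//		Model:        "gpt-4o-mini-2024-07-18", // base model to fine-tune
//		TrainingFile: "file-abc123",            // file uploaded with purpose "fine-tune"
//	})
//	if err != nil {
//		// handle error
//	}
//	fmt.Println(job.ID, job.Status)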
52func (r *FineTuningJobService) New(ctx context.Context, body FineTuningJobNewParams, opts ...option.RequestOption) (res *FineTuningJob, err error) {
53 opts = append(r.Options[:], opts...)
54 path := "fine_tuning/jobs"
55 err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...)
56 return
57}
58
59// Get info about a fine-tuning job.
60//
61// [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning)
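//
// A minimal usage sketch (assumes a configured client and ctx; the job ID is a
// placeholder):
//
//	job, err := client.FineTuning.Jobs.Get(ctx, "ftjob-abc123")
//	if err != nil {
//		// handle error
//	}
//	fmt.Println(job.Status, job.FineTunedModel)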
62func (r *FineTuningJobService) Get(ctx context.Context, fineTuningJobID string, opts ...option.RequestOption) (res *FineTuningJob, err error) {
63 opts = append(r.Options[:], opts...)
64 if fineTuningJobID == "" {
65 err = errors.New("missing required fine_tuning_job_id parameter")
66 return
67 }
68 path := fmt.Sprintf("fine_tuning/jobs/%s", fineTuningJobID)
69 err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...)
70 return
71}
72
73// List your organization's fine-tuning jobs
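//
// A minimal sketch for fetching the first page (assumes a configured client and
// ctx); subsequent pages can be requested through the returned
// [pagination.CursorPage], or use [FineTuningJobService.ListAutoPaging] to avoid
// manual page handling.
//
//	page, err := client.FineTuning.Jobs.List(ctx, openai.FineTuningJobListParams{})
//	if err != nil {
//		// handle error
//	}
//	for _, job := range page.Data {
//		fmt.Println(job.ID, job.Status)
//	}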
74func (r *FineTuningJobService) List(ctx context.Context, query FineTuningJobListParams, opts ...option.RequestOption) (res *pagination.CursorPage[FineTuningJob], err error) {
75 var raw *http.Response
76 opts = append(r.Options[:], opts...)
77 opts = append([]option.RequestOption{option.WithResponseInto(&raw)}, opts...)
78 path := "fine_tuning/jobs"
79 cfg, err := requestconfig.NewRequestConfig(ctx, http.MethodGet, path, query, &res, opts...)
80 if err != nil {
81 return nil, err
82 }
83 err = cfg.Execute()
84 if err != nil {
85 return nil, err
86 }
87 res.SetPageConfig(cfg, raw)
88 return res, nil
89}
90
91// List your organization's fine-tuning jobs
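//
// A minimal usage sketch (assumes a configured client and ctx); the auto-pager
// fetches additional pages as the loop advances.
//
//	iter := client.FineTuning.Jobs.ListAutoPaging(ctx, openai.FineTuningJobListParams{})
//	for iter.Next() {
//		job := iter.Current()
//		fmt.Println(job.ID, job.Status)
//	}
//	if err := iter.Err(); err != nil {
//		// handle error
//	}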
92func (r *FineTuningJobService) ListAutoPaging(ctx context.Context, query FineTuningJobListParams, opts ...option.RequestOption) *pagination.CursorPageAutoPager[FineTuningJob] {
93 return pagination.NewCursorPageAutoPager(r.List(ctx, query, opts...))
94}
95
96// Immediately cancel a fine-tune job.
97func (r *FineTuningJobService) Cancel(ctx context.Context, fineTuningJobID string, opts ...option.RequestOption) (res *FineTuningJob, err error) {
98 opts = append(r.Options[:], opts...)
99 if fineTuningJobID == "" {
100 err = errors.New("missing required fine_tuning_job_id parameter")
101 return
102 }
103 path := fmt.Sprintf("fine_tuning/jobs/%s/cancel", fineTuningJobID)
104 err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, nil, &res, opts...)
105 return
106}
107
108// Get status updates for a fine-tuning job.
109func (r *FineTuningJobService) ListEvents(ctx context.Context, fineTuningJobID string, query FineTuningJobListEventsParams, opts ...option.RequestOption) (res *pagination.CursorPage[FineTuningJobEvent], err error) {
110 var raw *http.Response
111 opts = append(r.Options[:], opts...)
112 opts = append([]option.RequestOption{option.WithResponseInto(&raw)}, opts...)
113 if fineTuningJobID == "" {
114 err = errors.New("missing required fine_tuning_job_id parameter")
115 return
116 }
117 path := fmt.Sprintf("fine_tuning/jobs/%s/events", fineTuningJobID)
118 cfg, err := requestconfig.NewRequestConfig(ctx, http.MethodGet, path, query, &res, opts...)
119 if err != nil {
120 return nil, err
121 }
122 err = cfg.Execute()
123 if err != nil {
124 return nil, err
125 }
126 res.SetPageConfig(cfg, raw)
127 return res, nil
128}
129
130// Get status updates for a fine-tuning job.
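//
// A minimal usage sketch (assumes a configured client and ctx; the job ID is a
// placeholder):
//
//	events := client.FineTuning.Jobs.ListEventsAutoPaging(ctx, "ftjob-abc123",
//		openai.FineTuningJobListEventsParams{})
//	for events.Next() {
//		ev := events.Current()
//		fmt.Printf("%d %s %s\n", ev.CreatedAt, ev.Level, ev.Message)
//	}
//	if err := events.Err(); err != nil {
//		// handle error
//	}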
131func (r *FineTuningJobService) ListEventsAutoPaging(ctx context.Context, fineTuningJobID string, query FineTuningJobListEventsParams, opts ...option.RequestOption) *pagination.CursorPageAutoPager[FineTuningJobEvent] {
132 return pagination.NewCursorPageAutoPager(r.ListEvents(ctx, fineTuningJobID, query, opts...))
133}
134
135// The `fine_tuning.job` object represents a fine-tuning job that has been created
136// through the API.
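//
// Fields that the API may return as null (for example FineTunedModel or
// FinishedAt while the job is still running) can be checked through the
// response's JSON metadata, as in this sketch (assumes a configured client and
// ctx; the job ID is a placeholder):
//
//	job, err := client.FineTuning.Jobs.Get(ctx, "ftjob-abc123")
//	if err != nil {
//		// handle error
//	}
//	if job.JSON.FineTunedModel.IsPresent() {
//		fmt.Println("fine-tuned model:", job.FineTunedModel)
//	} else {
//		fmt.Println("no fine-tuned model yet")
//	}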
137type FineTuningJob struct {
138 // The object identifier, which can be referenced in the API endpoints.
139 ID string `json:"id,required"`
140 // The Unix timestamp (in seconds) for when the fine-tuning job was created.
141 CreatedAt int64 `json:"created_at,required"`
142 // For fine-tuning jobs that have `failed`, this will contain more information on
143 // the cause of the failure.
144 Error FineTuningJobError `json:"error,required"`
145 // The name of the fine-tuned model that is being created. The value will be null
146 // if the fine-tuning job is still running.
147 FineTunedModel string `json:"fine_tuned_model,required"`
148 // The Unix timestamp (in seconds) for when the fine-tuning job was finished. The
149 // value will be null if the fine-tuning job is still running.
150 FinishedAt int64 `json:"finished_at,required"`
151 // The hyperparameters used for the fine-tuning job. This value will only be
152 // returned when running `supervised` jobs.
153 Hyperparameters FineTuningJobHyperparameters `json:"hyperparameters,required"`
154 // The base model that is being fine-tuned.
155 Model string `json:"model,required"`
156 // The object type, which is always "fine_tuning.job".
157 Object constant.FineTuningJob `json:"object,required"`
158 // The organization that owns the fine-tuning job.
159 OrganizationID string `json:"organization_id,required"`
160 // The compiled results file ID(s) for the fine-tuning job. You can retrieve the
161 // results with the
162 // [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents).
163 ResultFiles []string `json:"result_files,required"`
164 // The seed used for the fine-tuning job.
165 Seed int64 `json:"seed,required"`
166 // The current status of the fine-tuning job, which can be either
167 // `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`.
168 //
169 // Any of "validating_files", "queued", "running", "succeeded", "failed",
170 // "cancelled".
171 Status FineTuningJobStatus `json:"status,required"`
172 // The total number of billable tokens processed by this fine-tuning job. The value
173 // will be null if the fine-tuning job is still running.
174 TrainedTokens int64 `json:"trained_tokens,required"`
175 // The file ID used for training. You can retrieve the training data with the
176 // [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents).
177 TrainingFile string `json:"training_file,required"`
178 // The file ID used for validation. You can retrieve the validation results with
179 // the
180 // [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents).
181 ValidationFile string `json:"validation_file,required"`
182 // The Unix timestamp (in seconds) for when the fine-tuning job is estimated to
183 // finish. The value will be null if the fine-tuning job is not running.
184 EstimatedFinish int64 `json:"estimated_finish,nullable"`
185 // A list of integrations to enable for this fine-tuning job.
186 Integrations []FineTuningJobWandbIntegrationObject `json:"integrations,nullable"`
187 // Set of 16 key-value pairs that can be attached to an object. This can be useful
188 // for storing additional information about the object in a structured format, and
189 // querying for objects via API or the dashboard.
190 //
191 // Keys are strings with a maximum length of 64 characters. Values are strings with
192 // a maximum length of 512 characters.
193 Metadata shared.Metadata `json:"metadata,nullable"`
194 // The method used for fine-tuning.
195 Method FineTuningJobMethod `json:"method"`
	// Metadata for the response. Check the presence of optional fields with the
	// [resp.Field.IsPresent] method.
198 JSON struct {
199 ID resp.Field
200 CreatedAt resp.Field
201 Error resp.Field
202 FineTunedModel resp.Field
203 FinishedAt resp.Field
204 Hyperparameters resp.Field
205 Model resp.Field
206 Object resp.Field
207 OrganizationID resp.Field
208 ResultFiles resp.Field
209 Seed resp.Field
210 Status resp.Field
211 TrainedTokens resp.Field
212 TrainingFile resp.Field
213 ValidationFile resp.Field
214 EstimatedFinish resp.Field
215 Integrations resp.Field
216 Metadata resp.Field
217 Method resp.Field
218 ExtraFields map[string]resp.Field
219 raw string
220 } `json:"-"`
221}
222
223// Returns the unmodified JSON received from the API
224func (r FineTuningJob) RawJSON() string { return r.JSON.raw }
225func (r *FineTuningJob) UnmarshalJSON(data []byte) error {
226 return apijson.UnmarshalRoot(data, r)
227}
228
229// For fine-tuning jobs that have `failed`, this will contain more information on
230// the cause of the failure.
231type FineTuningJobError struct {
232 // A machine-readable error code.
233 Code string `json:"code,required"`
234 // A human-readable error message.
235 Message string `json:"message,required"`
236 // The parameter that was invalid, usually `training_file` or `validation_file`.
237 // This field will be null if the failure was not parameter-specific.
238 Param string `json:"param,required"`
	// Metadata for the response. Check the presence of optional fields with the
	// [resp.Field.IsPresent] method.
241 JSON struct {
242 Code resp.Field
243 Message resp.Field
244 Param resp.Field
245 ExtraFields map[string]resp.Field
246 raw string
247 } `json:"-"`
248}
249
250// Returns the unmodified JSON received from the API
251func (r FineTuningJobError) RawJSON() string { return r.JSON.raw }
252func (r *FineTuningJobError) UnmarshalJSON(data []byte) error {
253 return apijson.UnmarshalRoot(data, r)
254}
255
256// The hyperparameters used for the fine-tuning job. This value will only be
257// returned when running `supervised` jobs.
258type FineTuningJobHyperparameters struct {
259 // Number of examples in each batch. A larger batch size means that model
260 // parameters are updated less frequently, but with lower variance.
261 BatchSize FineTuningJobHyperparametersBatchSizeUnion `json:"batch_size"`
262 // Scaling factor for the learning rate. A smaller learning rate may be useful to
263 // avoid overfitting.
264 LearningRateMultiplier FineTuningJobHyperparametersLearningRateMultiplierUnion `json:"learning_rate_multiplier"`
265 // The number of epochs to train the model for. An epoch refers to one full cycle
266 // through the training dataset.
267 NEpochs FineTuningJobHyperparametersNEpochsUnion `json:"n_epochs"`
	// Metadata for the response. Check the presence of optional fields with the
	// [resp.Field.IsPresent] method.
270 JSON struct {
271 BatchSize resp.Field
272 LearningRateMultiplier resp.Field
273 NEpochs resp.Field
274 ExtraFields map[string]resp.Field
275 raw string
276 } `json:"-"`
277}
278
279// Returns the unmodified JSON received from the API
280func (r FineTuningJobHyperparameters) RawJSON() string { return r.JSON.raw }
281func (r *FineTuningJobHyperparameters) UnmarshalJSON(data []byte) error {
282 return apijson.UnmarshalRoot(data, r)
283}
284
285// FineTuningJobHyperparametersBatchSizeUnion contains all possible properties and
286// values from [constant.Auto], [int64].
287//
288// Use the methods beginning with 'As' to cast the union to one of its variants.
289//
// If the underlying value is not a JSON object, one of the following properties
// will be valid: OfAuto, OfInt.
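//
// For example, a value of this union can be inspected through its JSON metadata
// before casting (a sketch; u is a [FineTuningJobHyperparametersBatchSizeUnion]):
//
//	switch {
//	case u.JSON.OfInt.IsPresent():
//		fmt.Println("batch size:", u.AsInt())
//	case u.JSON.OfAuto.IsPresent():
//		fmt.Println("batch size: auto")
//	}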
292type FineTuningJobHyperparametersBatchSizeUnion struct {
293 // This field will be present if the value is a [constant.Auto] instead of an
294 // object.
295 OfAuto constant.Auto `json:",inline"`
	// This field will be present if the value is an [int64] instead of an object.
297 OfInt int64 `json:",inline"`
298 JSON struct {
299 OfAuto resp.Field
300 OfInt resp.Field
301 raw string
302 } `json:"-"`
303}
304
305func (u FineTuningJobHyperparametersBatchSizeUnion) AsAuto() (v constant.Auto) {
306 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
307 return
308}
309
310func (u FineTuningJobHyperparametersBatchSizeUnion) AsInt() (v int64) {
311 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
312 return
313}
314
315// Returns the unmodified JSON received from the API
316func (u FineTuningJobHyperparametersBatchSizeUnion) RawJSON() string { return u.JSON.raw }
317
318func (r *FineTuningJobHyperparametersBatchSizeUnion) UnmarshalJSON(data []byte) error {
319 return apijson.UnmarshalRoot(data, r)
320}
321
322// FineTuningJobHyperparametersLearningRateMultiplierUnion contains all possible
323// properties and values from [constant.Auto], [float64].
324//
325// Use the methods beginning with 'As' to cast the union to one of its variants.
326//
// If the underlying value is not a JSON object, one of the following properties
// will be valid: OfAuto, OfFloat.
329type FineTuningJobHyperparametersLearningRateMultiplierUnion struct {
330 // This field will be present if the value is a [constant.Auto] instead of an
331 // object.
332 OfAuto constant.Auto `json:",inline"`
333 // This field will be present if the value is a [float64] instead of an object.
334 OfFloat float64 `json:",inline"`
335 JSON struct {
336 OfAuto resp.Field
337 OfFloat resp.Field
338 raw string
339 } `json:"-"`
340}
341
342func (u FineTuningJobHyperparametersLearningRateMultiplierUnion) AsAuto() (v constant.Auto) {
343 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
344 return
345}
346
347func (u FineTuningJobHyperparametersLearningRateMultiplierUnion) AsFloat() (v float64) {
348 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
349 return
350}
351
352// Returns the unmodified JSON received from the API
353func (u FineTuningJobHyperparametersLearningRateMultiplierUnion) RawJSON() string { return u.JSON.raw }
354
355func (r *FineTuningJobHyperparametersLearningRateMultiplierUnion) UnmarshalJSON(data []byte) error {
356 return apijson.UnmarshalRoot(data, r)
357}
358
359// FineTuningJobHyperparametersNEpochsUnion contains all possible properties and
360// values from [constant.Auto], [int64].
361//
362// Use the methods beginning with 'As' to cast the union to one of its variants.
363//
// If the underlying value is not a JSON object, one of the following properties
// will be valid: OfAuto, OfInt.
366type FineTuningJobHyperparametersNEpochsUnion struct {
367 // This field will be present if the value is a [constant.Auto] instead of an
368 // object.
369 OfAuto constant.Auto `json:",inline"`
	// This field will be present if the value is an [int64] instead of an object.
371 OfInt int64 `json:",inline"`
372 JSON struct {
373 OfAuto resp.Field
374 OfInt resp.Field
375 raw string
376 } `json:"-"`
377}
378
379func (u FineTuningJobHyperparametersNEpochsUnion) AsAuto() (v constant.Auto) {
380 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
381 return
382}
383
384func (u FineTuningJobHyperparametersNEpochsUnion) AsInt() (v int64) {
385 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
386 return
387}
388
389// Returns the unmodified JSON received from the API
390func (u FineTuningJobHyperparametersNEpochsUnion) RawJSON() string { return u.JSON.raw }
391
392func (r *FineTuningJobHyperparametersNEpochsUnion) UnmarshalJSON(data []byte) error {
393 return apijson.UnmarshalRoot(data, r)
394}
395
396// The current status of the fine-tuning job, which can be either
397// `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`.
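//
// A polling sketch from caller code (assumes a configured client, ctx, and a
// jobID string; the interval is arbitrary):
//
//	for {
//		job, err := client.FineTuning.Jobs.Get(ctx, jobID)
//		if err != nil {
//			return err
//		}
//		switch job.Status {
//		case openai.FineTuningJobStatusSucceeded,
//			openai.FineTuningJobStatusFailed,
//			openai.FineTuningJobStatusCancelled:
//			return nil // terminal state reached
//		}
//		time.Sleep(10 * time.Second)
//	}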
398type FineTuningJobStatus string
399
400const (
401 FineTuningJobStatusValidatingFiles FineTuningJobStatus = "validating_files"
402 FineTuningJobStatusQueued FineTuningJobStatus = "queued"
403 FineTuningJobStatusRunning FineTuningJobStatus = "running"
404 FineTuningJobStatusSucceeded FineTuningJobStatus = "succeeded"
405 FineTuningJobStatusFailed FineTuningJobStatus = "failed"
406 FineTuningJobStatusCancelled FineTuningJobStatus = "cancelled"
407)
408
409// The method used for fine-tuning.
410type FineTuningJobMethod struct {
411 // Configuration for the DPO fine-tuning method.
412 Dpo FineTuningJobMethodDpo `json:"dpo"`
413 // Configuration for the supervised fine-tuning method.
414 Supervised FineTuningJobMethodSupervised `json:"supervised"`
	// The type of method, either `supervised` or `dpo`.
416 //
417 // Any of "supervised", "dpo".
418 Type string `json:"type"`
	// Metadata for the response. Check the presence of optional fields with the
	// [resp.Field.IsPresent] method.
421 JSON struct {
422 Dpo resp.Field
423 Supervised resp.Field
424 Type resp.Field
425 ExtraFields map[string]resp.Field
426 raw string
427 } `json:"-"`
428}
429
430// Returns the unmodified JSON received from the API
431func (r FineTuningJobMethod) RawJSON() string { return r.JSON.raw }
432func (r *FineTuningJobMethod) UnmarshalJSON(data []byte) error {
433 return apijson.UnmarshalRoot(data, r)
434}
435
436// Configuration for the DPO fine-tuning method.
437type FineTuningJobMethodDpo struct {
438 // The hyperparameters used for the fine-tuning job.
439 Hyperparameters FineTuningJobMethodDpoHyperparameters `json:"hyperparameters"`
	// Metadata for the response. Check the presence of optional fields with the
	// [resp.Field.IsPresent] method.
442 JSON struct {
443 Hyperparameters resp.Field
444 ExtraFields map[string]resp.Field
445 raw string
446 } `json:"-"`
447}
448
449// Returns the unmodified JSON received from the API
450func (r FineTuningJobMethodDpo) RawJSON() string { return r.JSON.raw }
451func (r *FineTuningJobMethodDpo) UnmarshalJSON(data []byte) error {
452 return apijson.UnmarshalRoot(data, r)
453}
454
455// The hyperparameters used for the fine-tuning job.
456type FineTuningJobMethodDpoHyperparameters struct {
457 // Number of examples in each batch. A larger batch size means that model
458 // parameters are updated less frequently, but with lower variance.
459 BatchSize FineTuningJobMethodDpoHyperparametersBatchSizeUnion `json:"batch_size"`
460 // The beta value for the DPO method. A higher beta value will increase the weight
461 // of the penalty between the policy and reference model.
462 Beta FineTuningJobMethodDpoHyperparametersBetaUnion `json:"beta"`
463 // Scaling factor for the learning rate. A smaller learning rate may be useful to
464 // avoid overfitting.
465 LearningRateMultiplier FineTuningJobMethodDpoHyperparametersLearningRateMultiplierUnion `json:"learning_rate_multiplier"`
466 // The number of epochs to train the model for. An epoch refers to one full cycle
467 // through the training dataset.
468 NEpochs FineTuningJobMethodDpoHyperparametersNEpochsUnion `json:"n_epochs"`
	// Metadata for the response. Check the presence of optional fields with the
	// [resp.Field.IsPresent] method.
471 JSON struct {
472 BatchSize resp.Field
473 Beta resp.Field
474 LearningRateMultiplier resp.Field
475 NEpochs resp.Field
476 ExtraFields map[string]resp.Field
477 raw string
478 } `json:"-"`
479}
480
481// Returns the unmodified JSON received from the API
482func (r FineTuningJobMethodDpoHyperparameters) RawJSON() string { return r.JSON.raw }
483func (r *FineTuningJobMethodDpoHyperparameters) UnmarshalJSON(data []byte) error {
484 return apijson.UnmarshalRoot(data, r)
485}
486
487// FineTuningJobMethodDpoHyperparametersBatchSizeUnion contains all possible
488// properties and values from [constant.Auto], [int64].
489//
490// Use the methods beginning with 'As' to cast the union to one of its variants.
491//
// If the underlying value is not a JSON object, one of the following properties
// will be valid: OfAuto, OfInt.
494type FineTuningJobMethodDpoHyperparametersBatchSizeUnion struct {
495 // This field will be present if the value is a [constant.Auto] instead of an
496 // object.
497 OfAuto constant.Auto `json:",inline"`
	// This field will be present if the value is an [int64] instead of an object.
499 OfInt int64 `json:",inline"`
500 JSON struct {
501 OfAuto resp.Field
502 OfInt resp.Field
503 raw string
504 } `json:"-"`
505}
506
507func (u FineTuningJobMethodDpoHyperparametersBatchSizeUnion) AsAuto() (v constant.Auto) {
508 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
509 return
510}
511
512func (u FineTuningJobMethodDpoHyperparametersBatchSizeUnion) AsInt() (v int64) {
513 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
514 return
515}
516
517// Returns the unmodified JSON received from the API
518func (u FineTuningJobMethodDpoHyperparametersBatchSizeUnion) RawJSON() string { return u.JSON.raw }
519
520func (r *FineTuningJobMethodDpoHyperparametersBatchSizeUnion) UnmarshalJSON(data []byte) error {
521 return apijson.UnmarshalRoot(data, r)
522}
523
524// FineTuningJobMethodDpoHyperparametersBetaUnion contains all possible properties
525// and values from [constant.Auto], [float64].
526//
527// Use the methods beginning with 'As' to cast the union to one of its variants.
528//
// If the underlying value is not a JSON object, one of the following properties
// will be valid: OfAuto, OfFloat.
531type FineTuningJobMethodDpoHyperparametersBetaUnion struct {
532 // This field will be present if the value is a [constant.Auto] instead of an
533 // object.
534 OfAuto constant.Auto `json:",inline"`
535 // This field will be present if the value is a [float64] instead of an object.
536 OfFloat float64 `json:",inline"`
537 JSON struct {
538 OfAuto resp.Field
539 OfFloat resp.Field
540 raw string
541 } `json:"-"`
542}
543
544func (u FineTuningJobMethodDpoHyperparametersBetaUnion) AsAuto() (v constant.Auto) {
545 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
546 return
547}
548
549func (u FineTuningJobMethodDpoHyperparametersBetaUnion) AsFloat() (v float64) {
550 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
551 return
552}
553
554// Returns the unmodified JSON received from the API
555func (u FineTuningJobMethodDpoHyperparametersBetaUnion) RawJSON() string { return u.JSON.raw }
556
557func (r *FineTuningJobMethodDpoHyperparametersBetaUnion) UnmarshalJSON(data []byte) error {
558 return apijson.UnmarshalRoot(data, r)
559}
560
561// FineTuningJobMethodDpoHyperparametersLearningRateMultiplierUnion contains all
562// possible properties and values from [constant.Auto], [float64].
563//
564// Use the methods beginning with 'As' to cast the union to one of its variants.
565//
// If the underlying value is not a JSON object, one of the following properties
// will be valid: OfAuto, OfFloat.
568type FineTuningJobMethodDpoHyperparametersLearningRateMultiplierUnion struct {
569 // This field will be present if the value is a [constant.Auto] instead of an
570 // object.
571 OfAuto constant.Auto `json:",inline"`
572 // This field will be present if the value is a [float64] instead of an object.
573 OfFloat float64 `json:",inline"`
574 JSON struct {
575 OfAuto resp.Field
576 OfFloat resp.Field
577 raw string
578 } `json:"-"`
579}
580
581func (u FineTuningJobMethodDpoHyperparametersLearningRateMultiplierUnion) AsAuto() (v constant.Auto) {
582 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
583 return
584}
585
586func (u FineTuningJobMethodDpoHyperparametersLearningRateMultiplierUnion) AsFloat() (v float64) {
587 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
588 return
589}
590
591// Returns the unmodified JSON received from the API
592func (u FineTuningJobMethodDpoHyperparametersLearningRateMultiplierUnion) RawJSON() string {
593 return u.JSON.raw
594}
595
596func (r *FineTuningJobMethodDpoHyperparametersLearningRateMultiplierUnion) UnmarshalJSON(data []byte) error {
597 return apijson.UnmarshalRoot(data, r)
598}
599
600// FineTuningJobMethodDpoHyperparametersNEpochsUnion contains all possible
601// properties and values from [constant.Auto], [int64].
602//
603// Use the methods beginning with 'As' to cast the union to one of its variants.
604//
// If the underlying value is not a JSON object, one of the following properties
// will be valid: OfAuto, OfInt.
607type FineTuningJobMethodDpoHyperparametersNEpochsUnion struct {
608 // This field will be present if the value is a [constant.Auto] instead of an
609 // object.
610 OfAuto constant.Auto `json:",inline"`
	// This field will be present if the value is an [int64] instead of an object.
612 OfInt int64 `json:",inline"`
613 JSON struct {
614 OfAuto resp.Field
615 OfInt resp.Field
616 raw string
617 } `json:"-"`
618}
619
620func (u FineTuningJobMethodDpoHyperparametersNEpochsUnion) AsAuto() (v constant.Auto) {
621 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
622 return
623}
624
625func (u FineTuningJobMethodDpoHyperparametersNEpochsUnion) AsInt() (v int64) {
626 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
627 return
628}
629
630// Returns the unmodified JSON received from the API
631func (u FineTuningJobMethodDpoHyperparametersNEpochsUnion) RawJSON() string { return u.JSON.raw }
632
633func (r *FineTuningJobMethodDpoHyperparametersNEpochsUnion) UnmarshalJSON(data []byte) error {
634 return apijson.UnmarshalRoot(data, r)
635}
636
637// Configuration for the supervised fine-tuning method.
638type FineTuningJobMethodSupervised struct {
639 // The hyperparameters used for the fine-tuning job.
640 Hyperparameters FineTuningJobMethodSupervisedHyperparameters `json:"hyperparameters"`
	// Metadata for the response. Check the presence of optional fields with the
	// [resp.Field.IsPresent] method.
643 JSON struct {
644 Hyperparameters resp.Field
645 ExtraFields map[string]resp.Field
646 raw string
647 } `json:"-"`
648}
649
650// Returns the unmodified JSON received from the API
651func (r FineTuningJobMethodSupervised) RawJSON() string { return r.JSON.raw }
652func (r *FineTuningJobMethodSupervised) UnmarshalJSON(data []byte) error {
653 return apijson.UnmarshalRoot(data, r)
654}
655
656// The hyperparameters used for the fine-tuning job.
657type FineTuningJobMethodSupervisedHyperparameters struct {
658 // Number of examples in each batch. A larger batch size means that model
659 // parameters are updated less frequently, but with lower variance.
660 BatchSize FineTuningJobMethodSupervisedHyperparametersBatchSizeUnion `json:"batch_size"`
661 // Scaling factor for the learning rate. A smaller learning rate may be useful to
662 // avoid overfitting.
663 LearningRateMultiplier FineTuningJobMethodSupervisedHyperparametersLearningRateMultiplierUnion `json:"learning_rate_multiplier"`
664 // The number of epochs to train the model for. An epoch refers to one full cycle
665 // through the training dataset.
666 NEpochs FineTuningJobMethodSupervisedHyperparametersNEpochsUnion `json:"n_epochs"`
	// Metadata for the response. Check the presence of optional fields with the
	// [resp.Field.IsPresent] method.
669 JSON struct {
670 BatchSize resp.Field
671 LearningRateMultiplier resp.Field
672 NEpochs resp.Field
673 ExtraFields map[string]resp.Field
674 raw string
675 } `json:"-"`
676}
677
678// Returns the unmodified JSON received from the API
679func (r FineTuningJobMethodSupervisedHyperparameters) RawJSON() string { return r.JSON.raw }
680func (r *FineTuningJobMethodSupervisedHyperparameters) UnmarshalJSON(data []byte) error {
681 return apijson.UnmarshalRoot(data, r)
682}
683
684// FineTuningJobMethodSupervisedHyperparametersBatchSizeUnion contains all possible
685// properties and values from [constant.Auto], [int64].
686//
687// Use the methods beginning with 'As' to cast the union to one of its variants.
688//
// If the underlying value is not a JSON object, one of the following properties
// will be valid: OfAuto, OfInt.
691type FineTuningJobMethodSupervisedHyperparametersBatchSizeUnion struct {
692 // This field will be present if the value is a [constant.Auto] instead of an
693 // object.
694 OfAuto constant.Auto `json:",inline"`
	// This field will be present if the value is an [int64] instead of an object.
696 OfInt int64 `json:",inline"`
697 JSON struct {
698 OfAuto resp.Field
699 OfInt resp.Field
700 raw string
701 } `json:"-"`
702}
703
704func (u FineTuningJobMethodSupervisedHyperparametersBatchSizeUnion) AsAuto() (v constant.Auto) {
705 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
706 return
707}
708
709func (u FineTuningJobMethodSupervisedHyperparametersBatchSizeUnion) AsInt() (v int64) {
710 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
711 return
712}
713
714// Returns the unmodified JSON received from the API
715func (u FineTuningJobMethodSupervisedHyperparametersBatchSizeUnion) RawJSON() string {
716 return u.JSON.raw
717}
718
719func (r *FineTuningJobMethodSupervisedHyperparametersBatchSizeUnion) UnmarshalJSON(data []byte) error {
720 return apijson.UnmarshalRoot(data, r)
721}
722
723// FineTuningJobMethodSupervisedHyperparametersLearningRateMultiplierUnion contains
724// all possible properties and values from [constant.Auto], [float64].
725//
726// Use the methods beginning with 'As' to cast the union to one of its variants.
727//
// If the underlying value is not a JSON object, one of the following properties
// will be valid: OfAuto, OfFloat.
730type FineTuningJobMethodSupervisedHyperparametersLearningRateMultiplierUnion struct {
731 // This field will be present if the value is a [constant.Auto] instead of an
732 // object.
733 OfAuto constant.Auto `json:",inline"`
734 // This field will be present if the value is a [float64] instead of an object.
735 OfFloat float64 `json:",inline"`
736 JSON struct {
737 OfAuto resp.Field
738 OfFloat resp.Field
739 raw string
740 } `json:"-"`
741}
742
743func (u FineTuningJobMethodSupervisedHyperparametersLearningRateMultiplierUnion) AsAuto() (v constant.Auto) {
744 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
745 return
746}
747
748func (u FineTuningJobMethodSupervisedHyperparametersLearningRateMultiplierUnion) AsFloat() (v float64) {
749 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
750 return
751}
752
753// Returns the unmodified JSON received from the API
754func (u FineTuningJobMethodSupervisedHyperparametersLearningRateMultiplierUnion) RawJSON() string {
755 return u.JSON.raw
756}
757
758func (r *FineTuningJobMethodSupervisedHyperparametersLearningRateMultiplierUnion) UnmarshalJSON(data []byte) error {
759 return apijson.UnmarshalRoot(data, r)
760}
761
762// FineTuningJobMethodSupervisedHyperparametersNEpochsUnion contains all possible
763// properties and values from [constant.Auto], [int64].
764//
765// Use the methods beginning with 'As' to cast the union to one of its variants.
766//
// If the underlying value is not a JSON object, one of the following properties
// will be valid: OfAuto, OfInt.
769type FineTuningJobMethodSupervisedHyperparametersNEpochsUnion struct {
770 // This field will be present if the value is a [constant.Auto] instead of an
771 // object.
772 OfAuto constant.Auto `json:",inline"`
	// This field will be present if the value is an [int64] instead of an object.
774 OfInt int64 `json:",inline"`
775 JSON struct {
776 OfAuto resp.Field
777 OfInt resp.Field
778 raw string
779 } `json:"-"`
780}
781
782func (u FineTuningJobMethodSupervisedHyperparametersNEpochsUnion) AsAuto() (v constant.Auto) {
783 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
784 return
785}
786
787func (u FineTuningJobMethodSupervisedHyperparametersNEpochsUnion) AsInt() (v int64) {
788 apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
789 return
790}
791
792// Returns the unmodified JSON received from the API
793func (u FineTuningJobMethodSupervisedHyperparametersNEpochsUnion) RawJSON() string { return u.JSON.raw }
794
795func (r *FineTuningJobMethodSupervisedHyperparametersNEpochsUnion) UnmarshalJSON(data []byte) error {
796 return apijson.UnmarshalRoot(data, r)
797}
798
799// Fine-tuning job event object
800type FineTuningJobEvent struct {
801 // The object identifier.
802 ID string `json:"id,required"`
803 // The Unix timestamp (in seconds) for when the fine-tuning job was created.
804 CreatedAt int64 `json:"created_at,required"`
805 // The log level of the event.
806 //
807 // Any of "info", "warn", "error".
808 Level FineTuningJobEventLevel `json:"level,required"`
809 // The message of the event.
810 Message string `json:"message,required"`
811 // The object type, which is always "fine_tuning.job.event".
812 Object constant.FineTuningJobEvent `json:"object,required"`
813 // The data associated with the event.
814 Data interface{} `json:"data"`
815 // The type of event.
816 //
817 // Any of "message", "metrics".
818 Type FineTuningJobEventType `json:"type"`
	// Metadata for the response. Check the presence of optional fields with the
	// [resp.Field.IsPresent] method.
821 JSON struct {
822 ID resp.Field
823 CreatedAt resp.Field
824 Level resp.Field
825 Message resp.Field
826 Object resp.Field
827 Data resp.Field
828 Type resp.Field
829 ExtraFields map[string]resp.Field
830 raw string
831 } `json:"-"`
832}
833
834// Returns the unmodified JSON received from the API
835func (r FineTuningJobEvent) RawJSON() string { return r.JSON.raw }
836func (r *FineTuningJobEvent) UnmarshalJSON(data []byte) error {
837 return apijson.UnmarshalRoot(data, r)
838}
839
840// The log level of the event.
841type FineTuningJobEventLevel string
842
843const (
844 FineTuningJobEventLevelInfo FineTuningJobEventLevel = "info"
845 FineTuningJobEventLevelWarn FineTuningJobEventLevel = "warn"
846 FineTuningJobEventLevelError FineTuningJobEventLevel = "error"
847)
848
849// The type of event.
850type FineTuningJobEventType string
851
852const (
853 FineTuningJobEventTypeMessage FineTuningJobEventType = "message"
854 FineTuningJobEventTypeMetrics FineTuningJobEventType = "metrics"
855)
856
857// The settings for your integration with Weights and Biases. This payload
858// specifies the project that metrics will be sent to. Optionally, you can set an
859// explicit display name for your run, add tags to your run, and set a default
860// entity (team, username, etc) to be associated with your run.
861type FineTuningJobWandbIntegration struct {
862 // The name of the project that the new run will be created under.
863 Project string `json:"project,required"`
864 // The entity to use for the run. This allows you to set the team or username of
865 // the WandB user that you would like associated with the run. If not set, the
866 // default entity for the registered WandB API key is used.
867 Entity string `json:"entity,nullable"`
868 // A display name to set for the run. If not set, we will use the Job ID as the
869 // name.
870 Name string `json:"name,nullable"`
871 // A list of tags to be attached to the newly created run. These tags are passed
872 // through directly to WandB. Some default tags are generated by OpenAI:
873 // "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}".
874 Tags []string `json:"tags"`
	// Metadata for the response. Check the presence of optional fields with the
	// [resp.Field.IsPresent] method.
877 JSON struct {
878 Project resp.Field
879 Entity resp.Field
880 Name resp.Field
881 Tags resp.Field
882 ExtraFields map[string]resp.Field
883 raw string
884 } `json:"-"`
885}
886
887// Returns the unmodified JSON received from the API
888func (r FineTuningJobWandbIntegration) RawJSON() string { return r.JSON.raw }
889func (r *FineTuningJobWandbIntegration) UnmarshalJSON(data []byte) error {
890 return apijson.UnmarshalRoot(data, r)
891}
892
893type FineTuningJobWandbIntegrationObject struct {
	// The type of the integration being enabled for the fine-tuning job.
895 Type constant.Wandb `json:"type,required"`
896 // The settings for your integration with Weights and Biases. This payload
897 // specifies the project that metrics will be sent to. Optionally, you can set an
898 // explicit display name for your run, add tags to your run, and set a default
899 // entity (team, username, etc) to be associated with your run.
900 Wandb FineTuningJobWandbIntegration `json:"wandb,required"`
	// Metadata for the response. Check the presence of optional fields with the
	// [resp.Field.IsPresent] method.
903 JSON struct {
904 Type resp.Field
905 Wandb resp.Field
906 ExtraFields map[string]resp.Field
907 raw string
908 } `json:"-"`
909}
910
911// Returns the unmodified JSON received from the API
912func (r FineTuningJobWandbIntegrationObject) RawJSON() string { return r.JSON.raw }
913func (r *FineTuningJobWandbIntegrationObject) UnmarshalJSON(data []byte) error {
914 return apijson.UnmarshalRoot(data, r)
915}
916
917type FineTuningJobNewParams struct {
918 // The name of the model to fine-tune. You can select one of the
919 // [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned).
920 Model string `json:"model,omitzero,required"`
921 // The ID of an uploaded file that contains training data.
922 //
923 // See [upload file](https://platform.openai.com/docs/api-reference/files/create)
924 // for how to upload a file.
925 //
926 // Your dataset must be formatted as a JSONL file. Additionally, you must upload
927 // your file with the purpose `fine-tune`.
928 //
929 // The contents of the file should differ depending on if the model uses the
930 // [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input),
931 // [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input)
932 // format, or if the fine-tuning method uses the
933 // [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input)
934 // format.
935 //
936 // See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning)
937 // for more details.
938 TrainingFile string `json:"training_file,required"`
939 // The seed controls the reproducibility of the job. Passing in the same seed and
940 // job parameters should produce the same results, but may differ in rare cases. If
941 // a seed is not specified, one will be generated for you.
942 Seed param.Opt[int64] `json:"seed,omitzero"`
943 // A string of up to 64 characters that will be added to your fine-tuned model
944 // name.
945 //
946 // For example, a `suffix` of "custom-model-name" would produce a model name like
947 // `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`.
948 Suffix param.Opt[string] `json:"suffix,omitzero"`
949 // The ID of an uploaded file that contains validation data.
950 //
951 // If you provide this file, the data is used to generate validation metrics
952 // periodically during fine-tuning. These metrics can be viewed in the fine-tuning
953 // results file. The same data should not be present in both train and validation
954 // files.
955 //
956 // Your dataset must be formatted as a JSONL file. You must upload your file with
957 // the purpose `fine-tune`.
958 //
959 // See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning)
960 // for more details.
961 ValidationFile param.Opt[string] `json:"validation_file,omitzero"`
962 // A list of integrations to enable for your fine-tuning job.
963 Integrations []FineTuningJobNewParamsIntegration `json:"integrations,omitzero"`
964 // Set of 16 key-value pairs that can be attached to an object. This can be useful
965 // for storing additional information about the object in a structured format, and
966 // querying for objects via API or the dashboard.
967 //
968 // Keys are strings with a maximum length of 64 characters. Values are strings with
969 // a maximum length of 512 characters.
970 Metadata shared.MetadataParam `json:"metadata,omitzero"`
971 // The hyperparameters used for the fine-tuning job. This value is now deprecated
972 // in favor of `method`, and should be passed in under the `method` parameter.
973 Hyperparameters FineTuningJobNewParamsHyperparameters `json:"hyperparameters,omitzero"`
974 // The method used for fine-tuning.
975 Method FineTuningJobNewParamsMethod `json:"method,omitzero"`
976 paramObj
977}
978
979// IsPresent returns true if the field's value is not omitted and not the JSON
980// "null". To check if this field is omitted, use [param.IsOmitted].
981func (f FineTuningJobNewParams) IsPresent() bool { return !param.IsOmitted(f) && !f.IsNull() }
982
983func (r FineTuningJobNewParams) MarshalJSON() (data []byte, err error) {
984 type shadow FineTuningJobNewParams
985 return param.MarshalObject(r, (*shadow)(&r))
986}
987
988// The hyperparameters used for the fine-tuning job. This value is now deprecated
989// in favor of `method`, and should be passed in under the `method` parameter.
990//
991// Deprecated: deprecated
992type FineTuningJobNewParamsHyperparameters struct {
993 // Number of examples in each batch. A larger batch size means that model
994 // parameters are updated less frequently, but with lower variance.
995 BatchSize FineTuningJobNewParamsHyperparametersBatchSizeUnion `json:"batch_size,omitzero"`
996 // Scaling factor for the learning rate. A smaller learning rate may be useful to
997 // avoid overfitting.
998 LearningRateMultiplier FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion `json:"learning_rate_multiplier,omitzero"`
999 // The number of epochs to train the model for. An epoch refers to one full cycle
1000 // through the training dataset.
1001 NEpochs FineTuningJobNewParamsHyperparametersNEpochsUnion `json:"n_epochs,omitzero"`
1002 paramObj
1003}
1004
1005// IsPresent returns true if the field's value is not omitted and not the JSON
1006// "null". To check if this field is omitted, use [param.IsOmitted].
1007func (f FineTuningJobNewParamsHyperparameters) IsPresent() bool {
1008 return !param.IsOmitted(f) && !f.IsNull()
1009}
1010func (r FineTuningJobNewParamsHyperparameters) MarshalJSON() (data []byte, err error) {
1011 type shadow FineTuningJobNewParamsHyperparameters
1012 return param.MarshalObject(r, (*shadow)(&r))
1013}
1014
1015// Only one field can be non-zero.
1016//
1017// Use [param.IsOmitted] to confirm if a field is set.
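//
// Construction sketch for both variants (openai.Int is assumed to be the
// package-level helper that builds a param.Opt[int64]; the numeric value is a
// placeholder):
//
//	// explicit batch size
//	bs := openai.FineTuningJobNewParamsHyperparametersBatchSizeUnion{OfInt: openai.Int(8)}
//
//	// let the API pick a value automatically
//	bs = openai.FineTuningJobNewParamsHyperparametersBatchSizeUnion{OfAuto: constant.New[constant.Auto]()}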
1018type FineTuningJobNewParamsHyperparametersBatchSizeUnion struct {
	// Construct this variant with constant.New[constant.Auto](). Check whether the
	// union is this variant with !param.IsOmitted(union.OfAuto).
1021 OfAuto constant.Auto `json:",omitzero,inline"`
1022 OfInt param.Opt[int64] `json:",omitzero,inline"`
1023 paramUnion
1024}
1025
1026// IsPresent returns true if the field's value is not omitted and not the JSON
1027// "null". To check if this field is omitted, use [param.IsOmitted].
1028func (u FineTuningJobNewParamsHyperparametersBatchSizeUnion) IsPresent() bool {
1029 return !param.IsOmitted(u) && !u.IsNull()
1030}
1031func (u FineTuningJobNewParamsHyperparametersBatchSizeUnion) MarshalJSON() ([]byte, error) {
1032 return param.MarshalUnion[FineTuningJobNewParamsHyperparametersBatchSizeUnion](u.OfAuto, u.OfInt)
1033}
1034
1035func (u *FineTuningJobNewParamsHyperparametersBatchSizeUnion) asAny() any {
1036 if !param.IsOmitted(u.OfAuto) {
1037 return &u.OfAuto
1038 } else if !param.IsOmitted(u.OfInt) {
1039 return &u.OfInt.Value
1040 }
1041 return nil
1042}
1043
1044// Only one field can be non-zero.
1045//
1046// Use [param.IsOmitted] to confirm if a field is set.
1047type FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion struct {
	// Construct this variant with constant.New[constant.Auto](). Check whether the
	// union is this variant with !param.IsOmitted(union.OfAuto).
1050 OfAuto constant.Auto `json:",omitzero,inline"`
1051 OfFloat param.Opt[float64] `json:",omitzero,inline"`
1052 paramUnion
1053}
1054
1055// IsPresent returns true if the field's value is not omitted and not the JSON
1056// "null". To check if this field is omitted, use [param.IsOmitted].
1057func (u FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion) IsPresent() bool {
1058 return !param.IsOmitted(u) && !u.IsNull()
1059}
1060func (u FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion) MarshalJSON() ([]byte, error) {
1061 return param.MarshalUnion[FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion](u.OfAuto, u.OfFloat)
1062}
1063
1064func (u *FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion) asAny() any {
1065 if !param.IsOmitted(u.OfAuto) {
1066 return &u.OfAuto
1067 } else if !param.IsOmitted(u.OfFloat) {
1068 return &u.OfFloat.Value
1069 }
1070 return nil
1071}
1072
1073// Only one field can be non-zero.
1074//
1075// Use [param.IsOmitted] to confirm if a field is set.
1076type FineTuningJobNewParamsHyperparametersNEpochsUnion struct {
	// Construct this variant with constant.New[constant.Auto](). Check whether the
	// union is this variant with !param.IsOmitted(union.OfAuto).
1079 OfAuto constant.Auto `json:",omitzero,inline"`
1080 OfInt param.Opt[int64] `json:",omitzero,inline"`
1081 paramUnion
1082}
1083
1084// IsPresent returns true if the field's value is not omitted and not the JSON
1085// "null". To check if this field is omitted, use [param.IsOmitted].
1086func (u FineTuningJobNewParamsHyperparametersNEpochsUnion) IsPresent() bool {
1087 return !param.IsOmitted(u) && !u.IsNull()
1088}
1089func (u FineTuningJobNewParamsHyperparametersNEpochsUnion) MarshalJSON() ([]byte, error) {
1090 return param.MarshalUnion[FineTuningJobNewParamsHyperparametersNEpochsUnion](u.OfAuto, u.OfInt)
1091}
1092
1093func (u *FineTuningJobNewParamsHyperparametersNEpochsUnion) asAny() any {
1094 if !param.IsOmitted(u.OfAuto) {
1095 return &u.OfAuto
1096 } else if !param.IsOmitted(u.OfInt) {
1097 return &u.OfInt.Value
1098 }
1099 return nil
1100}
1101
1102// The properties Type, Wandb are required.
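//
// Construction sketch (the project name and tag are placeholders; Type may be
// left at its zero value, which marshals as "wandb"):
//
//	integration := openai.FineTuningJobNewParamsIntegration{
//		Wandb: openai.FineTuningJobNewParamsIntegrationWandb{
//			Project: "my-wandb-project",
//			Tags:    []string{"experiment-1"},
//		},
//	}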
1103type FineTuningJobNewParamsIntegration struct {
1104 // The settings for your integration with Weights and Biases. This payload
1105 // specifies the project that metrics will be sent to. Optionally, you can set an
1106 // explicit display name for your run, add tags to your run, and set a default
1107 // entity (team, username, etc) to be associated with your run.
1108 Wandb FineTuningJobNewParamsIntegrationWandb `json:"wandb,omitzero,required"`
1109 // The type of integration to enable. Currently, only "wandb" (Weights and Biases)
1110 // is supported.
1111 //
1112 // This field can be elided, and will marshal its zero value as "wandb".
1113 Type constant.Wandb `json:"type,required"`
1114 paramObj
1115}
1116
1117// IsPresent returns true if the field's value is not omitted and not the JSON
1118// "null". To check if this field is omitted, use [param.IsOmitted].
1119func (f FineTuningJobNewParamsIntegration) IsPresent() bool {
1120 return !param.IsOmitted(f) && !f.IsNull()
1121}
1122func (r FineTuningJobNewParamsIntegration) MarshalJSON() (data []byte, err error) {
1123 type shadow FineTuningJobNewParamsIntegration
1124 return param.MarshalObject(r, (*shadow)(&r))
1125}
1126
1127// The settings for your integration with Weights and Biases. This payload
1128// specifies the project that metrics will be sent to. Optionally, you can set an
1129// explicit display name for your run, add tags to your run, and set a default
1130// entity (team, username, etc) to be associated with your run.
1131//
1132// The property Project is required.
1133type FineTuningJobNewParamsIntegrationWandb struct {
1134 // The name of the project that the new run will be created under.
1135 Project string `json:"project,required"`
1136 // The entity to use for the run. This allows you to set the team or username of
1137 // the WandB user that you would like associated with the run. If not set, the
1138 // default entity for the registered WandB API key is used.
1139 Entity param.Opt[string] `json:"entity,omitzero"`
1140 // A display name to set for the run. If not set, we will use the Job ID as the
1141 // name.
1142 Name param.Opt[string] `json:"name,omitzero"`
1143 // A list of tags to be attached to the newly created run. These tags are passed
1144 // through directly to WandB. Some default tags are generated by OpenAI:
1145 // "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}".
1146 Tags []string `json:"tags,omitzero"`
1147 paramObj
1148}
1149
1150// IsPresent returns true if the field's value is not omitted and not the JSON
1151// "null". To check if this field is omitted, use [param.IsOmitted].
1152func (f FineTuningJobNewParamsIntegrationWandb) IsPresent() bool {
1153 return !param.IsOmitted(f) && !f.IsNull()
1154}
1155func (r FineTuningJobNewParamsIntegrationWandb) MarshalJSON() (data []byte, err error) {
1156 type shadow FineTuningJobNewParamsIntegrationWandb
1157 return param.MarshalObject(r, (*shadow)(&r))
1158}
1159
1160// The method used for fine-tuning.
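//
// Construction sketch for a supervised run with an explicit epoch count
// (openai.Int is assumed to be the package-level helper for param.Opt[int64];
// the epoch count is a placeholder):
//
//	method := openai.FineTuningJobNewParamsMethod{
//		Type: "supervised",
//		Supervised: openai.FineTuningJobNewParamsMethodSupervised{
//			Hyperparameters: openai.FineTuningJobNewParamsMethodSupervisedHyperparameters{
//				NEpochs: openai.FineTuningJobNewParamsMethodSupervisedHyperparametersNEpochsUnion{
//					OfInt: openai.Int(3),
//				},
//			},
//		},
//	}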
1161type FineTuningJobNewParamsMethod struct {
1162 // Configuration for the DPO fine-tuning method.
1163 Dpo FineTuningJobNewParamsMethodDpo `json:"dpo,omitzero"`
1164 // Configuration for the supervised fine-tuning method.
1165 Supervised FineTuningJobNewParamsMethodSupervised `json:"supervised,omitzero"`
	// The type of method, either `supervised` or `dpo`.
1167 //
1168 // Any of "supervised", "dpo".
1169 Type string `json:"type,omitzero"`
1170 paramObj
1171}
1172
1173// IsPresent returns true if the field's value is not omitted and not the JSON
1174// "null". To check if this field is omitted, use [param.IsOmitted].
1175func (f FineTuningJobNewParamsMethod) IsPresent() bool { return !param.IsOmitted(f) && !f.IsNull() }
1176func (r FineTuningJobNewParamsMethod) MarshalJSON() (data []byte, err error) {
1177 type shadow FineTuningJobNewParamsMethod
1178 return param.MarshalObject(r, (*shadow)(&r))
1179}
1180
1181func init() {
1182 apijson.RegisterFieldValidator[FineTuningJobNewParamsMethod](
1183 "Type", false, "supervised", "dpo",
1184 )
1185}
1186
1187// Configuration for the DPO fine-tuning method.
1188type FineTuningJobNewParamsMethodDpo struct {
1189 // The hyperparameters used for the fine-tuning job.
1190 Hyperparameters FineTuningJobNewParamsMethodDpoHyperparameters `json:"hyperparameters,omitzero"`
1191 paramObj
1192}
1193
1194// IsPresent returns true if the field's value is not omitted and not the JSON
1195// "null". To check if this field is omitted, use [param.IsOmitted].
1196func (f FineTuningJobNewParamsMethodDpo) IsPresent() bool { return !param.IsOmitted(f) && !f.IsNull() }
1197func (r FineTuningJobNewParamsMethodDpo) MarshalJSON() (data []byte, err error) {
1198 type shadow FineTuningJobNewParamsMethodDpo
1199 return param.MarshalObject(r, (*shadow)(&r))
1200}
1201
1202// The hyperparameters used for the fine-tuning job.
1203type FineTuningJobNewParamsMethodDpoHyperparameters struct {
1204 // Number of examples in each batch. A larger batch size means that model
1205 // parameters are updated less frequently, but with lower variance.
1206 BatchSize FineTuningJobNewParamsMethodDpoHyperparametersBatchSizeUnion `json:"batch_size,omitzero"`
1207 // The beta value for the DPO method. A higher beta value will increase the weight
1208 // of the penalty between the policy and reference model.
1209 Beta FineTuningJobNewParamsMethodDpoHyperparametersBetaUnion `json:"beta,omitzero"`
1210 // Scaling factor for the learning rate. A smaller learning rate may be useful to
1211 // avoid overfitting.
1212 LearningRateMultiplier FineTuningJobNewParamsMethodDpoHyperparametersLearningRateMultiplierUnion `json:"learning_rate_multiplier,omitzero"`
1213 // The number of epochs to train the model for. An epoch refers to one full cycle
1214 // through the training dataset.
1215 NEpochs FineTuningJobNewParamsMethodDpoHyperparametersNEpochsUnion `json:"n_epochs,omitzero"`
1216 paramObj
1217}
1218
1219// IsPresent returns true if the field's value is not omitted and not the JSON
1220// "null". To check if this field is omitted, use [param.IsOmitted].
1221func (f FineTuningJobNewParamsMethodDpoHyperparameters) IsPresent() bool {
1222 return !param.IsOmitted(f) && !f.IsNull()
1223}
1224func (r FineTuningJobNewParamsMethodDpoHyperparameters) MarshalJSON() (data []byte, err error) {
1225 type shadow FineTuningJobNewParamsMethodDpoHyperparameters
1226 return param.MarshalObject(r, (*shadow)(&r))
1227}
1228
1229// Only one field can be non-zero.
1230//
1231// Use [param.IsOmitted] to confirm if a field is set.
1232type FineTuningJobNewParamsMethodDpoHyperparametersBatchSizeUnion struct {
	// Construct this variant with constant.New[constant.Auto](). Check whether the
	// union is this variant with !param.IsOmitted(union.OfAuto).
1235 OfAuto constant.Auto `json:",omitzero,inline"`
1236 OfInt param.Opt[int64] `json:",omitzero,inline"`
1237 paramUnion
1238}
1239
1240// IsPresent returns true if the field's value is not omitted and not the JSON
1241// "null". To check if this field is omitted, use [param.IsOmitted].
1242func (u FineTuningJobNewParamsMethodDpoHyperparametersBatchSizeUnion) IsPresent() bool {
1243 return !param.IsOmitted(u) && !u.IsNull()
1244}
1245func (u FineTuningJobNewParamsMethodDpoHyperparametersBatchSizeUnion) MarshalJSON() ([]byte, error) {
1246 return param.MarshalUnion[FineTuningJobNewParamsMethodDpoHyperparametersBatchSizeUnion](u.OfAuto, u.OfInt)
1247}
1248
1249func (u *FineTuningJobNewParamsMethodDpoHyperparametersBatchSizeUnion) asAny() any {
1250 if !param.IsOmitted(u.OfAuto) {
1251 return &u.OfAuto
1252 } else if !param.IsOmitted(u.OfInt) {
1253 return &u.OfInt.Value
1254 }
1255 return nil
1256}
1257
1258// Only one field can be non-zero.
1259//
1260// Use [param.IsOmitted] to confirm if a field is set.
1261type FineTuningJobNewParamsMethodDpoHyperparametersBetaUnion struct {
	// Construct this variant with constant.New[constant.Auto](). Check whether the
	// union is this variant with !param.IsOmitted(union.OfAuto).
1264 OfAuto constant.Auto `json:",omitzero,inline"`
1265 OfFloat param.Opt[float64] `json:",omitzero,inline"`
1266 paramUnion
1267}
1268
1269// IsPresent returns true if the field's value is not omitted and not the JSON
1270// "null". To check if this field is omitted, use [param.IsOmitted].
1271func (u FineTuningJobNewParamsMethodDpoHyperparametersBetaUnion) IsPresent() bool {
1272 return !param.IsOmitted(u) && !u.IsNull()
1273}
1274func (u FineTuningJobNewParamsMethodDpoHyperparametersBetaUnion) MarshalJSON() ([]byte, error) {
1275 return param.MarshalUnion[FineTuningJobNewParamsMethodDpoHyperparametersBetaUnion](u.OfAuto, u.OfFloat)
1276}
1277
1278func (u *FineTuningJobNewParamsMethodDpoHyperparametersBetaUnion) asAny() any {
1279 if !param.IsOmitted(u.OfAuto) {
1280 return &u.OfAuto
1281 } else if !param.IsOmitted(u.OfFloat) {
1282 return &u.OfFloat.Value
1283 }
1284 return nil
1285}
1286
1287// Only one field can be non-zero.
1288//
1289// Use [param.IsOmitted] to confirm if a field is set.
1290type FineTuningJobNewParamsMethodDpoHyperparametersLearningRateMultiplierUnion struct {
	// Construct this variant with constant.New[constant.Auto](). Check whether the
	// union is this variant with !param.IsOmitted(union.OfAuto).
1293 OfAuto constant.Auto `json:",omitzero,inline"`
1294 OfFloat param.Opt[float64] `json:",omitzero,inline"`
1295 paramUnion
1296}
1297
1298// IsPresent returns true if the field's value is not omitted and not the JSON
1299// "null". To check if this field is omitted, use [param.IsOmitted].
1300func (u FineTuningJobNewParamsMethodDpoHyperparametersLearningRateMultiplierUnion) IsPresent() bool {
1301 return !param.IsOmitted(u) && !u.IsNull()
1302}
1303func (u FineTuningJobNewParamsMethodDpoHyperparametersLearningRateMultiplierUnion) MarshalJSON() ([]byte, error) {
1304 return param.MarshalUnion[FineTuningJobNewParamsMethodDpoHyperparametersLearningRateMultiplierUnion](u.OfAuto, u.OfFloat)
1305}
1306
1307func (u *FineTuningJobNewParamsMethodDpoHyperparametersLearningRateMultiplierUnion) asAny() any {
1308 if !param.IsOmitted(u.OfAuto) {
1309 return &u.OfAuto
1310 } else if !param.IsOmitted(u.OfFloat) {
1311 return &u.OfFloat.Value
1312 }
1313 return nil
1314}
1315
1316// Only one field can be non-zero.
1317//
1318// Use [param.IsOmitted] to confirm if a field is set.
type FineTuningJobNewParamsMethodDpoHyperparametersNEpochsUnion struct {
	// Construct this variant with constant.New[constant.Auto](). Check whether the
	// union is this variant with !param.IsOmitted(union.OfAuto).
	OfAuto constant.Auto    `json:",omitzero,inline"`
	OfInt  param.Opt[int64] `json:",omitzero,inline"`
	paramUnion
}

// IsPresent returns true if the field's value is not omitted and not the JSON
// "null". To check if this field is omitted, use [param.IsOmitted].
func (u FineTuningJobNewParamsMethodDpoHyperparametersNEpochsUnion) IsPresent() bool {
	return !param.IsOmitted(u) && !u.IsNull()
}
func (u FineTuningJobNewParamsMethodDpoHyperparametersNEpochsUnion) MarshalJSON() ([]byte, error) {
	return param.MarshalUnion[FineTuningJobNewParamsMethodDpoHyperparametersNEpochsUnion](u.OfAuto, u.OfInt)
}

func (u *FineTuningJobNewParamsMethodDpoHyperparametersNEpochsUnion) asAny() any {
	if !param.IsOmitted(u.OfAuto) {
		return &u.OfAuto
	} else if !param.IsOmitted(u.OfInt) {
		return &u.OfInt.Value
	}
	return nil
}

// Configuration for the supervised fine-tuning method.
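//
// An illustrative sketch of building this configuration from calling code. The
// openai.Float and openai.Int helpers are assumed to be defined elsewhere in the
// package (not in this file), and the hyperparameter values are made up:
//
//	method := openai.FineTuningJobNewParamsMethodSupervised{
//		Hyperparameters: openai.FineTuningJobNewParamsMethodSupervisedHyperparameters{
//			// Let the service pick the batch size automatically.
//			BatchSize: openai.FineTuningJobNewParamsMethodSupervisedHyperparametersBatchSizeUnion{
//				OfAuto: constant.New[constant.Auto](),
//			},
//			// Pin the learning-rate multiplier and epoch count explicitly.
//			LearningRateMultiplier: openai.FineTuningJobNewParamsMethodSupervisedHyperparametersLearningRateMultiplierUnion{
//				OfFloat: openai.Float(0.1),
//			},
//			NEpochs: openai.FineTuningJobNewParamsMethodSupervisedHyperparametersNEpochsUnion{
//				OfInt: openai.Int(3),
//			},
//		},
//	}
//
// On each union, set exactly one Of* variant; the others stay at their zero value
// and marshal as omitted. How this struct plugs into [FineTuningJobNewParams] is
// defined elsewhere in this file and not repeated here.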
type FineTuningJobNewParamsMethodSupervised struct {
	// The hyperparameters used for the fine-tuning job.
	Hyperparameters FineTuningJobNewParamsMethodSupervisedHyperparameters `json:"hyperparameters,omitzero"`
	paramObj
}

// IsPresent returns true if the field's value is not omitted and not the JSON
// "null". To check if this field is omitted, use [param.IsOmitted].
func (f FineTuningJobNewParamsMethodSupervised) IsPresent() bool {
	return !param.IsOmitted(f) && !f.IsNull()
}
func (r FineTuningJobNewParamsMethodSupervised) MarshalJSON() (data []byte, err error) {
	type shadow FineTuningJobNewParamsMethodSupervised
	return param.MarshalObject(r, (*shadow)(&r))
}

// The hyperparameters used for the fine-tuning job.
type FineTuningJobNewParamsMethodSupervisedHyperparameters struct {
	// Number of examples in each batch. A larger batch size means that model
	// parameters are updated less frequently, but with lower variance.
	BatchSize FineTuningJobNewParamsMethodSupervisedHyperparametersBatchSizeUnion `json:"batch_size,omitzero"`
	// Scaling factor for the learning rate. A smaller learning rate may be useful to
	// avoid overfitting.
	LearningRateMultiplier FineTuningJobNewParamsMethodSupervisedHyperparametersLearningRateMultiplierUnion `json:"learning_rate_multiplier,omitzero"`
	// The number of epochs to train the model for. An epoch refers to one full cycle
	// through the training dataset.
	NEpochs FineTuningJobNewParamsMethodSupervisedHyperparametersNEpochsUnion `json:"n_epochs,omitzero"`
	paramObj
}

// IsPresent returns true if the field's value is not omitted and not the JSON
// "null". To check if this field is omitted, use [param.IsOmitted].
func (f FineTuningJobNewParamsMethodSupervisedHyperparameters) IsPresent() bool {
	return !param.IsOmitted(f) && !f.IsNull()
}
func (r FineTuningJobNewParamsMethodSupervisedHyperparameters) MarshalJSON() (data []byte, err error) {
	type shadow FineTuningJobNewParamsMethodSupervisedHyperparameters
	return param.MarshalObject(r, (*shadow)(&r))
}

// Only one field can be non-zero.
//
// Use [param.IsOmitted] to confirm if a field is set.
type FineTuningJobNewParamsMethodSupervisedHyperparametersBatchSizeUnion struct {
	// Construct this variant with constant.New[constant.Auto](). Check whether the
	// union is this variant with !param.IsOmitted(union.OfAuto).
	OfAuto constant.Auto    `json:",omitzero,inline"`
	OfInt  param.Opt[int64] `json:",omitzero,inline"`
	paramUnion
}

// IsPresent returns true if the field's value is not omitted and not the JSON
// "null". To check if this field is omitted, use [param.IsOmitted].
func (u FineTuningJobNewParamsMethodSupervisedHyperparametersBatchSizeUnion) IsPresent() bool {
	return !param.IsOmitted(u) && !u.IsNull()
}
func (u FineTuningJobNewParamsMethodSupervisedHyperparametersBatchSizeUnion) MarshalJSON() ([]byte, error) {
	return param.MarshalUnion[FineTuningJobNewParamsMethodSupervisedHyperparametersBatchSizeUnion](u.OfAuto, u.OfInt)
}

func (u *FineTuningJobNewParamsMethodSupervisedHyperparametersBatchSizeUnion) asAny() any {
	if !param.IsOmitted(u.OfAuto) {
		return &u.OfAuto
	} else if !param.IsOmitted(u.OfInt) {
		return &u.OfInt.Value
	}
	return nil
}

// Only one field can be non-zero.
//
// Use [param.IsOmitted] to confirm if a field is set.
type FineTuningJobNewParamsMethodSupervisedHyperparametersLearningRateMultiplierUnion struct {
	// Construct this variant with constant.New[constant.Auto](). Check whether the
	// union is this variant with !param.IsOmitted(union.OfAuto).
	OfAuto  constant.Auto      `json:",omitzero,inline"`
	OfFloat param.Opt[float64] `json:",omitzero,inline"`
	paramUnion
}

// IsPresent returns true if the field's value is not omitted and not the JSON
// "null". To check if this field is omitted, use [param.IsOmitted].
func (u FineTuningJobNewParamsMethodSupervisedHyperparametersLearningRateMultiplierUnion) IsPresent() bool {
	return !param.IsOmitted(u) && !u.IsNull()
}
func (u FineTuningJobNewParamsMethodSupervisedHyperparametersLearningRateMultiplierUnion) MarshalJSON() ([]byte, error) {
	return param.MarshalUnion[FineTuningJobNewParamsMethodSupervisedHyperparametersLearningRateMultiplierUnion](u.OfAuto, u.OfFloat)
}

func (u *FineTuningJobNewParamsMethodSupervisedHyperparametersLearningRateMultiplierUnion) asAny() any {
	if !param.IsOmitted(u.OfAuto) {
		return &u.OfAuto
	} else if !param.IsOmitted(u.OfFloat) {
		return &u.OfFloat.Value
	}
	return nil
}

// Only one field can be non-zero.
//
// Use [param.IsOmitted] to confirm if a field is set.
type FineTuningJobNewParamsMethodSupervisedHyperparametersNEpochsUnion struct {
	// Construct this variant with constant.New[constant.Auto](). Check whether the
	// union is this variant with !param.IsOmitted(union.OfAuto).
	OfAuto constant.Auto    `json:",omitzero,inline"`
	OfInt  param.Opt[int64] `json:",omitzero,inline"`
	paramUnion
}

// IsPresent returns true if the field's value is not omitted and not the JSON
// "null". To check if this field is omitted, use [param.IsOmitted].
func (u FineTuningJobNewParamsMethodSupervisedHyperparametersNEpochsUnion) IsPresent() bool {
	return !param.IsOmitted(u) && !u.IsNull()
}
func (u FineTuningJobNewParamsMethodSupervisedHyperparametersNEpochsUnion) MarshalJSON() ([]byte, error) {
	return param.MarshalUnion[FineTuningJobNewParamsMethodSupervisedHyperparametersNEpochsUnion](u.OfAuto, u.OfInt)
}

func (u *FineTuningJobNewParamsMethodSupervisedHyperparametersNEpochsUnion) asAny() any {
	if !param.IsOmitted(u.OfAuto) {
		return &u.OfAuto
	} else if !param.IsOmitted(u.OfInt) {
		return &u.OfInt.Value
	}
	return nil
}

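// FineTuningJobListParams contains the query parameters accepted by
// [FineTuningJobService.List] and [FineTuningJobService.ListAutoPaging].
//
// An illustrative sketch only; openai.String and openai.Int are package-level
// helpers assumed to be defined outside this file, and the filter values are
// hypothetical:
//
//	params := openai.FineTuningJobListParams{
//		After:    openai.String("ftjob-abc123"), // resume after this job ID
//		Limit:    openai.Int(10),                // page size
//		Metadata: map[string]string{"project": "alpha"},
//	}
//
// With the bracketed query encoding configured in URLQuery below, this serializes
// roughly as after=ftjob-abc123&limit=10&metadata[project]=alpha.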
type FineTuningJobListParams struct {
	// Identifier for the last job from the previous pagination request.
	After param.Opt[string] `query:"after,omitzero" json:"-"`
	// Number of fine-tuning jobs to retrieve.
	Limit param.Opt[int64] `query:"limit,omitzero" json:"-"`
	// Optional metadata filter. To filter, use the syntax `metadata[k]=v`.
	// Alternatively, set `metadata=null` to indicate no metadata.
	Metadata map[string]string `query:"metadata,omitzero" json:"-"`
	paramObj
}

// IsPresent returns true if the field's value is not omitted and not the JSON
// "null". To check if this field is omitted, use [param.IsOmitted].
func (f FineTuningJobListParams) IsPresent() bool { return !param.IsOmitted(f) && !f.IsNull() }

// URLQuery serializes [FineTuningJobListParams]'s query parameters as
// `url.Values`.
func (r FineTuningJobListParams) URLQuery() (v url.Values) {
	return apiquery.MarshalWithSettings(r, apiquery.QuerySettings{
		ArrayFormat:  apiquery.ArrayQueryFormatBrackets,
		NestedFormat: apiquery.NestedQueryFormatBrackets,
	})
}

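// FineTuningJobListEventsParams contains the query parameters used when paging
// through a fine-tuning job's events; the corresponding list-events service
// method is defined elsewhere in this file.
//
// A brief illustrative sketch (the helpers and cursor value are assumptions, as
// above):
//
//	eventParams := openai.FineTuningJobListEventsParams{
//		After: openai.String("ftevent-abc123"), // resume after this event ID
//		Limit: openai.Int(20),                  // page size
//	}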
type FineTuningJobListEventsParams struct {
	// Identifier for the last event from the previous pagination request.
	After param.Opt[string] `query:"after,omitzero" json:"-"`
	// Number of events to retrieve.
	Limit param.Opt[int64] `query:"limit,omitzero" json:"-"`
	paramObj
}

// IsPresent returns true if the field's value is not omitted and not the JSON
// "null". To check if this field is omitted, use [param.IsOmitted].
func (f FineTuningJobListEventsParams) IsPresent() bool { return !param.IsOmitted(f) && !f.IsNull() }

// URLQuery serializes [FineTuningJobListEventsParams]'s query parameters as
// `url.Values`.
func (r FineTuningJobListEventsParams) URLQuery() (v url.Values) {
	return apiquery.MarshalWithSettings(r, apiquery.QuerySettings{
		ArrayFormat:  apiquery.ArrayQueryFormatBrackets,
		NestedFormat: apiquery.NestedQueryFormatBrackets,
	})
}