betavectorstorefilebatch.go
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
package openai
import (
"context"
"errors"
"fmt"
"net/http"
"net/url"
"sync"
"github.com/openai/openai-go/internal/apijson"
"github.com/openai/openai-go/internal/apiquery"
"github.com/openai/openai-go/internal/param"
"github.com/openai/openai-go/internal/requestconfig"
"github.com/openai/openai-go/option"
"github.com/openai/openai-go/packages/pagination"
)
// BetaVectorStoreFileBatchService contains methods and other services that help
// with interacting with the OpenAI API.
//
// Note, unlike clients, this service does not read variables from the
// environment automatically. You should not instantiate this service directly;
// use the [NewBetaVectorStoreFileBatchService] method instead.
type BetaVectorStoreFileBatchService struct {
Options []option.RequestOption
}
// NewBetaVectorStoreFileBatchService generates a new service that applies the
// given options to each request. These options are applied after the parent
// client's options (if there is one), and before any request-specific options.
func NewBetaVectorStoreFileBatchService(opts ...option.RequestOption) (r *BetaVectorStoreFileBatchService) {
r = &BetaVectorStoreFileBatchService{}
r.Options = opts
return
}
// Create a vector store file batch.
func (r *BetaVectorStoreFileBatchService) New(ctx context.Context, vectorStoreID string, body BetaVectorStoreFileBatchNewParams, opts ...option.RequestOption) (res *VectorStoreFileBatch, err error) {
opts = append(r.Options[:], opts...)
opts = append([]option.RequestOption{option.WithHeader("OpenAI-Beta", "assistants=v2")}, opts...)
if vectorStoreID == "" {
err = errors.New("missing required vector_store_id parameter")
return
}
path := fmt.Sprintf("vector_stores/%s/file_batches", vectorStoreID)
err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...)
return
}
// Creates a vector store file batch and polls the API until the task is
// complete. Pass 0 for pollIntervalMs to use the default polling interval.
func (r *BetaVectorStoreFileBatchService) NewAndPoll(ctx context.Context, vectorStoreId string, body BetaVectorStoreFileBatchNewParams, pollIntervalMs int, opts ...option.RequestOption) (res *VectorStoreFileBatch, err error) {
batch, err := r.New(ctx, vectorStoreId, body, opts...)
if err != nil {
return nil, err
}
return r.PollStatus(ctx, vectorStoreId, batch.ID, pollIntervalMs, opts...)
}
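// The following sketch (not part of the generated API) illustrates how
// NewAndPoll might be called through a client. It assumes the client exposes
// this service as Beta.VectorStores.FileBatches; the vector store and file IDs
// shown are hypothetical.
func exampleVectorStoreFileBatchNewAndPoll() {
	client := NewClient() // by default reads OPENAI_API_KEY from the environment
	batch, err := client.Beta.VectorStores.FileBatches.NewAndPoll(
		context.TODO(),
		"vs_abc123", // hypothetical vector store ID
		BetaVectorStoreFileBatchNewParams{
			FileIDs: F([]string{"file-abc123"}), // hypothetical file ID
		},
		0, // 0 selects the default polling interval
	)
	if err != nil {
		panic(err)
	}
	fmt.Printf("batch %s finished with status %s\n", batch.ID, batch.Status)
}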
// Uploads the given files concurrently and then creates a vector store file
// batch containing them.
//
// If you've already uploaded files that you want to include in this batch, pass
// their IDs through the fileIDs argument.
//
// Pass 0 for pollIntervalMs to use the default polling interval.
//
// If any file upload fails, the first upload error is returned and no batch is
// created.
func (r *BetaVectorStoreFileBatchService) UploadAndPoll(ctx context.Context, vectorStoreID string, files []FileNewParams, fileIDs []string, pollIntervalMs int, opts ...option.RequestOption) (*VectorStoreFileBatch, error) {
if len(files) == 0 {
return nil, errors.New("No `files` provided to process. If you've already uploaded files you should use `.NewAndPoll()` instead")
}
filesService := NewFileService(r.Options...)
// Upload every file concurrently; the channels are buffered to len(files) so
// that no upload goroutine blocks on send.
uploadedFileIDs := make(chan string, len(files))
fileUploadErrors := make(chan error, len(files))
wg := sync.WaitGroup{}
for _, file := range files {
wg.Add(1)
go func(file FileNewParams) {
defer wg.Done()
fileObj, err := filesService.New(ctx, file, opts...)
if err != nil {
fileUploadErrors <- err
return
}
uploadedFileIDs <- fileObj.ID
}(file)
}
wg.Wait()
close(uploadedFileIDs)
close(fileUploadErrors)
// Return the first upload error, if any, before creating the batch.
for err := range fileUploadErrors {
return nil, err
}
for id := range uploadedFileIDs {
fileIDs = append(fileIDs, id)
}
return r.NewAndPoll(ctx, vectorStoreID, BetaVectorStoreFileBatchNewParams{
FileIDs: F(fileIDs),
}, pollIntervalMs, opts...)
}
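// The following sketch (not part of the generated API) illustrates how
// UploadAndPoll might be used to upload new files and attach previously
// uploaded ones in a single batch. It assumes the client exposes this service
// as Beta.VectorStores.FileBatches; the files and existingFileIDs arguments are
// assumed to be prepared by the caller, and the vector store ID is hypothetical.
func exampleVectorStoreFileBatchUploadAndPoll(files []FileNewParams, existingFileIDs []string) {
	client := NewClient()
	batch, err := client.Beta.VectorStores.FileBatches.UploadAndPoll(
		context.TODO(),
		"vs_abc123", // hypothetical vector store ID
		files,
		existingFileIDs,
		0, // default polling interval
	)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%d of %d files processed\n", batch.FileCounts.Completed, batch.FileCounts.Total)
}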
// Retrieves a vector store file batch.
func (r *BetaVectorStoreFileBatchService) Get(ctx context.Context, vectorStoreID string, batchID string, opts ...option.RequestOption) (res *VectorStoreFileBatch, err error) {
opts = append(r.Options[:], opts...)
opts = append([]option.RequestOption{option.WithHeader("OpenAI-Beta", "assistants=v2")}, opts...)
if vectorStoreID == "" {
err = errors.New("missing required vector_store_id parameter")
return
}
if batchID == "" {
err = errors.New("missing required batch_id parameter")
return
}
path := fmt.Sprintf("vector_stores/%s/file_batches/%s", vectorStoreID, batchID)
err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...)
return
}
// Cancel a vector store file batch. This attempts to cancel the processing of
// files in this batch as soon as possible.
func (r *BetaVectorStoreFileBatchService) Cancel(ctx context.Context, vectorStoreID string, batchID string, opts ...option.RequestOption) (res *VectorStoreFileBatch, err error) {
opts = append(r.Options[:], opts...)
opts = append([]option.RequestOption{option.WithHeader("OpenAI-Beta", "assistants=v2")}, opts...)
if vectorStoreID == "" {
err = errors.New("missing required vector_store_id parameter")
return
}
if batchID == "" {
err = errors.New("missing required batch_id parameter")
return
}
path := fmt.Sprintf("vector_stores/%s/file_batches/%s/cancel", vectorStoreID, batchID)
err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, nil, &res, opts...)
return
}
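// The following sketch (not part of the generated API) illustrates fetching a
// batch and cancelling it while it is still in progress. It assumes the client
// exposes this service as Beta.VectorStores.FileBatches; the vector store and
// batch IDs are hypothetical.
func exampleVectorStoreFileBatchCancel() {
	client := NewClient()
	ctx := context.TODO()
	batch, err := client.Beta.VectorStores.FileBatches.Get(ctx, "vs_abc123", "vsfb_abc123")
	if err != nil {
		panic(err)
	}
	if batch.Status == VectorStoreFileBatchStatusInProgress {
		if _, err := client.Beta.VectorStores.FileBatches.Cancel(ctx, "vs_abc123", "vsfb_abc123"); err != nil {
			panic(err)
		}
	}
}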
// Returns a list of vector store files in a batch.
func (r *BetaVectorStoreFileBatchService) ListFiles(ctx context.Context, vectorStoreID string, batchID string, query BetaVectorStoreFileBatchListFilesParams, opts ...option.RequestOption) (res *pagination.CursorPage[VectorStoreFile], err error) {
var raw *http.Response
opts = append(r.Options[:], opts...)
opts = append([]option.RequestOption{option.WithHeader("OpenAI-Beta", "assistants=v2"), option.WithResponseInto(&raw)}, opts...)
if vectorStoreID == "" {
err = errors.New("missing required vector_store_id parameter")
return
}
if batchID == "" {
err = errors.New("missing required batch_id parameter")
return
}
path := fmt.Sprintf("vector_stores/%s/file_batches/%s/files", vectorStoreID, batchID)
cfg, err := requestconfig.NewRequestConfig(ctx, http.MethodGet, path, query, &res, opts...)
if err != nil {
return nil, err
}
err = cfg.Execute()
if err != nil {
return nil, err
}
res.SetPageConfig(cfg, raw)
return res, nil
}
// Returns a list of vector store files in a batch.
func (r *BetaVectorStoreFileBatchService) ListFilesAutoPaging(ctx context.Context, vectorStoreID string, batchID string, query BetaVectorStoreFileBatchListFilesParams, opts ...option.RequestOption) *pagination.CursorPageAutoPager[VectorStoreFile] {
return pagination.NewCursorPageAutoPager(r.ListFiles(ctx, vectorStoreID, batchID, query, opts...))
}
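// The following sketch (not part of the generated API) illustrates iterating
// over the completed files in a batch with the auto-pager. It assumes the
// client exposes this service as Beta.VectorStores.FileBatches and that the
// pager exposes Next, Current, and Err; the IDs are hypothetical.
func exampleVectorStoreFileBatchListFiles() {
	client := NewClient()
	iter := client.Beta.VectorStores.FileBatches.ListFilesAutoPaging(
		context.TODO(),
		"vs_abc123",   // hypothetical vector store ID
		"vsfb_abc123", // hypothetical batch ID
		BetaVectorStoreFileBatchListFilesParams{
			Filter: F(BetaVectorStoreFileBatchListFilesParamsFilterCompleted),
			Limit:  F(int64(50)),
		},
	)
	for iter.Next() {
		fmt.Println(iter.Current().ID)
	}
	if err := iter.Err(); err != nil {
		panic(err)
	}
}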
// A batch of files attached to a vector store.
type VectorStoreFileBatch struct {
// The identifier, which can be referenced in API endpoints.
ID string `json:"id,required"`
// The Unix timestamp (in seconds) for when the vector store files batch was
// created.
CreatedAt int64 `json:"created_at,required"`
FileCounts VectorStoreFileBatchFileCounts `json:"file_counts,required"`
// The object type, which is always `vector_store.file_batch`.
Object VectorStoreFileBatchObject `json:"object,required"`
// The status of the vector store files batch, which can be `in_progress`,
// `completed`, `cancelled`, or `failed`.
Status VectorStoreFileBatchStatus `json:"status,required"`
// The ID of the
// [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object)
// that the [File](https://platform.openai.com/docs/api-reference/files) is
// attached to.
VectorStoreID string `json:"vector_store_id,required"`
JSON vectorStoreFileBatchJSON `json:"-"`
}
// vectorStoreFileBatchJSON contains the JSON metadata for the struct
// [VectorStoreFileBatch]
type vectorStoreFileBatchJSON struct {
ID apijson.Field
CreatedAt apijson.Field
FileCounts apijson.Field
Object apijson.Field
Status apijson.Field
VectorStoreID apijson.Field
raw string
ExtraFields map[string]apijson.Field
}
func (r *VectorStoreFileBatch) UnmarshalJSON(data []byte) (err error) {
return apijson.UnmarshalRoot(data, r)
}
func (r vectorStoreFileBatchJSON) RawJSON() string {
return r.raw
}
type VectorStoreFileBatchFileCounts struct {
// The number of files that were cancelled.
Cancelled int64 `json:"cancelled,required"`
// The number of files that have been processed.
Completed int64 `json:"completed,required"`
// The number of files that have failed to process.
Failed int64 `json:"failed,required"`
// The number of files that are currently being processed.
InProgress int64 `json:"in_progress,required"`
// The total number of files.
Total int64 `json:"total,required"`
JSON vectorStoreFileBatchFileCountsJSON `json:"-"`
}
// vectorStoreFileBatchFileCountsJSON contains the JSON metadata for the struct
// [VectorStoreFileBatchFileCounts]
type vectorStoreFileBatchFileCountsJSON struct {
Cancelled apijson.Field
Completed apijson.Field
Failed apijson.Field
InProgress apijson.Field
Total apijson.Field
raw string
ExtraFields map[string]apijson.Field
}
func (r *VectorStoreFileBatchFileCounts) UnmarshalJSON(data []byte) (err error) {
return apijson.UnmarshalRoot(data, r)
}
func (r vectorStoreFileBatchFileCountsJSON) RawJSON() string {
return r.raw
}
// The object type, which is always `vector_store.file_batch`.
type VectorStoreFileBatchObject string
const (
VectorStoreFileBatchObjectVectorStoreFilesBatch VectorStoreFileBatchObject = "vector_store.files_batch"
)
func (r VectorStoreFileBatchObject) IsKnown() bool {
switch r {
case VectorStoreFileBatchObjectVectorStoreFilesBatch:
return true
}
return false
}
// The status of the vector store files batch, which can be `in_progress`,
// `completed`, `cancelled`, or `failed`.
type VectorStoreFileBatchStatus string
const (
VectorStoreFileBatchStatusInProgress VectorStoreFileBatchStatus = "in_progress"
VectorStoreFileBatchStatusCompleted VectorStoreFileBatchStatus = "completed"
VectorStoreFileBatchStatusCancelled VectorStoreFileBatchStatus = "cancelled"
VectorStoreFileBatchStatusFailed VectorStoreFileBatchStatus = "failed"
)
func (r VectorStoreFileBatchStatus) IsKnown() bool {
switch r {
case VectorStoreFileBatchStatusInProgress, VectorStoreFileBatchStatusCompleted, VectorStoreFileBatchStatusCancelled, VectorStoreFileBatchStatusFailed:
return true
}
return false
}
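// The following sketch (not part of the generated API) shows how IsKnown can
// guard a switch against status values introduced by the API after this SDK
// was generated.
func exampleVectorStoreFileBatchStatusCheck(batch *VectorStoreFileBatch) {
	if !batch.Status.IsKnown() {
		fmt.Printf("unrecognized batch status %q\n", batch.Status)
		return
	}
	switch batch.Status {
	case VectorStoreFileBatchStatusCompleted:
		fmt.Println("all files processed")
	case VectorStoreFileBatchStatusFailed, VectorStoreFileBatchStatusCancelled:
		fmt.Println("batch did not complete")
	default: // VectorStoreFileBatchStatusInProgress
		fmt.Println("still processing")
	}
}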
type BetaVectorStoreFileBatchNewParams struct {
// A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that
// the vector store should use. Useful for tools like `file_search` that can access
// files.
FileIDs param.Field[[]string] `json:"file_ids,required"`
// The chunking strategy used to chunk the file(s). If not set, will use the `auto`
// strategy. Only applicable if `file_ids` is non-empty.
ChunkingStrategy param.Field[FileChunkingStrategyParamUnion] `json:"chunking_strategy"`
}
func (r BetaVectorStoreFileBatchNewParams) MarshalJSON() (data []byte, err error) {
return apijson.MarshalRoot(r)
}
type BetaVectorStoreFileBatchListFilesParams struct {
// A cursor for use in pagination. `after` is an object ID that defines your place
// in the list. For instance, if you make a list request and receive 100 objects,
// ending with obj_foo, your subsequent call can include after=obj_foo in order to
// fetch the next page of the list.
After param.Field[string] `query:"after"`
// A cursor for use in pagination. `before` is an object ID that defines your place
// in the list. For instance, if you make a list request and receive 100 objects,
// starting with obj_foo, your subsequent call can include before=obj_foo in order
// to fetch the previous page of the list.
Before param.Field[string] `query:"before"`
// Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`.
Filter param.Field[BetaVectorStoreFileBatchListFilesParamsFilter] `query:"filter"`
// A limit on the number of objects to be returned. Limit can range between 1 and
// 100, and the default is 20.
Limit param.Field[int64] `query:"limit"`
// Sort order by the `created_at` timestamp of the objects. `asc` for ascending
// order and `desc` for descending order.
Order param.Field[BetaVectorStoreFileBatchListFilesParamsOrder] `query:"order"`
}
// URLQuery serializes [BetaVectorStoreFileBatchListFilesParams]'s query parameters
// as `url.Values`.
func (r BetaVectorStoreFileBatchListFilesParams) URLQuery() (v url.Values) {
return apiquery.MarshalWithSettings(r, apiquery.QuerySettings{
ArrayFormat: apiquery.ArrayQueryFormatBrackets,
NestedFormat: apiquery.NestedQueryFormatBrackets,
})
}
// Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`.
type BetaVectorStoreFileBatchListFilesParamsFilter string
const (
BetaVectorStoreFileBatchListFilesParamsFilterInProgress BetaVectorStoreFileBatchListFilesParamsFilter = "in_progress"
BetaVectorStoreFileBatchListFilesParamsFilterCompleted BetaVectorStoreFileBatchListFilesParamsFilter = "completed"
BetaVectorStoreFileBatchListFilesParamsFilterFailed BetaVectorStoreFileBatchListFilesParamsFilter = "failed"
BetaVectorStoreFileBatchListFilesParamsFilterCancelled BetaVectorStoreFileBatchListFilesParamsFilter = "cancelled"
)
func (r BetaVectorStoreFileBatchListFilesParamsFilter) IsKnown() bool {
switch r {
case BetaVectorStoreFileBatchListFilesParamsFilterInProgress, BetaVectorStoreFileBatchListFilesParamsFilterCompleted, BetaVectorStoreFileBatchListFilesParamsFilterFailed, BetaVectorStoreFileBatchListFilesParamsFilterCancelled:
return true
}
return false
}
// Sort order by the `created_at` timestamp of the objects. `asc` for ascending
// order and `desc` for descending order.
type BetaVectorStoreFileBatchListFilesParamsOrder string
const (
BetaVectorStoreFileBatchListFilesParamsOrderAsc BetaVectorStoreFileBatchListFilesParamsOrder = "asc"
BetaVectorStoreFileBatchListFilesParamsOrderDesc BetaVectorStoreFileBatchListFilesParamsOrder = "desc"
)
func (r BetaVectorStoreFileBatchListFilesParamsOrder) IsKnown() bool {
switch r {
case BetaVectorStoreFileBatchListFilesParamsOrderAsc, BetaVectorStoreFileBatchListFilesParamsOrderDesc:
return true
}
return false
}