@@ -74,7 +74,7 @@ curl --location 'https://api.portkey.ai/v1/batches' \
7474 "input_file_id": "<file_id>",
7575 "endpoint": "<endpoint>",
7676 "completion_window": "<completion_window>",
77- "metadata": {},
77+ "metadata": {}
7878}'
7979```
8080
@@ -130,6 +130,133 @@ print(start_batch_response)
130130
131131</CodeGroup >
132132
133+ ### Create Batch Job with Blob Storage
134+ <CodeGroup >
135+ ```python Python
136+ from portkey_ai import Portkey
137+
138+ # Initialize the Portkey client
139+ portkey = Portkey(
140+     api_key="PORTKEY_API_KEY",  # Replace with your Portkey API key
141+     provider="@PROVIDER"
142+ )
143+
144+ start_batch_response = portkey.batches.create(
145+     endpoint="endpoint",  # ex: /v1/chat/completions
146+     completion_window="completion_window",  # ex: 24h
147+     metadata={},  # metadata for the batch
148+     input_blob="<blob_url>",
149+     output_folder={
150+         "url": "<output_blob_folder>"  # both error file and output file will be saved in this folder
151+     }
152+ )
153+
154+ print(start_batch_response)
155+ ```
156+
157+ ```javascript Typescript
158+ import { Portkey } from 'portkey-ai';
159+
160+ // Initialize the Portkey client
161+ const portkey = new Portkey({
162+   apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
163+   provider: "@PROVIDER"
164+ });
165+
166+ const startBatch = async () => {
167+   const startBatchResponse = await portkey.batches.create({
168+     endpoint: "endpoint", // ex: /v1/chat/completions
169+     completion_window: "completion_window", // ex: 24h
170+     metadata: {}, // metadata for the batch
171+     input_blob: "<blob_url>",
172+     output_folder: {
173+       url: "<output_blob_folder>" // both error file and output file will be saved in this folder
174+     }
175+   });
176+
177+   console.log(startBatchResponse);
178+ }
179+
180+ await startBatch();
181+ ```
182+
183+ ```bash curl
184+ curl --location 'https://api.portkey.ai/v1/batches' \
185+ --header 'x-portkey-api-key: <portkey_api_key>' \
186+ --header 'x-portkey-provider: @provider' \
187+ --header 'Content-Type: application/json' \
188+ --data '{
189+   "endpoint": "<endpoint>",
190+   "completion_window": "<completion_window>",
191+   "metadata": {},
192+   "input_blob": "<blob_url>",
193+   "output_folder": {
194+     "url": "<output_blob_folder>"
195+   }
196+ }'
197+ ```
198+
199+ ```javascript OpenAI NodeJS
200+ import OpenAI from 'openai'; // We're using the v4 SDK
201+ import { PORTKEY_GATEWAY_URL, createHeaders } from 'portkey-ai'
202+
203+ const openai = new OpenAI({
204+   apiKey: 'OPENAI_API_KEY', // defaults to process.env["OPENAI_API_KEY"],
205+   baseURL: PORTKEY_GATEWAY_URL,
206+   defaultHeaders: createHeaders({
207+     provider: "openai",
208+     apiKey: "PORTKEY_API_KEY" // defaults to process.env["PORTKEY_API_KEY"]
209+   })
210+ });
211+
212+ const startBatch = async () => {
213+   const startBatchResponse = await openai.batches.create({
214+     endpoint: "endpoint", // ex: /v1/chat/completions
215+     completion_window: "completion_window", // ex: 24h
216+     metadata: {}, // metadata for the batch
217+     extra_body: {
218+       input_blob: "<blob_url>",
219+       output_folder: {
220+         url: "<output_blob_folder>" // both error file and output file will be saved in this folder
221+       }
222+     }
223+   });
224+
225+   console.log(startBatchResponse);
226+ }
227+
228+ await startBatch();
229+ ```
230+
231+ ```python OpenAI Python
232+ from openai import OpenAI
233+ from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders
234+
235+ openai = OpenAI(
236+     api_key='OPENAI_API_KEY',
237+     base_url=PORTKEY_GATEWAY_URL,
238+     default_headers=createHeaders(
239+         provider="openai",
240+         api_key="PORTKEY_API_KEY"
241+     )
242+ )
243+
244+ start_batch_response = openai.batches.create(
245+     endpoint="endpoint",  # ex: /v1/chat/completions
246+     completion_window="completion_window",  # ex: 24h
247+     metadata={},  # metadata for the batch
248+     extra_body={
249+         "input_blob": "<blob_url>",
250+         "output_folder": {
251+             "url": "<output_blob_folder>"  # both error file and output file will be saved in this folder
252+         }
253+     }
254+ )
255+
256+ print(start_batch_response)
257+ ```
258+ </CodeGroup >
259+
133260### List Batch Jobs
134261<CodeGroup >
135262``` python Python
0 commit comments