Skip to content

Commit 5b2a9dc

Browse files
authored
Merge pull request #283453 from mrbullwinkle/mrb_08_01_2024_batch-003
[Azure OpenAI] [Release branch] Batch
2 parents 707aea3 + 3abaf04 commit 5b2a9dc

File tree

3 files changed

+123
-38
lines changed

3 files changed

+123
-38
lines changed

articles/ai-services/openai/concepts/models.md

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -224,7 +224,6 @@ Global batch is currently supported in the following regions:
224224
- East US
225225
- West US
226226
- Sweden Central
227-
- South India
228227

229228
### GPT-4 and GPT-4 Turbo model availability
230229

articles/ai-services/openai/how-to/batch.md

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,6 @@ Global batch is currently supported in the following regions:
4747
- East US
4848
- West US
4949
- Sweden Central
50-
- South India
5150

5251
The following models support global batch:
5352

articles/ai-services/openai/includes/batch/batch-python.md

Lines changed: 123 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -93,9 +93,9 @@ file_id = file.id
9393

9494
```json
9595
{
96-
"id": "file-31b0f44ef8334259be3a3f3bb793a7b9",
97-
"bytes": 821,
98-
"created_at": 1721666905,
96+
"id": "file-9f3a81d899b4442f98b640e4bc3535dd",
97+
"bytes": 815,
98+
"created_at": 1722476551,
9999
"filename": "test.jsonl",
100100
"object": "file",
101101
"purpose": "batch",
@@ -124,7 +124,7 @@ while status != "processed":
124124
**Output:**
125125

126126
```output
127-
2024-07-22 12:49:06.816651 File Id: file-31b0f44ef8334259be3a3f3bb793a7b9, Status: processed
127+
2024-07-31 21:42:53.663655 File Id: file-9f3a81d899b4442f98b640e4bc3535dd, Status: processed
128128
```
129129

130130
## Create batch job
@@ -152,11 +152,11 @@ print(batch_response.model_dump_json(indent=2))
152152

153153
```json
154154
{
155-
"id": "batch_73445352-d8cf-43c4-b51c-842864923600",
155+
"id": "batch_6caaf24d-54a5-46be-b1b7-518884fcbdde",
156156
"completion_window": "24h",
157-
"created_at": 1722450284,
157+
"created_at": 1722476583,
158158
"endpoint": null,
159-
"input_file_id": "file-ad5ebc5773534c5885ef4aea871d6b9f",
159+
"input_file_id": "file-9f3a81d899b4442f98b640e4bc3535dd",
160160
"object": "batch",
161161
"status": "validating",
162162
"cancelled_at": null,
@@ -165,7 +165,7 @@ print(batch_response.model_dump_json(indent=2))
165165
"error_file_id": null,
166166
"errors": null,
167167
"expired_at": null,
168-
"expires_at": 1722536684,
168+
"expires_at": 1722562983,
169169
"failed_at": null,
170170
"finalizing_at": null,
171171
"in_progress_at": null,
@@ -184,7 +184,6 @@ print(batch_response.model_dump_json(indent=2))
184184
Once you have created a batch job successfully, you can monitor its progress either in the Studio or programmatically. When checking batch job progress, we recommend waiting at least 60 seconds in between each status call.
185185

186186
```Python
187-
# Wait until the uploaded file is in processed state
188187
import time
189188
import datetime
190189

@@ -199,15 +198,15 @@ while status not in ("completed", "failed", "canceled"):
199198
**Output:**
200199

201200
```output
202-
2024-07-31 14:26:33.650577 Batch Id: batch_73445352-d8cf-43c4-b51c-842864923600, Status: validating
203-
2024-07-31 14:27:34.479144 Batch Id: batch_73445352-d8cf-43c4-b51c-842864923600, Status: validating
204-
2024-07-31 14:28:35.522783 Batch Id: batch_73445352-d8cf-43c4-b51c-842864923600, Status: validating
205-
2024-07-31 14:29:36.258073 Batch Id: batch_73445352-d8cf-43c4-b51c-842864923600, Status: validating
206-
2024-07-31 14:30:36.916150 Batch Id: batch_73445352-d8cf-43c4-b51c-842864923600, Status: in_progress
207-
2024-07-31 14:31:37.981857 Batch Id: batch_73445352-d8cf-43c4-b51c-842864923600, Status: in_progress
208-
2024-07-31 14:32:38.685983 Batch Id: batch_73445352-d8cf-43c4-b51c-842864923600, Status: in_progress
209-
2024-07-31 14:33:39.355531 Batch Id: batch_73445352-d8cf-43c4-b51c-842864923600, Status: in_progress
210-
2024-07-31 14:34:39.986518 Batch Id: batch_73445352-d8cf-43c4-b51c-842864923600, Status: completed
201+
2024-07-31 21:48:32.556488 Batch Id: batch_6caaf24d-54a5-46be-b1b7-518884fcbdde, Status: validating
202+
2024-07-31 21:49:39.221560 Batch Id: batch_6caaf24d-54a5-46be-b1b7-518884fcbdde, Status: in_progress
203+
2024-07-31 21:50:53.383138 Batch Id: batch_6caaf24d-54a5-46be-b1b7-518884fcbdde, Status: in_progress
204+
2024-07-31 21:52:07.274570 Batch Id: batch_6caaf24d-54a5-46be-b1b7-518884fcbdde, Status: in_progress
205+
2024-07-31 21:53:21.149501 Batch Id: batch_6caaf24d-54a5-46be-b1b7-518884fcbdde, Status: finalizing
206+
2024-07-31 21:54:34.572508 Batch Id: batch_6caaf24d-54a5-46be-b1b7-518884fcbdde, Status: finalizing
207+
2024-07-31 21:55:35.304713 Batch Id: batch_6caaf24d-54a5-46be-b1b7-518884fcbdde, Status: finalizing
208+
2024-07-31 21:56:36.531816 Batch Id: batch_6caaf24d-54a5-46be-b1b7-518884fcbdde, Status: finalizing
209+
2024-07-31 21:57:37.414105 Batch Id: batch_6caaf24d-54a5-46be-b1b7-518884fcbdde, Status: completed
211210
```
212211

213212
The following status values are possible:
@@ -229,33 +228,32 @@ To examine the job status details you can run:
229228
print(batch_response.model_dump_json(indent=2))
230229
```
231230

231+
**Output:**
232+
232233
```json
233234
{
234-
"id": "batch_f1423441-0935-4a3d-9a96-9c23bedc3289",
235+
"id": "batch_6caaf24d-54a5-46be-b1b7-518884fcbdde",
235236
"completion_window": "24h",
236-
"created_at": "2024-07-22T16:51:52.6450839+00:00",
237+
"created_at": 1722476583,
237238
"endpoint": null,
238-
"input_file_id": "file-31b0f44ef8334259be3a3f3bb793a7b9",
239+
"input_file_id": "file-9f3a81d899b4442f98b640e4bc3535dd",
239240
"object": "batch",
240-
"status": "Completed",
241+
"status": "completed",
241242
"cancelled_at": null,
242243
"cancelling_at": null,
243-
"completed_at": "2024-07-22T17:59:43.6332138+00:00",
244-
"error_file_id": "file-de3c3e8b-83b4-4a83-89c3-310f3d677df4",
245-
"errors": {
246-
"data": [],
247-
"object": "list"
248-
},
244+
"completed_at": 1722477429,
245+
"error_file_id": "file-c795ae52-3ba7-417d-86ec-07eebca57d0b",
246+
"errors": null,
249247
"expired_at": null,
250-
"expires_at": "2024-07-23T16:51:52.5940767+00:00",
248+
"expires_at": 1722562983,
251249
"failed_at": null,
252-
"finalizing_at": "2024-07-22T17:55:27.9985631+00:00",
253-
"in_progress_at": "2024-07-22T16:57:43.5157566+00:00",
250+
"finalizing_at": 1722477177,
251+
"in_progress_at": null,
254252
"metadata": null,
255-
"output_file_id": "file-ccd5d748-f5a4-4846-a0f8-2538d569000a",
253+
"output_file_id": "file-3304e310-3b39-4e34-9f1c-e1c1504b2b2a",
256254
"request_counts": {
257-
"completed": 0,
258-
"failed": 3,
255+
"completed": 3,
256+
"failed": 0,
259257
"total": 3
260258
}
261259
}
@@ -266,9 +264,98 @@ Observe that there's both `error_file_id` and a separate `output_file_id`. Use t
266264
## Retrieve batch job output file
267265

268266
```python
269-
response =client.files.content(batch_reponse.output_file_id)
267+
import json
268+
269+
response = client.files.content(batch_response.output_file_id)
270+
raw_responses = response.text.strip().split('\n')
270271

271-
print(response.text)
272+
for raw_response in raw_responses:
273+
json_response = json.loads(raw_response)
274+
formatted_json = json.dumps(json_response, indent=2)
275+
print(formatted_json)
276+
```
277+
278+
**Output:**
279+
280+
For brevity, only a single chat completion response is shown. If you follow the steps in this article, you should have three responses similar to the one below:
281+
282+
```json
283+
{
284+
"custom_id": "task-0",
285+
"response": {
286+
"body": {
287+
"choices": [
288+
{
289+
"content_filter_results": {
290+
"hate": {
291+
"filtered": false,
292+
"severity": "safe"
293+
},
294+
"self_harm": {
295+
"filtered": false,
296+
"severity": "safe"
297+
},
298+
"sexual": {
299+
"filtered": false,
300+
"severity": "safe"
301+
},
302+
"violence": {
303+
"filtered": false,
304+
"severity": "safe"
305+
}
306+
},
307+
"finish_reason": "stop",
308+
"index": 0,
309+
"logprobs": null,
310+
"message": {
311+
"content": "Microsoft was founded on April 4, 1975, by Bill Gates and Paul Allen in Albuquerque, New Mexico.",
312+
"role": "assistant"
313+
}
314+
}
315+
],
316+
"created": 1722477079,
317+
"id": "chatcmpl-9rFGJ9dh08Tw9WRKqaEHwrkqRa4DJ",
318+
"model": "gpt-4o-2024-05-13",
319+
"object": "chat.completion",
320+
"prompt_filter_results": [
321+
{
322+
"prompt_index": 0,
323+
"content_filter_results": {
324+
"hate": {
325+
"filtered": false,
326+
"severity": "safe"
327+
},
328+
"jailbreak": {
329+
"filtered": false,
330+
"detected": false
331+
},
332+
"self_harm": {
333+
"filtered": false,
334+
"severity": "safe"
335+
},
336+
"sexual": {
337+
"filtered": false,
338+
"severity": "safe"
339+
},
340+
"violence": {
341+
"filtered": false,
342+
"severity": "safe"
343+
}
344+
}
345+
}
346+
],
347+
"system_fingerprint": "fp_a9bfe9d51d",
348+
"usage": {
349+
"completion_tokens": 24,
350+
"prompt_tokens": 27,
351+
"total_tokens": 51
352+
}
353+
},
354+
"request_id": "660b7424-b648-4b67-addc-862ba067d442",
355+
"status_code": 200
356+
},
357+
"error": null
358+
}
272359
```
273360

274361
### Additional batch commands

0 commit comments

Comments
 (0)