Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
129 changes: 128 additions & 1 deletion integrations/llms/azure-openai/batches.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ curl --location 'https://api.portkey.ai/v1/batches' \
"input_file_id": "<file_id>",
"endpoint": "<endpoint>",
"completion_window": "<completion_window>",
"metadata": {},
"metadata": {}
}'
```

Expand Down Expand Up @@ -130,6 +130,133 @@ print(start_batch_response)

</CodeGroup>

### Create Batch Job with Blob Storage
<CodeGroup>
```python Python
from portkey_ai import Portkey

# Initialize the Portkey client
portkey = Portkey(
api_key="PORTKEY_API_KEY", # Replace with your Portkey API key
provider="@PROVIDER"
)

start_batch_response = portkey.batches.create(
endpoint="endpoint", # ex: /v1/chat/completions
completion_window="completion_window", # ex: 24h
metadata={}, # metadata for the batch
input_blob="<blob_url>",
output_folder={
"url": "<output_blob_folder>" # both error file and output file will be saved in this folder
}
)

print(start_batch_response)
```

```javascript Typescript
import { Portkey } from 'portkey-ai';

// Initialize the Portkey client
const portkey = new Portkey({
apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
provider:"@PROVIDER"
});

const startBatch = async () => {
const startBatchResponse = await portkey.batches.create({
endpoint: "endpoint", // ex: /v1/chat/completions
completion_window: "completion_window", // ex: 24h
metadata: {}, // metadata for the batch
input_blob: "<blob_url>",
output_folder: {
url: "<output_blob_folder>" // both error file and output file will be saved in this folder
}
});

console.log(startBatchResponse);
}

await startBatch();
```

```bash curl
curl --location 'https://api.portkey.ai/v1/batches' \
--header 'x-portkey-api-key: <portkey_api_key>' \
--header 'x-portkey-provider: @provider' \
--header 'Content-Type: application/json' \
--data '{
"endpoint": "<endpoint>",
"completion_window": "<completion_window>",
"metadata": {},
"input_blob": "<blob_url>",
"output_folder": {
"url": "<output_blob_folder>" # both error file and output file will be saved in this folder
}
}'
```

```javascript OpenAI NodeJS
import OpenAI from 'openai'; // We're using the v4 SDK
import { PORTKEY_GATEWAY_URL, createHeaders } from 'portkey-ai'

const openai = new OpenAI({
apiKey: 'OPENAI_API_KEY', // defaults to process.env["OPENAI_API_KEY"],
baseURL: PORTKEY_GATEWAY_URL,
defaultHeaders: createHeaders({
provider: "openai",
apiKey: "PORTKEY_API_KEY" // defaults to process.env["PORTKEY_API_KEY"]
})
});

const startBatch = async () => {
const startBatchResponse = await openai.batches.create({
endpoint: "endpoint", // ex: /v1/chat/completions
completion_window: "completion_window", // ex: 24h
metadata: {}, // metadata for the batch
// NOTE: the Node SDK has no `extra_body` option (that is Python-only);
// extra request fields are passed at the top level of the params object
input_blob: "<blob_url>",
output_folder: {
url: "<output_blob_folder>" // both error file and output file will be saved in this folder
}
});

console.log(startBatchResponse);
}

await startBatch();
```

```python OpenAI Python
from openai import OpenAI
from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders

openai = OpenAI(
api_key='OPENAI_API_KEY',
base_url=PORTKEY_GATEWAY_URL,
default_headers=createHeaders(
provider="openai",
api_key="PORTKEY_API_KEY"
)
)

start_batch_response = openai.batches.create(
endpoint="endpoint", # ex: /v1/chat/completions
completion_window="completion_window", # ex: 24h
metadata={}, # metadata for the batch
extra_body={
"input_blob": "<blob_url>",
"output_folder": {
"url": "<output_blob_folder>" # both error file and output file will be saved in this folder
}
}
)

print(start_batch_response)
```
</CodeGroup>

### List Batch Jobs
<CodeGroup>
```python Python
Expand Down