diff --git a/integrations/llms/azure-openai/batches.mdx b/integrations/llms/azure-openai/batches.mdx
index 632a01cf..0e61f1f3 100644
--- a/integrations/llms/azure-openai/batches.mdx
+++ b/integrations/llms/azure-openai/batches.mdx
@@ -74,7 +74,7 @@ curl --location 'https://api.portkey.ai/v1/batches' \
     "input_file_id": "",
     "endpoint": "",
     "completion_window": "",
-    "metadata": {},
+    "metadata": {}
 }'
 ```
@@ -130,6 +130,133 @@ print(start_batch_response)
+
+
+
+### Create Batch Job with Blob Storage
+
+```python Python
+from portkey_ai import Portkey
+
+# Initialize the Portkey client
+portkey = Portkey(
+    api_key="PORTKEY_API_KEY",  # Replace with your Portkey API key
+    provider="@PROVIDER"
+)
+
+start_batch_response = portkey.batches.create(
+    endpoint="endpoint", # ex: /v1/chat/completions
+    completion_window="completion_window", # ex: 24h
+    metadata={}, # metadata for the batch
+    input_blob="",
+    output_folder={
+        "url": "" # both error file and output file will be saved in this folder
+    }
+)
+
+print(start_batch_response)
+```
+
+```javascript Typescript
+import { Portkey } from 'portkey-ai';
+
+// Initialize the Portkey client
+const portkey = new Portkey({
+  apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
+  provider: "@PROVIDER"
+});
+
+const startBatch = async () => {
+  const startBatchResponse = await portkey.batches.create({
+    endpoint: "endpoint", // ex: /v1/chat/completions
+    completion_window: "completion_window", // ex: 24h
+    metadata: {}, // metadata for the batch
+    input_blob: "",
+    output_folder: {
+      url: "" // both error file and output file will be saved in this folder
+    }
+  });
+
+  console.log(startBatchResponse);
+};
+
+await startBatch();
+```
+
+```bash curl
+curl --location 'https://api.portkey.ai/v1/batches' \
+--header 'x-portkey-api-key: ' \
+--header 'x-portkey-provider: @provider' \
+--header 'Content-Type: application/json' \
+--data '{
+    "endpoint": "",
+    "completion_window": "",
+    "metadata": {},
+    "input_blob": "",
+    "output_folder": {
+        "url": ""
+    }
+}'
+```
+
+```javascript OpenAI NodeJS
+import OpenAI from 'openai'; // We're using the v4 SDK
+import { PORTKEY_GATEWAY_URL, createHeaders } from 'portkey-ai';
+
+const openai = new OpenAI({
+  apiKey: 'OPENAI_API_KEY', // defaults to process.env["OPENAI_API_KEY"]
+  baseURL: PORTKEY_GATEWAY_URL,
+  defaultHeaders: createHeaders({
+    provider: "openai",
+    apiKey: "PORTKEY_API_KEY" // defaults to process.env["PORTKEY_API_KEY"]
+  })
+});
+
+const startBatch = async () => {
+  const startBatchResponse = await openai.batches.create({
+    endpoint: "endpoint", // ex: /v1/chat/completions
+    completion_window: "completion_window", // ex: 24h
+    metadata: {}, // metadata for the batch
+    extra_body: {
+      input_blob: "",
+      output_folder: {
+        url: "" // both error file and output file will be saved in this folder
+      }
+    }
+  });
+
+  console.log(startBatchResponse);
+};
+
+await startBatch();
+```
+
+```python OpenAI Python
+from openai import OpenAI
+from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders
+
+openai = OpenAI(
+    api_key='OPENAI_API_KEY',
+    base_url=PORTKEY_GATEWAY_URL,
+    default_headers=createHeaders(
+        provider="openai",
+        api_key="PORTKEY_API_KEY"
+    )
+)
+
+start_batch_response = openai.batches.create(
+    endpoint="endpoint", # ex: /v1/chat/completions
+    completion_window="completion_window", # ex: 24h
+    metadata={}, # metadata for the batch
+    extra_body={
+        "input_blob": "",
+        "output_folder": {
+            "url": "" # both error file and output file will be saved in this folder
+        }
+    }
+)
+
+print(start_batch_response)
+```
+
+
 ### List Batch Jobs
 
 ```python Python