84 changes: 84 additions & 0 deletions .github/workflows/aimlapi.yml
@@ -0,0 +1,84 @@
# This workflow comes from https://github.com/ofek/hatch-mypyc
# https://github.com/ofek/hatch-mypyc/blob/5a198c0ba8660494d02716cfc9d79ce4adfb1442/.github/workflows/test.yml
name: Test / aimlapi

on:
  schedule:
    - cron: "0 0 * * *"
  pull_request:
    paths:
      - "integrations/aimlapi/**"
      - "!integrations/aimlapi/*.md"
      - ".github/workflows/aimlapi.yml"

defaults:
  run:
    working-directory: integrations/aimlapi

concurrency:
  group: aimlapi-${{ github.head_ref }}
  cancel-in-progress: true

env:
  PYTHONUNBUFFERED: "1"
  FORCE_COLOR: "1"
  AIMLAPI_API_KEY: ${{ secrets.AIMLAPI_API_KEY }}

jobs:
  run:
    name: Python ${{ matrix.python-version }} on ${{ startsWith(matrix.os, 'macos-') && 'macOS' || startsWith(matrix.os, 'windows-') && 'Windows' || 'Linux' }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        python-version: ["3.9", "3.13"]

    steps:
      - name: Support longpaths
        if: matrix.os == 'windows-latest'
        working-directory: .
        run: git config --system core.longpaths true

      - uses: actions/checkout@v5

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install Hatch
        run: pip install --upgrade hatch

      - name: Lint
        if: matrix.python-version == '3.9' && runner.os == 'Linux'
        run: hatch run fmt-check && hatch run test:types

      - name: Generate docs
        if: matrix.python-version == '3.9' && runner.os == 'Linux'
        run: hatch run docs

      - name: Run tests
        run: hatch run test:cov-retry

      - name: Run unit tests with lowest direct dependencies
        run: |
          hatch run uv pip compile pyproject.toml --resolution lowest-direct --output-file requirements_lowest_direct.txt
          hatch run uv pip install -r requirements_lowest_direct.txt
          hatch run test:unit

      # Since this integration inherits from OpenAIChatGenerator, we run ALL tests with Haystack main branch to catch regressions
      - name: Nightly - run tests with Haystack main branch
        if: github.event_name == 'schedule'
        run: |
          hatch env prune
          hatch run uv pip install git+https://github.com/deepset-ai/haystack.git@main
          hatch run test:all

      - name: Send event to Datadog for nightly failures
        if: failure() && github.event_name == 'schedule'
        uses: ./.github/actions/send_failure
        with:
          title: |
            Core integrations nightly tests failure: ${{ github.workflow }}
          api-key: ${{ secrets.CORE_DATADOG_API_KEY }}
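The nightly step above reruns the entire suite against Haystack's main branch because, as the workflow comment notes, this integration subclasses OpenAIChatGenerator, so upstream changes to the base class reach it directly. A minimal sketch of that relationship (the haystack_integrations import path is assumed from the usual core-integrations package layout):

from haystack.components.generators.chat import OpenAIChatGenerator
from haystack_integrations.components.generators.aimlapi import AIMLAPIChatGenerator

# The subclass mainly swaps in AIMLAPI defaults (base URL, AIMLAPI_API_KEY env var)
# and request preparation; run() and the rest come from the base class, which is
# why regressions on Haystack main surface in this integration's tests.
assert issubclass(AIMLAPIChatGenerator, OpenAIChatGenerator)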
1 change: 1 addition & 0 deletions README.md
@@ -25,6 +25,7 @@ Please check out our [Contribution Guidelines](CONTRIBUTING.md) for all the details

| Package | Type | PyPi Package | Status |
|----------------------------------------------------------------------------------------------------------------|-----------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| [aimlapi-haystack](integrations/aimlapi/) | Generator | [![PyPI - Version](https://img.shields.io/pypi/v/aimlapi-haystack.svg)](https://pypi.org/project/aimlapi-haystack) | [![Test / aimlapi](https://github.com/deepset-ai/haystack-core-integrations/actions/workflows/aimlapi.yml/badge.svg)](https://github.com/deepset-ai/haystack-core-integrations/actions/workflows/aimlapi.yml) |
| [amazon-bedrock-haystack](integrations/amazon_bedrock/) | Embedder, Generator, Ranker, Downloader | [![PyPI - Version](https://img.shields.io/pypi/v/amazon-bedrock-haystack.svg)](https://pypi.org/project/amazon-bedrock-haystack) | [![Test / amazon_bedrock](https://github.com/deepset-ai/haystack-core-integrations/actions/workflows/amazon_bedrock.yml/badge.svg)](https://github.com/deepset-ai/haystack-core-integrations/actions/workflows/amazon_bedrock.yml) |
| [amazon-sagemaker-haystack](integrations/amazon_sagemaker/) | Generator | [![PyPI - Version](https://img.shields.io/pypi/v/amazon-sagemaker-haystack.svg)](https://pypi.org/project/amazon-sagemaker-haystack) | [![Test / amazon_sagemaker](https://github.com/deepset-ai/haystack-core-integrations/actions/workflows/amazon_sagemaker.yml/badge.svg)](https://github.com/deepset-ai/haystack-core-integrations/actions/workflows/amazon_sagemaker.yml) |
| [anthropic-haystack](integrations/anthropic/) | Generator | [![PyPI - Version](https://img.shields.io/pypi/v/anthropic-haystack.svg)](https://pypi.org/project/anthropic-haystack) | [![Test / anthropic](https://github.com/deepset-ai/haystack-core-integrations/actions/workflows/anthropic.yml/badge.svg)](https://github.com/deepset-ai/haystack-core-integrations/actions/workflows/anthropic.yml) |
6 changes: 2 additions & 4 deletions integrations/aimlapi/examples/aimlapi_basic_example.py
@@ -12,9 +12,7 @@
 def main() -> None:
     """Generate a response without using any tools."""
 
-    generator = AIMLAPIChatGenerator(
-        model="openai/gpt-5-chat-latest"
-    )
+    generator = AIMLAPIChatGenerator(model="openai/gpt-5-chat-latest")
 
     messages = [
         ChatMessage.from_system("You are a concise assistant."),
@@ -23,7 +21,7 @@ def main() -> None:
 
     reply = generator.run(messages=messages)["replies"][0]
 
-    print(f"assistant response: {reply.text}")
+    print(f"assistant response: {reply.text}")  # noqa: T201
 
 
 if __name__ == "__main__":
24 changes: 9 additions & 15 deletions integrations/aimlapi/examples/aimlapi_with_tools_example.py
@@ -35,46 +35,40 @@ def main() -> None:
 
     tool_invoker = ToolInvoker(tools=[weather_tool])
 
-    client = AIMLAPIChatGenerator(
-        model="openai/gpt-5-mini-2025-08-07"
-    )
+    client = AIMLAPIChatGenerator(model="openai/gpt-5-mini-2025-08-07")
 
     messages = [
-        ChatMessage.from_system(
-            "You help users by calling the provided tools when they are relevant."
-        ),
+        ChatMessage.from_system("You help users by calling the provided tools when they are relevant."),
         ChatMessage.from_user("What's the weather in Tokyo today?"),
     ]
 
-    print("Requesting a tool call from the model...")
+    print("Requesting a tool call from the model...")  # noqa: T201
     tool_request = client.run(
         messages=messages,
         tools=[weather_tool],
-        generation_kwargs={
-            "tool_choice": {"type": "function", "function": {"name": "weather"}}
-        },
+        generation_kwargs={"tool_choice": {"type": "function", "function": {"name": "weather"}}},
     )["replies"][0]
 
-    print(f"assistant tool request: {tool_request}")
+    print(f"assistant tool request: {tool_request}")  # noqa: T201
 
     if not tool_request.tool_calls:
-        print("No tool call was produced by the model.")
+        print("No tool call was produced by the model.")  # noqa: T201
         return
 
     tool_messages = tool_invoker.run(messages=[tool_request])["tool_messages"]
     for tool_message in tool_messages:
         for tool_result in tool_message.tool_call_results:
-            print(f"tool output: {tool_result.result}")
+            print(f"tool output: {tool_result.result}")  # noqa: T201
 
-    follow_up_messages = messages + [tool_request, *tool_messages]
+    follow_up_messages = [*messages, tool_request, *tool_messages]
 
     final_reply = client.run(
         messages=follow_up_messages,
         tools=[weather_tool],
         generation_kwargs={"tool_choice": "none"},
     )["replies"][0]
 
-    print(f"assistant final answer: {final_reply.text}")
+    print(f"assistant final answer: {final_reply.text}")  # noqa: T201
 
 
 if __name__ == "__main__":
@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional, Union, cast
 
 from haystack import component, default_to_dict, logging
 from haystack.components.generators.chat import OpenAIChatGenerator
@@ -66,7 +66,7 @@ def __init__(
         streaming_callback: Optional[StreamingCallbackT] = None,
         api_base_url: Optional[str] = "https://api.aimlapi.com/v1",
         generation_kwargs: Optional[Dict[str, Any]] = None,
-        tools: Optional[Union[List[Tool], Toolset]] = None,
+        tools: Optional[Union[list[Union[Tool, Toolset]], Toolset]] = None,
         timeout: Optional[float] = None,
         extra_headers: Optional[Dict[str, Any]] = None,
         max_retries: Optional[int] = None,
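The widened annotation above accepts either a plain list of Tool objects or a Toolset. A hedged usage sketch (the weather tool is illustrative, the import path is assumed, and AIMLAPI_API_KEY is expected in the environment):

from haystack.tools import Tool, Toolset
from haystack_integrations.components.generators.aimlapi import AIMLAPIChatGenerator


def get_weather(city: str) -> str:
    """Illustrative stub; a real tool would call a weather API."""
    return f"Sunny in {city}"


weather_tool = Tool(
    name="weather",
    description="Get the weather for a city.",
    parameters={"type": "object", "properties": {"city": {"type": "string"}}, "required": ["city"]},
    function=get_weather,
)

# Both forms satisfy the annotation; a Toolset is flattened downstream in _prepare_api_call.
client_with_list = AIMLAPIChatGenerator(model="openai/gpt-5-mini-2025-08-07", tools=[weather_tool])
client_with_toolset = AIMLAPIChatGenerator(model="openai/gpt-5-mini-2025-08-07", tools=Toolset([weather_tool]))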
@@ -157,7 +157,7 @@ def _prepare_api_call(
         messages: List[ChatMessage],
         streaming_callback: Optional[StreamingCallbackT] = None,
         generation_kwargs: Optional[Dict[str, Any]] = None,
-        tools: Optional[Union[List[Tool], Toolset]] = None,
+        tools: Optional[Union[list[Union[Tool, Toolset]], Toolset]] = None,
         tools_strict: Optional[bool] = None,
     ) -> Dict[str, Any]:
         # update generation kwargs by merging with the generation kwargs passed to the run method
@@ -167,17 +167,22 @@
         # adapt ChatMessage(s) to the format expected by the OpenAI API (AIMLAPI uses the same format)
         aimlapi_formatted_messages: List[Dict[str, Any]] = [message.to_openai_dict_format() for message in messages]
 
-        tools = tools or self.tools
-        if isinstance(tools, Toolset):
-            tools = list(tools)
+        tools_in = tools or self.tools
+
+        tools_list: List[Tool]
+        if isinstance(tools_in, Toolset):
+            tools_list = list(tools_in)
+        else:
+            tools_list = cast(List[Tool], tools_in or [])
+
         tools_strict = tools_strict if tools_strict is not None else self.tools_strict
-        _check_duplicate_tool_names(list(tools or []))
+        _check_duplicate_tool_names(tools_list)
 
         aimlapi_tools = {}
-        if tools:
+        if tools_list:
             tool_definitions = [
                 {"type": "function", "function": {**t.tool_spec, **({"strict": tools_strict} if tools_strict else {})}}
-                for t in tools
+                for t in tools_list
             ]
             aimlapi_tools = {"tools": tool_definitions}
 
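Pulled out of the class, the normalization above behaves like this standalone sketch (it mirrors the diff rather than importing the real implementation):

from typing import List, Optional, Union, cast

from haystack.tools import Tool, Toolset


def normalize_tools(tools_in: Optional[Union[List[Tool], Toolset]]) -> List[Tool]:
    # A Toolset iterates over its Tools, so it flattens to a plain list.
    if isinstance(tools_in, Toolset):
        return list(tools_in)
    # The cast only informs the type checker; None becomes an empty list.
    return cast(List[Tool], tools_in or [])

Keeping tools_in and tools_list as separate names, instead of reassigning the tools parameter as the old code did, gives each variable a single stable type, which is what lets the type checker pass without ignores.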
@@ -189,10 +194,11 @@
 
         return {
             "model": self.model,
-            "messages": aimlapi_formatted_messages,
+            "messages": aimlapi_formatted_messages,  # type: ignore[arg-type] # openai expects list of specific message types
             "stream": streaming_callback is not None,
             "n": num_responses,
             **aimlapi_tools,
+            "extra_body": {**generation_kwargs},
             "extra_headers": {**extra_headers},
             "openai_endpoint": "create",
         }
}