
Commit a77c475

jjmachan and chitalian authored
feat: deeper helicone integration (#1196)
Based on #1182.

Co-authored-by: Justin <[email protected]>
1 parent 17718f2 commit a77c475

File tree

docs/howtos/integrations/helicone.ipynb
src/ragas/evaluation.py
src/ragas/integrations/helicone.py
src/ragas/llms/base.py

4 files changed: +149 −12 lines


docs/howtos/integrations/helicone.ipynb

Lines changed: 27 additions & 10 deletions
@@ -53,15 +53,22 @@
  "from datasets import Dataset\n",
  "from ragas import evaluate\n",
  "from ragas.metrics import faithfulness, answer_relevancy, context_precision\n",
+ "from ragas.integrations.helicone import helicone_config  # import helicone_config\n",
+ "\n",
  "\n",
  "# Set up Helicone\n",
- "HELICONE_API_KEY = \"your_helicone_api_key_here\"  # Replace with your actual Helicone API key\n",
- "os.environ[\"OPENAI_API_BASE\"] = f\"https://oai.helicone.ai/{HELICONE_API_KEY}/v1\"\n",
- "os.environ[\"OPENAI_API_KEY\"] = \"your_openai_api_key_here\"  # Replace with your actual OpenAI API key\n",
+ "helicone_config.api_key = (\n",
+ "    \"your_helicone_api_key_here\"  # Replace with your actual Helicone API key\n",
+ ")\n",
+ "os.environ[\n",
+ "    \"OPENAI_API_KEY\"\n",
+ "] = \"your_openai_api_key_here\"  # Replace with your actual OpenAI API key\n",
  "\n",
  "# Verify Helicone API key is set\n",
  "if HELICONE_API_KEY == \"your_helicone_api_key_here\":\n",
- "    raise ValueError(\"Please replace 'your_helicone_api_key_here' with your actual Helicone API key.\")"
+ "    raise ValueError(\n",
+ "        \"Please replace 'your_helicone_api_key_here' with your actual Helicone API key.\"\n",
+ "    )"
  ]
  },
  {

@@ -80,13 +87,23 @@
  "outputs": [],
  "source": [
  "data_samples = {\n",
- "    'question': ['When was the first Super Bowl?', 'Who has won the most Super Bowls?'],\n",
- "    'answer': ['The first Super Bowl was held on January 15, 1967.', 'The New England Patriots have won the most Super Bowls, with six championships.'],\n",
- "    'contexts': [\n",
- "        ['The First AFL–NFL World Championship Game, later known as Super Bowl I, was played on January 15, 1967, at the Los Angeles Memorial Coliseum in Los Angeles, California.'],\n",
- "        ['As of 2021, the New England Patriots have won the most Super Bowls with six championships, all under the leadership of quarterback Tom Brady and head coach Bill Belichick.']\n",
+ "    \"question\": [\"When was the first Super Bowl?\", \"Who has won the most Super Bowls?\"],\n",
+ "    \"answer\": [\n",
+ "        \"The first Super Bowl was held on January 15, 1967.\",\n",
+ "        \"The New England Patriots have won the most Super Bowls, with six championships.\",\n",
+ "    ],\n",
+ "    \"contexts\": [\n",
+ "        [\n",
+ "            \"The First AFL–NFL World Championship Game, later known as Super Bowl I, was played on January 15, 1967, at the Los Angeles Memorial Coliseum in Los Angeles, California.\"\n",
+ "        ],\n",
+ "        [\n",
+ "            \"As of 2021, the New England Patriots have won the most Super Bowls with six championships, all under the leadership of quarterback Tom Brady and head coach Bill Belichick.\"\n",
+ "        ],\n",
+ "    ],\n",
+ "    \"ground_truth\": [\n",
+ "        \"The first Super Bowl was held on January 15, 1967.\",\n",
+ "        \"The New England Patriots have won the most Super Bowls, with six championships as of 2021.\",\n",
  "    ],\n",
- "    'ground_truth': ['The first Super Bowl was held on January 15, 1967.', 'The New England Patriots have won the most Super Bowls, with six championships as of 2021.']\n",
  "}\n",
  "\n",
  "dataset = Dataset.from_dict(data_samples)\n",

src/ragas/evaluation.py

Lines changed: 7 additions & 0 deletions
@@ -19,6 +19,7 @@
 )
 from ragas.exceptions import ExceptionInRunner
 from ragas.executor import Executor
+from ragas.integrations.helicone import helicone_config
 from ragas.llms import llm_factory
 from ragas.llms.base import BaseRagasLLM, LangchainLLMWrapper
 from ragas.metrics._answer_correctness import AnswerCorrectness

@@ -136,6 +137,12 @@ def evaluate(
     column_map = column_map or {}
     callbacks = callbacks or []

+    if helicone_config.is_enabled:
+        import uuid
+
+        helicone_config.session_name = "ragas-evaluation"
+        helicone_config.session_id = str(uuid.uuid4())
+
     if dataset is None:
         raise ValueError("Provide dataset!")
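The effect of this hook is that every LLM request issued during a single evaluate() call shares one Helicone session, so the run shows up grouped in the Helicone dashboard. A rough illustration of the header side of that (not part of the diff; the key is a placeholder, and cache_config is set because default_headers() dereferences it):

import uuid

from ragas.integrations.helicone import CacheConfig, helicone_config

helicone_config.api_key = "your_helicone_api_key_here"  # placeholder
helicone_config.cache_config = CacheConfig()  # default_headers() reads maxsize/ttl from this

# This mirrors what evaluate() does when helicone_config.is_enabled is True.
helicone_config.session_name = "ragas-evaluation"
helicone_config.session_id = str(uuid.uuid4())

headers = helicone_config.default_headers()
print(headers["Helicone-Auth"])          # "Bearer your_helicone_api_key_here"
print(headers["Helicone-Session-Name"])  # "ragas-evaluation"
print(headers["Helicone-Session-Id"])    # the generated UUID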

src/ragas/integrations/helicone.py

Lines changed: 101 additions & 0 deletions
@@ -0,0 +1,101 @@
+from dataclasses import dataclass, field
+from typing import Any, Dict, Optional
+
+
+@dataclass
+class CacheConfig:
+    ttl: int = 60 * 60 * 24 * 30  # 30 days
+    maxsize: int = 1000
+
+
+@dataclass
+class HeliconeSingleton:
+    api_key: Optional[str] = None
+    base_url: Optional[str] = "https://oai.helicone.ai"
+    cache_config: Optional[CacheConfig] = None
+    _instance: Optional["HeliconeSingleton"] = None
+
+    # New fields for configurable headers
+    target_url: Optional[str] = None
+    openai_api_base: Optional[str] = None
+    request_id: Optional[str] = None
+    model_override: Optional[str] = None
+    prompt_id: Optional[str] = None
+    user_id: Optional[str] = None
+    fallbacks: Optional[str] = None
+    rate_limit_policy: Optional[str] = None
+    session_id: Optional[str] = None
+    session_path: Optional[str] = None
+    session_name: Optional[str] = None
+    posthog_key: Optional[str] = None
+    posthog_host: Optional[str] = None
+    omit_response: Optional[bool] = None
+    omit_request: Optional[bool] = None
+    cache_enabled: Optional[bool] = None
+    retry_enabled: Optional[bool] = None
+    moderations_enabled: Optional[bool] = None
+    llm_security_enabled: Optional[bool] = None
+    stream_force_format: Optional[bool] = None
+    custom_properties: Dict[str, str] = field(default_factory=dict)
+
+    def __new__(cls):
+        if cls._instance is None:
+            cls._instance = super().__new__(cls)
+        return cls._instance
+
+    def default_headers(self) -> Dict[str, Any]:
+        headers = {"Helicone-Auth": f"Bearer {self.api_key}"}
+
+        if self.target_url:
+            headers["Helicone-Target-URL"] = self.target_url
+        if self.openai_api_base:
+            headers["Helicone-OpenAI-Api-Base"] = self.openai_api_base
+        if self.request_id:
+            headers["Helicone-Request-Id"] = self.request_id
+        if self.model_override:
+            headers["Helicone-Model-Override"] = self.model_override
+        if self.prompt_id:
+            headers["Helicone-Prompt-Id"] = self.prompt_id
+        if self.user_id:
+            headers["Helicone-User-Id"] = self.user_id
+        if self.fallbacks:
+            headers["Helicone-Fallbacks"] = self.fallbacks
+        if self.rate_limit_policy:
+            headers["Helicone-RateLimit-Policy"] = self.rate_limit_policy
+        if self.session_id:
+            headers["Helicone-Session-Id"] = self.session_id
+        if self.session_path:
+            headers["Helicone-Session-Path"] = self.session_path
+        if self.session_name:
+            headers["Helicone-Session-Name"] = self.session_name
+        if self.posthog_key:
+            headers["Helicone-Posthog-Key"] = self.posthog_key
+        if self.posthog_host:
+            headers["Helicone-Posthog-Host"] = self.posthog_host
+
+        # Boolean headers
+        for header, value in {
+            "Helicone-Omit-Response": self.omit_response,
+            "Helicone-Omit-Request": self.omit_request,
+            "Helicone-Cache-Enabled": (self.cache_enabled and "true")
+            or (self.cache_config.maxsize or self.cache_config.ttl and "true"),  # type: ignore
+            "Helicone-Retry-Enabled": self.retry_enabled,
+            "Helicone-Moderations-Enabled": self.moderations_enabled,
+            "Helicone-LLM-Security-Enabled": self.llm_security_enabled,
+            "Helicone-Stream-Force-Format": self.stream_force_format,
+        }.items():
+            if value is not None:
+                headers[header] = str(value).lower()
+
+        # Custom properties
+        for key, value in self.custom_properties.items():
+            headers[f"Helicone-Property-{key}"] = value
+
+        return headers
+
+    @property
+    def is_enabled(self):
+        return self.api_key is not None
+
+
+helicone_config = HeliconeSingleton()
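
Any of the fields above can be set once on the shared helicone_config object and are translated into the corresponding Helicone-* request headers the next time a ragas LLM is built. A small sketch (not part of the diff; the user id and custom property name are made up for illustration):

from ragas.integrations.helicone import CacheConfig, HeliconeSingleton, helicone_config

# HeliconeSingleton behaves as a process-wide singleton: constructing it again
# returns the same object that ragas imports as helicone_config.
# Note: constructing it also re-runs the generated __init__, so do this
# before setting any fields.
assert HeliconeSingleton() is helicone_config

helicone_config.api_key = "your_helicone_api_key_here"  # placeholder
helicone_config.cache_config = CacheConfig()  # avoids the None lookup in default_headers()
helicone_config.user_id = "demo-user"  # hypothetical user id
helicone_config.custom_properties = {"project": "ragas-docs"}  # hypothetical property

headers = helicone_config.default_headers()
print(headers["Helicone-User-Id"])           # "demo-user"
print(headers["Helicone-Property-project"])  # "ragas-docs"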

src/ragas/llms/base.py

Lines changed: 14 additions & 2 deletions
@@ -15,6 +15,7 @@
 from langchain_openai.llms import AzureOpenAI, OpenAI
 from langchain_openai.llms.base import BaseOpenAI

+from ragas.integrations.helicone import helicone_config
 from ragas.run_config import RunConfig, add_async_retry, add_retry

 if t.TYPE_CHECKING:

@@ -289,10 +290,21 @@ async def agenerate_text(


 def llm_factory(
-    model: str = "gpt-4o-mini", run_config: t.Optional[RunConfig] = None
+    model: str = "gpt-4o-mini",
+    run_config: t.Optional[RunConfig] = None,
+    default_headers: t.Optional[t.Dict[str, str]] = None,
+    base_url: t.Optional[str] = None,
 ) -> BaseRagasLLM:
     timeout = None
     if run_config is not None:
         timeout = run_config.timeout
-    openai_model = ChatOpenAI(model=model, timeout=timeout)
+
+    # if helicone is enabled, use the helicone
+    if helicone_config.is_enabled:
+        default_headers = helicone_config.default_headers()
+        base_url = helicone_config.base_url
+
+    openai_model = ChatOpenAI(
+        model=model, timeout=timeout, default_headers=default_headers, base_url=base_url
+    )
     return LangchainLLMWrapper(openai_model, run_config)
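
In short, llm_factory now accepts optional default_headers and base_url arguments and overrides both with the Helicone values whenever helicone_config.api_key has been set. A sketch of the two paths (not part of the diff; the keys are placeholders, and constructing the wrappers sends no requests):

import os

from ragas.integrations.helicone import CacheConfig, helicone_config
from ragas.llms import llm_factory

os.environ["OPENAI_API_KEY"] = "your_openai_api_key_here"  # placeholder

# Helicone disabled: default_headers/base_url stay None and ChatOpenAI
# talks to the standard OpenAI endpoint, as before this commit.
plain_llm = llm_factory(model="gpt-4o-mini")

# Helicone enabled: setting api_key flips is_enabled, so llm_factory swaps in
# helicone_config.default_headers() and the Helicone proxy base_url.
helicone_config.api_key = "your_helicone_api_key_here"
helicone_config.cache_config = CacheConfig()  # default_headers() dereferences this
proxied_llm = llm_factory(model="gpt-4o-mini")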
