Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion ldai/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,8 @@ def model_config(self, key: str, context: Context, default_value: str, variables
for entry in variation['prompt']
]

return AIConfig(config=variation, tracker=LDAIConfigTracker(self.client, variation['_ldMeta']['variationId'], key, context))
enabled = variation['_ldMeta'].get('enabled')
return AIConfig(config=variation, tracker=LDAIConfigTracker(self.client, variation['_ldMeta']['versionKey'], key, context, bool(enabled)))

def interpolate_template(self, template: str, variables: Dict[str, Any]) -> str:
"""Interpolate the template with the given variables.
Expand Down
25 changes: 17 additions & 8 deletions ldai/tracker.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,15 @@
from ldai.types import BedrockTokenUsage, FeedbackKind, OpenAITokenUsage, TokenUsage, UnderscoreTokenUsage

class LDAIConfigTracker:
def __init__(self, ld_client: 'LDClient', version_key: str, config_key: str, context: 'Context'):
    """Track usage events for a single evaluated AI config.

    :param ld_client: LaunchDarkly client used to emit tracking events.
    :param version_key: version identifier of the evaluated AI config
        (replaces the older ``variation_id`` naming).
    :param config_key: AI config key the events are attributed to.
    :param context: evaluation context the events are associated with.
    """
    self.ld_client = ld_client
    self.version_key = version_key
    self.config_key = config_key
    self.context = context

def get_track_data(self):
    """Return the common event payload attached to every tracking call.

    Uses the newer 'versionKey' field name; the legacy 'variationId'
    key was retired in favor of it.
    """
    return {
        'versionKey': self.version_key,
        'configKey': self.config_key,
    }

Expand All @@ -27,24 +27,33 @@ def track_duration_of(self, func, *args, **kwargs):
self.track_duration(duration)
return result

def track_error(self, error: int) -> None:
    """Emit an '$ld:ai:error' event carrying the given error metric value."""
    payload = self.get_track_data()
    self.ld_client.track('$ld:ai:error', self.context, payload, error)

def track_feedback(self, feedback: Dict[str, FeedbackKind]) -> None:
    """Emit a user-feedback event keyed off feedback['kind'].

    Positive feedback maps to '$ld:ai:feedback:user:positive', negative
    to '$ld:ai:feedback:user:negative'; other kinds emit nothing.
    """
    kind = feedback['kind']
    if kind == FeedbackKind.Positive:
        event_name = '$ld:ai:feedback:user:positive'
    elif kind == FeedbackKind.Negative:
        event_name = '$ld:ai:feedback:user:negative'
    else:
        return
    self.ld_client.track(event_name, self.context, self.get_track_data(), 1)

def track_generation(self, generation: int) -> None:
    """Legacy metric: emit '$ld:ai:generation' with an explicit count.

    Kept for backward compatibility with existing callers; new code
    should call track_success() instead.
    """
    self.ld_client.track('$ld:ai:generation', self.context, self.get_track_data(), generation)

def track_success(self) -> None:
    """Emit a single '$ld:ai:generation' event marking a successful generation."""
    self.ld_client.track('$ld:ai:generation', self.context, self.get_track_data(), 1)

def track_openai(self, func, *args, **kwargs):
    """Invoke func via track_duration_of, then record its token usage.

    If the response carries a truthy .usage, it is tracked as OpenAI
    token usage. The response is returned unchanged.
    """
    response = self.track_duration_of(func, *args, **kwargs)
    if response.usage:
        self.track_tokens(OpenAITokenUsage(response.usage))
    return response

def track_bedrock_converse(self, res: dict) -> dict:
    """Record success, duration, and token metrics from a Bedrock Converse response.

    Returns res unchanged so the call can be chained inline.
    """
    status = res.get('$metadata', {}).get('httpStatusCode')
    if status == 200:
        self.track_success()
    elif status and status >= 400:
        # Potentially add error tracking in the future.
        pass
    latency = res.get('metrics', {}).get('latencyMs')
    if latency:
        self.track_duration(latency)
    usage = res.get('usage')
    if usage:
        self.track_tokens(BedrockTokenUsage(usage))
    return res

def track_tokens(self, tokens: Union[TokenUsage, UnderscoreTokenUsage, BedrockTokenUsage]) -> None:
token_metrics = tokens.to_metrics()
if token_metrics['total'] > 0:
Expand Down
7 changes: 4 additions & 3 deletions ldai/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,16 +23,16 @@ class AITracker():
track_feedback: Callable[..., None]

class AIConfig():
    """Result of an AI config evaluation: config data, its tracker, and the enabled flag."""

    def __init__(self, config: 'AIConfigData', tracker: 'AITracker', enabled: bool = False):
        """
        :param config: the evaluated AI config payload.
        :param tracker: tracker bound to this evaluation for usage events.
        :param enabled: whether the AI config is enabled; defaults to
            False so existing two-argument callers keep working.
        """
        self.config = config
        self.tracker = tracker
        self.enabled = enabled

class FeedbackKind(Enum):
    """Direction of user feedback on an AI generation.

    NOTE: deliberately not decorated with @dataclass — applying
    @dataclass to an Enum injects a field-based __eq__ (with no fields,
    an empty tuple comparison) that makes all members compare equal.
    """
    Positive = "positive"
    Negative = "negative"

@dataclass

class TokenUsage():
total_tokens: int
prompt_tokens: int
Expand All @@ -45,6 +45,7 @@ def to_metrics(self):
'output': self['completion_tokens'],
}

@dataclass
class OpenAITokenUsage:
def __init__(self, data: any):
self.total_tokens = data.total_tokens
Expand Down
Loading