-
Notifications
You must be signed in to change notification settings - Fork 109
chore(ai): Add metric for tracking ai cost calculation #5560
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 5 commits
d49be0f
807f933
735fe55
a10f08d
01deeb5
a0f7607
374de6c
8c68e59
444eadb
ff67e57
0483d3f
a53fe30
1b4b705
60c4897
ed6ecd3
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -6,6 +6,7 @@ use relay_protocol::Annotated; | |
|
|
||
| use crate::ModelCosts; | ||
| use crate::span::ai; | ||
| use crate::statsd::Counters; | ||
|
|
||
| /// Normalizes AI attributes. | ||
| /// | ||
|
|
@@ -108,8 +109,59 @@ fn normalize_tokens_per_second(attributes: &mut Attributes, duration: Option<Dur | |
| } | ||
| } | ||
|
|
||
/// Maps a span origin to a well-known AI integration name for metrics.
///
/// Origins follow the pattern `auto.<integration>.<source>` or
/// `auto.<category>.<protocol>.<source>`. This function extracts recognized
/// AI integrations for cleaner metric tagging.
fn map_origin_to_integration(origin: Option<&str>) -> &'static str {
    // NOTE: more specific prefixes must be matched before their shorter
    // counterparts ("openai_agents" before "openai", "mcp_server" before
    // "mcp"); otherwise the shorter prefix shadows the longer one and the
    // longer arm is unreachable.
    match origin {
        Some(o) if o.starts_with("auto.ai.openai_agents") => "openai_agents",
        Some(o) if o.starts_with("auto.ai.openai") => "openai",
        Some(o) if o.starts_with("auto.ai.anthropic") => "anthropic",
        Some(o) if o.starts_with("auto.ai.cohere") => "cohere",
        Some(o) if o.starts_with("auto.vercelai.") => "vercelai",
        Some(o) if o.starts_with("auto.ai.langchain") => "langchain",
        Some(o) if o.starts_with("auto.ai.langgraph") => "langgraph",
        Some(o) if o.starts_with("auto.ai.google_genai") => "google_genai",
        Some(o) if o.starts_with("auto.ai.pydantic_ai") => "pydantic_ai",
        Some(o) if o.starts_with("auto.ai.huggingface_hub") => "huggingface_hub",
        Some(o) if o.starts_with("auto.ai.litellm") => "litellm",
        Some(o) if o.starts_with("auto.ai.mcp_server") => "mcp_server",
        Some(o) if o.starts_with("auto.ai.mcp") => "mcp",
        Some(o) if o.starts_with("auto.ai.claude_agent_sdk") => "claude_agent_sdk",
        // Unrecognized AI origin, or a non-AI origin: bucket as "other" to
        // keep metric cardinality bounded.
        Some(o) if o.starts_with("auto.ai.") => "other",
        Some(_) => "other",
        None => "unknown",
    }
}
|
|
||
/// Returns a low-cardinality metric tag for an SDK platform.
///
/// A recognized platform maps to itself, any other value is bucketed as
/// `"other"`, and a missing platform becomes `"unknown"`.
fn platform_tag(platform: Option<&str>) -> &'static str {
    // Allow-list of platforms we emit verbatim as metric tags.
    const KNOWN_PLATFORMS: &[&str] = &[
        "cocoa",
        "csharp",
        "edge",
        "go",
        "java",
        "javascript",
        "julia",
        "native",
        "node",
        "objc",
        "perl",
        "php",
        "python",
        "ruby",
        "swift",
    ];

    let Some(platform) = platform else {
        return "unknown";
    };

    KNOWN_PLATFORMS
        .iter()
        .find(|&&known| known == platform)
        .copied()
        .unwrap_or("other")
}
|
|
||
| /// Calculates model costs and serializes them into attributes. | ||
| fn normalize_ai_costs(attributes: &mut Attributes, model_costs: Option<&ModelCosts>) { | ||
| let origin = extract_string_value(attributes, ORIGIN); | ||
| let platform = extract_string_value(attributes, PLATFORM); | ||
|
|
||
| let model_cost = attributes | ||
| .get_value(GEN_AI_REQUEST_MODEL) | ||
| .or_else(|| attributes.get_value(GEN_AI_RESPONSE_MODEL)) | ||
|
|
@@ -133,16 +185,43 @@ fn normalize_ai_costs(attributes: &mut Attributes, model_costs: Option<&ModelCos | |
| output_reasoning_tokens: get_tokens(GEN_AI_USAGE_OUTPUT_REASONING_TOKENS), | ||
| }; | ||
|
|
||
| let integration = map_origin_to_integration(origin); | ||
| let platform = platform_tag(platform); | ||
|
|
||
| let Some(costs) = ai::calculate_costs(model_cost, tokens) else { | ||
|
||
| relay_statsd::metric!( | ||
| counter(Counters::GenAiCostCalculationResult) += 1, | ||
| result = "calculation_none", | ||
| integration = integration, | ||
| platform = platform, | ||
shellmayr marked this conversation as resolved.
Outdated
Show resolved
Hide resolved
|
||
| ); | ||
|
|
||
| return; | ||
| }; | ||
|
|
||
| // Overwrite all values, the attributes should reflect the values we used to calculate the total. | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I assume removing that comment was a mistake?
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. yup, adding it back in |
||
| let metric_label = if costs.input > 0.0 && costs.output > 0.0 { | ||
| "calculation_positive" | ||
| } else if costs.input < 0.0 || costs.output < 0.0 { | ||
| "calculation_negative" | ||
| } else { | ||
| "calculation_zero" | ||
| }; | ||
shellmayr marked this conversation as resolved.
Outdated
Show resolved
Hide resolved
|
||
| relay_statsd::metric!( | ||
| counter(Counters::GenAiCostCalculationResult) += 1, | ||
| result = metric_label, | ||
| integration = integration, | ||
| platform = platform, | ||
| ); | ||
|
|
||
| attributes.insert(GEN_AI_COST_INPUT_TOKENS, costs.input); | ||
| attributes.insert(GEN_AI_COST_OUTPUT_TOKENS, costs.output); | ||
| attributes.insert(GEN_AI_COST_TOTAL_TOKENS, costs.total()); | ||
| } | ||
|
|
||
| fn extract_string_value<'a>(attributes: &'a Attributes, key: &str) -> Option<&'a str> { | ||
| attributes.get_value(key).and_then(|v| v.as_str()) | ||
| } | ||
|
|
||
| #[cfg(test)] | ||
| mod tests { | ||
| use std::collections::HashMap; | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.