feat(integrations): add support for embed_content methods in GoogleGenAI integration #5128
Changes from 1 commit
@@ -36,6 +36,7 @@
     ContentListUnion,
     Tool,
     Model,
+    EmbedContentResponse,
 )
@@ -574,3 +575,70 @@ def prepare_generate_content_args(args, kwargs):
         kwargs["config"] = wrapped_config

     return model, contents, model_name
+
+
+def prepare_embed_content_args(args, kwargs):
+    # type: (tuple[Any, ...], dict[str, Any]) -> tuple[str, Any]
+    """Extract and prepare common arguments for embed_content methods.
+
+    Returns:
+        tuple: (model_name, contents)
+    """
+    model = kwargs.get("model", "unknown")
+    contents = kwargs.get("contents")
+    model_name = get_model_name(model)
+
+    return model_name, contents
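For orientation only, here is a minimal sketch (not part of the diff) of what this helper returns for a typical `embed_content` call. The model id and the texts are invented example values, and the exact normalization done by `get_model_name` is assumed from its use elsewhere in this file:

```python
# Hypothetical illustration of prepare_embed_content_args; the model id and
# texts are example values, not anything taken from the PR.
kwargs = {
    "model": "text-embedding-004",
    "contents": ["What is the capital of France?", "How tall is the Eiffel Tower?"],
}
model_name, contents = prepare_embed_content_args((), kwargs)
# model_name -> the model identifier as a plain string (via get_model_name)
# contents   -> the list of input texts, passed through unchanged
```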
+def set_span_data_for_embed_request(span, integration, contents, kwargs):
+    # type: (Span, Any, Any, dict[str, Any]) -> None
+    """Set span data for embedding request."""
+    # Include input contents if PII is allowed
+    if should_send_default_pii() and integration.include_prompts:
+        if contents:
+            # For embeddings, contents is typically a list of strings/texts
+            input_texts = []
+
+            # Handle various content formats
+            if isinstance(contents, str):
+                input_texts = [contents]
+            elif isinstance(contents, list):
+                for item in contents:
+                    text = extract_contents_text(item)
+                    if text:
+                        input_texts.append(text)
+            else:
+                text = extract_contents_text(contents)
+                if text:
+                    input_texts = [text]
+
+            if input_texts:
+                set_data_normalized(
+                    span,
+                    SPANDATA.GEN_AI_EMBEDDINGS_INPUT,
+                    input_texts,
+                    unpack=False,
+                )
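As a rough usage sketch (again, not part of the diff), the normalization above can be exercised with stand-in objects; `MagicMock` replaces the real Sentry span and integration instance, and the texts are invented:

```python
from unittest.mock import MagicMock

span = MagicMock()
integration = MagicMock(include_prompts=True)

# A plain list of strings is the common case for embeddings.
set_span_data_for_embed_request(
    span,
    integration,
    ["What is the capital of France?", "How tall is the Eiffel Tower?"],
    {},
)
# If should_send_default_pii() is enabled as well, the texts are attached to the
# span under SPANDATA.GEN_AI_EMBEDDINGS_INPUT (unpack=False keeps them as one list).
# A bare string would be wrapped in a single-element list, and any other object
# goes through extract_contents_text() first.
```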
+def set_span_data_for_embed_response(span, integration, response):
+    # type: (Span, Any, EmbedContentResponse) -> None
+    """Set span data for embedding response."""
+    if not response:
+        return
+
+    # Extract token counts from embeddings statistics (Vertex AI only)
+    # Each embedding has its own statistics with token_count
+    if hasattr(response, "embeddings") and response.embeddings:
+        total_tokens = 0
+
+        for embedding in response.embeddings:
+            if hasattr(embedding, "statistics") and embedding.statistics:
+                token_count = getattr(embedding.statistics, "token_count", None)
+                if token_count is not None:
+                    total_tokens += int(token_count)
+
+        # Set token count if we found any
+        if total_tokens > 0:
+            span.set_data(SPANDATA.GEN_AI_USAGE_INPUT_TOKENS, total_tokens)
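The patched `embed_content` wrapper itself is not part of this hunk. Purely as a reading aid, here is a hedged sketch of how these three helpers would typically be wired together; the wrapper name, the span `op`/`name` strings, and the span attribute used for the model are assumptions, not code from this PR:

```python
from functools import wraps

import sentry_sdk
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.google_genai import GoogleGenAIIntegration


def _wrap_embed_content(original_embed_content):
    # Hypothetical wrapper, shown only to illustrate how the helpers fit together;
    # prepare_embed_content_args and the two set_span_data_* helpers are the
    # functions added in the diff above.
    @wraps(original_embed_content)
    def wrapper(self, *args, **kwargs):
        integration = sentry_sdk.get_client().get_integration(GoogleGenAIIntegration)
        if integration is None:
            return original_embed_content(self, *args, **kwargs)

        model_name, contents = prepare_embed_content_args(args, kwargs)
        with sentry_sdk.start_span(
            op="gen_ai.embeddings",  # assumed op string
            name="embeddings {}".format(model_name),
        ) as span:
            span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
            set_span_data_for_embed_request(span, integration, contents, kwargs)
            response = original_embed_content(self, *args, **kwargs)
            set_span_data_for_embed_response(span, integration, response)
            return response

    return wrapper
```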