2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.6.5
rev: v0.8.1
hooks:
- id: ruff
- id: ruff-format
@@ -65,12 +65,3 @@ where = ["src"]

[tool.setuptools.dynamic]
version = {attr = "opentelemetry.instrumentation.openai.version.__version__"}

[tool.ruff]
target-version = "py38"
line-length = 120

[tool.ruff.lint.isort]
known-third-party = [
"opentelemetry",
]
@@ -19,39 +19,37 @@
from timeit import default_timer
from typing import Collection

from wrapt import register_post_import_hook, wrap_function_wrapper

from opentelemetry._events import get_event_logger
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.utils import unwrap
from opentelemetry.instrumentation.openai.environment_variables import (
OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT,
)
from opentelemetry.instrumentation.openai.helpers import (
_get_embeddings_span_attributes_from_wrapper,
_get_event_attributes,
_get_span_attributes_from_wrapper,
_record_token_usage_metrics,
_record_operation_duration_metric,
_send_log_events_from_messages,
_record_token_usage_metrics,
_send_log_events_from_choices,
_set_span_attributes_from_response,
_send_log_events_from_messages,
_set_embeddings_span_attributes_from_response,
_set_span_attributes_from_response,
_span_name_from_span_attributes,
)
from opentelemetry.instrumentation.openai.package import _instruments
from opentelemetry.instrumentation.openai.version import __version__
from opentelemetry.instrumentation.openai.wrappers import StreamWrapper
from opentelemetry.instrumentation.utils import unwrap
from opentelemetry.metrics import get_meter
from opentelemetry.semconv._incubating.metrics.gen_ai_metrics import (
create_gen_ai_client_token_usage,
create_gen_ai_client_operation_duration,
create_gen_ai_client_token_usage,
)

from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
from opentelemetry.semconv.schemas import Schemas
from opentelemetry.trace import SpanKind, get_tracer
from opentelemetry.trace.status import StatusCode
from wrapt import register_post_import_hook, wrap_function_wrapper

EVENT_GEN_AI_CONTENT_PROMPT = "gen_ai.content.prompt"
EVENT_GEN_AI_CONTENT_COMPLETION = "gen_ai.content.completion"
@@ -19,8 +19,6 @@
from typing import TYPE_CHECKING

from opentelemetry._events import Event, EventLogger
from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
from opentelemetry.semconv.attributes.server_attributes import SERVER_ADDRESS, SERVER_PORT
from opentelemetry.semconv._incubating.attributes.gen_ai_attributes import (
GEN_AI_OPERATION_NAME,
GEN_AI_REQUEST_FREQUENCY_PENALTY,
@@ -30,14 +28,16 @@
GEN_AI_REQUEST_STOP_SEQUENCES,
GEN_AI_REQUEST_TEMPERATURE,
GEN_AI_REQUEST_TOP_P,
GEN_AI_RESPONSE_ID,
GEN_AI_RESPONSE_FINISH_REASONS,
GEN_AI_RESPONSE_ID,
GEN_AI_RESPONSE_MODEL,
GEN_AI_SYSTEM,
GEN_AI_TOKEN_TYPE,
GEN_AI_USAGE_INPUT_TOKENS,
GEN_AI_USAGE_OUTPUT_TOKENS,
)
from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
from opentelemetry.semconv.attributes.server_attributes import SERVER_ADDRESS, SERVER_PORT

try:
from opentelemetry.semconv._incubating.attributes.gen_ai_attributes import GEN_AI_REQUEST_ENCODING_FORMATS
@@ -18,10 +18,10 @@

from opentelemetry._events import EventLogger
from opentelemetry.instrumentation.openai.helpers import (
_record_token_usage_metrics,
_record_operation_duration_metric,
_set_span_attributes_from_response,
_record_token_usage_metrics,
_send_log_events_from_stream_choices,
_set_span_attributes_from_response,
)
from opentelemetry.metrics import Histogram
from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
@@ -15,17 +15,17 @@
# limitations under the License.

import json
import re
import os
import re
from typing import Sequence, Union
from urllib.parse import parse_qs, urlparse

import openai
import pytest
import yaml
from opentelemetry import metrics, trace
from opentelemetry._logs import set_logger_provider
from opentelemetry._events import set_event_logger_provider
from opentelemetry._logs import set_logger_provider
from opentelemetry.instrumentation.openai import OpenAIInstrumentor
from opentelemetry.metrics import Histogram
from opentelemetry.sdk._events import EventLoggerProvider
@@ -25,7 +25,6 @@
from opentelemetry._events import Event
from opentelemetry._logs import LogRecord
from opentelemetry.instrumentation.openai import OpenAIInstrumentor
from opentelemetry.trace import SpanKind, StatusCode
from opentelemetry.semconv._incubating.attributes.gen_ai_attributes import (
GEN_AI_OPERATION_NAME,
GEN_AI_REQUEST_FREQUENCY_PENALTY,
@@ -35,15 +34,16 @@
GEN_AI_REQUEST_STOP_SEQUENCES,
GEN_AI_REQUEST_TEMPERATURE,
GEN_AI_REQUEST_TOP_P,
GEN_AI_SYSTEM,
GEN_AI_RESPONSE_FINISH_REASONS,
GEN_AI_RESPONSE_ID,
GEN_AI_RESPONSE_MODEL,
GEN_AI_RESPONSE_FINISH_REASONS,
GEN_AI_SYSTEM,
GEN_AI_USAGE_INPUT_TOKENS,
GEN_AI_USAGE_OUTPUT_TOKENS,
)
from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
from opentelemetry.semconv.attributes.server_attributes import SERVER_ADDRESS, SERVER_PORT
from opentelemetry.trace import SpanKind, StatusCode

from .conftest import (
assert_error_operation_duration_metric,
@@ -19,16 +19,16 @@
import openai
import pytest
from opentelemetry.instrumentation.openai.helpers import GEN_AI_REQUEST_ENCODING_FORMATS
from opentelemetry.trace import SpanKind, StatusCode
from opentelemetry.semconv._incubating.attributes.gen_ai_attributes import (
GEN_AI_OPERATION_NAME,
GEN_AI_REQUEST_MODEL,
GEN_AI_SYSTEM,
GEN_AI_RESPONSE_MODEL,
GEN_AI_SYSTEM,
GEN_AI_USAGE_INPUT_TOKENS,
)
from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
from opentelemetry.semconv.attributes.server_attributes import SERVER_ADDRESS, SERVER_PORT
from opentelemetry.trace import SpanKind, StatusCode

from .conftest import (
assert_error_operation_duration_metric,
@@ -37,7 +37,6 @@
)
from .utils import MOCK_POSITIVE_FLOAT, get_sorted_metrics


test_basic_test_data = [
("openai_provider_embeddings", "text-embedding-3-small", 4, 0.2263190783560276),
("azure_provider_embeddings", "ada", 4, 0.0017870571464300156),
@@ -18,9 +18,9 @@
from opentelemetry.sdk._logs._internal import LogData
from opentelemetry.sdk.metrics._internal.point import Metric
from opentelemetry.sdk.metrics.export import (
InMemoryMetricReader,
DataPointT,
HistogramDataPoint,
InMemoryMetricReader,
NumberDataPoint,
)
from opentelemetry.util.types import AttributeValue
24 changes: 24 additions & 0 deletions pyproject.toml
@@ -0,0 +1,24 @@
[tool.ruff]
target-version = "py38"
line-length = 120

[tool.ruff.lint.isort]  # ruff only reads tables under `tool.ruff.` from pyproject.toml
known-first-party = [
"opentelemetry",
]

[tool.ruff.lint]
# https://docs.astral.sh/ruff/linter/#rule-selection
select = [
"I", # isort
"F", # pyflakes
"E", # pycodestyle errors
"W", # pycodestyle warnings
"PLC", # pylint convention
"PLE", # pylint error
"Q", # flake8-quotes
"A", # flake8-builtins
]
ignore = [
"E501", # line-too-long
]
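
For reference, below is a minimal sketch (not part of the diff) of the import layout these settings produce once ruff check --fix applies the selected isort rules ("I"): imports split into standard-library, external, and local groups, with plain import statements ahead of from-imports inside each group, sorted alphabetically. The module names are taken from the files changed above.

# Standard-library group comes first; plain imports precede "from" imports.
import json
import os
from typing import Sequence

# External packages form the next group; in the hunks above the opentelemetry
# modules sort here alongside openai and pytest.
import openai
import pytest
from opentelemetry import metrics, trace
from opentelemetry.instrumentation.openai import OpenAIInstrumentor
from opentelemetry.trace import SpanKind, StatusCode

Relative imports such as from .conftest import ... land in a final group of their own, as in the test modules above.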