2 changes: 1 addition & 1 deletion .github/workflows/get-envs.py
@@ -26,7 +26,7 @@ def main(f):
joined_envs = ",".join(filtered_envs)

assert joined_envs, ( # noqa: S101
f"No environments found.\nenvironments = {str(environments)}\nGROUP_NUMBER = {GROUP_NUMBER + 1}\nTOTAL_GROUPS = {TOTAL_GROUPS}"
f"No environments found.\nenvironments = {environments!s}\nGROUP_NUMBER = {GROUP_NUMBER + 1}\nTOTAL_GROUPS = {TOTAL_GROUPS}"
)
print(joined_envs)

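The only change in this hunk swaps an explicit str() call inside the f-string for the !s conversion flag (the same pattern recurs in agent_http.py, encoding_utils.py, and trace_cache.py below, and !r replaces repr() in time_trace.py and stats_engine.py); the rendered text is identical. Ruff flags the explicit calls, most likely under RUF010. A minimal sketch of the equivalence, using placeholder values:

    environments = ["py39-tests", "py310-tests"]  # placeholder values, not from the workflow
    # "!s" applies str() and "!r" applies repr() without an explicit call.
    assert f"{str(environments)}" == f"{environments!s}"
    assert f"{repr(environments)}" == f"{environments!r}"
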
4 changes: 2 additions & 2 deletions newrelic/api/application.py
@@ -22,9 +22,9 @@

class Application:
_lock = threading.Lock()
_instances = {}
_instances = {} # noqa: RUF012

_delayed_callables = {}
_delayed_callables = {} # noqa: RUF012

@staticmethod
def _instance(name, activate=True):
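
RUF012 is ruff's check for mutable class attributes (dict, list, or set literals) that are not annotated with typing.ClassVar; this PR silences it with # noqa comments here and in agent_http.py, utilization.py, agent.py, and agent_protocol.py rather than annotating. For comparison, a sketch of the annotated form ruff accepts (illustrative only, not part of the change):

    import threading
    from typing import ClassVar


    class Application:
        _lock = threading.Lock()

        # Declaring the dicts as ClassVar marks them as intentional shared
        # class-level state, which satisfies RUF012 without a suppression.
        _instances: ClassVar[dict] = {}
        _delayed_callables: ClassVar[dict] = {}
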
2 changes: 1 addition & 1 deletion newrelic/api/html_insertion.py
@@ -78,7 +78,7 @@ def insert_at_index(index):
xua_meta = _xua_meta_re.search(data)
charset_meta = _charset_meta_re.search(data)

index = max(xua_meta and xua_meta.end() or 0, charset_meta and charset_meta.end() or 0)
index = max((xua_meta and xua_meta.end()) or 0, (charset_meta and charset_meta.end()) or 0)

if index:
return insert_at_index(index)
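
The added parentheses here do not change behavior: "and" binds more tightly than "or", so "x and x.end() or 0" already evaluates as "(x and x.end()) or 0". The same purely clarifying rewrite appears below in agent.py, agent_control_health.py, database_utils.py, environment.py, external_node.py, node_mixin.py, stats_engine.py, and thread_utilization.py. A quick sanity check (standalone sketch, not the module's actual regex):

    import re

    _charset_meta_re = re.compile(r"charset=")  # stand-in pattern for illustration

    charset_meta = _charset_meta_re.search("<head></head>")  # no match -> None
    # Both spellings fall back to 0 when the search returns None.
    assert (charset_meta and charset_meta.end() or 0) == ((charset_meta and charset_meta.end()) or 0) == 0

    charset_meta = _charset_meta_re.search('<meta charset="utf-8">')
    # With a match, both spellings yield the match end offset.
    assert (charset_meta and charset_meta.end() or 0) == ((charset_meta and charset_meta.end()) or 0)
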
2 changes: 1 addition & 1 deletion newrelic/api/log.py
@@ -131,7 +131,7 @@ def format(self, record):


class NewRelicLogForwardingHandler(logging.Handler):
IGNORED_LOG_RECORD_KEYS = {"message", "msg"}
IGNORED_LOG_RECORD_KEYS = frozenset(("message", "msg"))

def emit(self, record):
try:
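
Unlike the # noqa: RUF012 suppressions elsewhere in this diff, this hunk (and the OPTIONS list-to-tuple change in agent_streaming.py below) appears to resolve the mutable-class-attribute warning by making the constant genuinely immutable instead. Membership tests behave exactly as they did with the set literal; an illustrative check:

    IGNORED_LOG_RECORD_KEYS = frozenset(("message", "msg"))

    assert "msg" in IGNORED_LOG_RECORD_KEYS        # lookups are unchanged
    assert "levelname" not in IGNORED_LOG_RECORD_KEYS
    # frozenset exposes no mutating methods, so it is safe as shared class state.
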
2 changes: 1 addition & 1 deletion newrelic/api/time_trace.py
@@ -436,7 +436,7 @@ def notice_error(self, error=None, attributes=None, expected=None, ignore=None,
_, error_group_name = process_user_attribute("error.group.name", error_group_name_raw)
if error_group_name is None or not isinstance(error_group_name, str):
raise ValueError(
f"Invalid attribute value for error.group.name. Expected string, got: {repr(error_group_name_raw)}"
f"Invalid attribute value for error.group.name. Expected string, got: {error_group_name_raw!r}"
)
except Exception:
_logger.error(
4 changes: 2 additions & 2 deletions newrelic/common/agent_http.py
@@ -283,7 +283,7 @@ def __init__(
else:
self._host = proxy.host
self._port = proxy.port or 443
self._prefix = f"{self.PREFIX_SCHEME + host}:{str(port)}"
self._prefix = f"{self.PREFIX_SCHEME + host}:{port!s}"
urlopen_kwargs["assert_same_host"] = False
if proxy_headers:
self._headers.update(proxy_headers)
@@ -501,7 +501,7 @@ class ApplicationModeClient(SupportabilityMixin, HttpClient):


class DeveloperModeClient(SupportabilityMixin, BaseClient):
RESPONSES = {
RESPONSES = { # noqa: RUF012
"preconnect": {"redirect_host": "fake-collector.newrelic.com"},
"agent_settings": [],
"connect": {
2 changes: 1 addition & 1 deletion newrelic/common/encoding_utils.py
@@ -486,7 +486,7 @@ def text(self):
if pr:
pr = f"{pr:.6f}".rstrip("0").rstrip(".")

payload = f"0-0-{self['ac']}-{self['ap']}-{self.get('id', '')}-{self.get('tx', '')}-{'1' if self.get('sa') else '0'}-{pr}-{str(self['ti'])}"
payload = f"0-0-{self['ac']}-{self['ap']}-{self.get('id', '')}-{self.get('tx', '')}-{'1' if self.get('sa') else '0'}-{pr}-{self['ti']!s}"
return f"{self.get('tk', self['ac'])}@nr={payload}"

@classmethod
14 changes: 7 additions & 7 deletions newrelic/common/utilization.py
@@ -161,7 +161,7 @@ class AWSUtilization(CommonUtilization):
METADATA_HOST = "169.254.169.254"
METADATA_PATH = "/latest/dynamic/instance-identity/document"
METADATA_TOKEN_PATH = "/latest/api/token" # noqa: S105
HEADERS = {"X-aws-ec2-metadata-token-ttl-seconds": "21600"}
HEADERS = {"X-aws-ec2-metadata-token-ttl-seconds": "21600"} # noqa: RUF012
VENDOR_NAME = "aws"
_utilization_data = None

@@ -219,18 +219,18 @@ def fetch(cls):
class AzureUtilization(CommonUtilization):
METADATA_HOST = "169.254.169.254"
METADATA_PATH = "/metadata/instance/compute"
METADATA_QUERY = {"api-version": "2017-03-01"}
METADATA_QUERY = {"api-version": "2017-03-01"} # noqa: RUF012
EXPECTED_KEYS = ("location", "name", "vmId", "vmSize")
HEADERS = {"Metadata": "true"}
HEADERS = {"Metadata": "true"} # noqa: RUF012
VENDOR_NAME = "azure"


class AzureFunctionUtilization(CommonUtilization):
METADATA_HOST = "169.254.169.254"
METADATA_PATH = "/metadata/instance/compute"
METADATA_QUERY = {"api-version": "2017-03-01"}
METADATA_QUERY = {"api-version": "2017-03-01"} # noqa: RUF012
EXPECTED_KEYS = ("faas.app_name", "cloud.region")
HEADERS = {"Metadata": "true"}
HEADERS = {"Metadata": "true"} # noqa: RUF012
VENDOR_NAME = "azurefunction"

@staticmethod
@@ -264,10 +264,10 @@ def get_values(cls, response):

class GCPUtilization(CommonUtilization):
EXPECTED_KEYS = ("id", "machineType", "name", "zone")
HEADERS = {"Metadata-Flavor": "Google"}
HEADERS = {"Metadata-Flavor": "Google"} # noqa: RUF012
METADATA_HOST = "metadata.google.internal"
METADATA_PATH = "/computeMetadata/v1/instance/"
METADATA_QUERY = {"recursive": "true"}
METADATA_QUERY = {"recursive": "true"} # noqa: RUF012
VENDOR_NAME = "gcp"

@classmethod
2 changes: 1 addition & 1 deletion newrelic/config.py
@@ -50,7 +50,7 @@
)
from newrelic.core.config import Settings, apply_config_setting, default_host, fetch_config_setting

__all__ = ["initialize", "filter_app_factory"]
__all__ = ["filter_app_factory", "initialize"]

_logger = logging.getLogger(__name__)

6 changes: 3 additions & 3 deletions newrelic/core/agent.py
@@ -121,8 +121,8 @@ class Agent:

_instance_lock = threading.Lock()
_instance = None
_startup_callables = []
_registration_callables = {}
_startup_callables = [] # noqa: RUF012
_registration_callables = {} # noqa: RUF012

@staticmethod
def run_on_startup(callable): # noqa: A002
@@ -781,7 +781,7 @@ def shutdown_agent(timeout=None):

def register_data_source(source, application=None, name=None, settings=None, **properties):
agent = agent_instance()
agent.register_data_source(source, application and application.name or None, name, settings, **properties)
agent.register_data_source(source, (application and application.name) or None, name, settings, **properties)


def _remove_thread_utilization():
7 changes: 2 additions & 5 deletions newrelic/core/agent_control_health.py
@@ -172,11 +172,8 @@ def update_to_healthy_status(self, protocol_error=False, collector_error=False):
# session. This function allows us to update to a healthy status if so based on the error type
# Since this function is only called when we are in scenario where the agent functioned as expected, we check to
# see if the previous status was unhealthy so we know to update it
if (
protocol_error
and self.status_code in PROTOCOL_ERROR_CODES
or collector_error
and self.status_code == HealthStatus.FAILED_NR_CONNECTION.value
if (protocol_error and self.status_code in PROTOCOL_ERROR_CODES) or (
collector_error and self.status_code == HealthStatus.FAILED_NR_CONNECTION.value
):
self.status_code = HealthStatus.HEALTHY.value
self.status_message = HEALTHY_STATUS_MESSAGE
8 changes: 4 additions & 4 deletions newrelic/core/agent_protocol.py
@@ -49,7 +49,7 @@
class AgentProtocol:
VERSION = 17

STATUS_CODE_RESPONSE = {
STATUS_CODE_RESPONSE = { # noqa: RUF012
400: DiscardDataForRequest,
401: ForceAgentRestart,
403: DiscardDataForRequest,
@@ -69,7 +69,7 @@ class AgentProtocol:
500: RetryDataForRequest,
503: RetryDataForRequest,
}
LOG_MESSAGES = {
LOG_MESSAGES = { # noqa: RUF012
401: (
logging.ERROR,
(
@@ -146,7 +146,7 @@ class AgentProtocol:
"ai_monitoring.enabled",
)

LOGGER_FUNC_MAPPING = {
LOGGER_FUNC_MAPPING = { # noqa: RUF012
"ERROR": _logger.error,
"WARN": _logger.warning,
"INFO": _logger.info,
@@ -284,7 +284,7 @@ def _to_http(self, method, payload=()):
@staticmethod
def _connect_payload(app_name, linked_applications, environment, settings):
settings = global_settings_dump(settings)
app_names = [app_name] + linked_applications
app_names = [app_name, *linked_applications]

hostname = system_info.gethostname(
settings["heroku.use_dyno_names"], settings["heroku.dyno_name_prefixes_to_shorten"]
2 changes: 1 addition & 1 deletion newrelic/core/agent_streaming.py
@@ -34,7 +34,7 @@ class StreamingRpc:
"""

RETRY_POLICY = ((15, False), (15, False), (30, False), (60, False), (120, False), (300, True))
OPTIONS = [("grpc.enable_retries", 0)]
OPTIONS = (("grpc.enable_retries", 0),)

def __init__(self, endpoint, stream_buffer, metadata, record_metric, ssl=True, compression=None):
self._endpoint = endpoint
8 changes: 4 additions & 4 deletions newrelic/core/database_utils.py
@@ -272,7 +272,7 @@ def _uncomment_sql(sql):

def _parse_default(sql, regex):
match = regex.search(sql)
return match and _extract_identifier(match.group(1)) or ""
return (match and _extract_identifier(match.group(1))) or ""


_parse_identifier_1_p = r'"((?:[^"]|"")+)"(?:\."((?:[^"]|"")+)")?'
@@ -290,7 +290,7 @@ def _parse_default(sql, regex):


def _join_identifier(m):
return m and ".".join([s for s in m.groups()[1:] if s]).lower() or ""
return (m and ".".join([s for s in m.groups()[1:] if s]).lower()) or ""


def _parse_select(sql):
@@ -415,14 +415,14 @@ def _parse_alter(sql):

def _parse_operation(sql):
match = _parse_operation_re.search(sql)
operation = match and match.group(1).lower() or ""
operation = (match and match.group(1).lower()) or ""
return operation if operation in _operation_table else ""


def _parse_target(sql, operation):
sql = sql.rstrip(";")
parse = _operation_table.get(operation, None)
return parse and parse(sql) or ""
return (parse and parse(sql)) or ""


# For explain plan obfuscation, the regular expression for matching the
2 changes: 1 addition & 1 deletion newrelic/core/environment.py
@@ -209,7 +209,7 @@ def plugins():
for name, module in sys.modules.copy().items():
# Exclude lib.sub_paths as independent modules except for newrelic.hooks.
nr_hook = name.startswith("newrelic.hooks.")
if "." in name and not nr_hook or name.startswith("_"):
if ("." in name and not nr_hook) or name.startswith("_"):
continue

# If the module isn't actually loaded (such as failed relative imports
2 changes: 1 addition & 1 deletion newrelic/core/external_node.py
@@ -89,7 +89,7 @@ def netloc(self):
if (scheme, port) in (("http", 80), ("https", 443)):
port = None

netloc = port and (f"{hostname}:{port}") or hostname
netloc = (port and (f"{hostname}:{port}")) or hostname
return netloc

def time_metrics(self, stats, root, parent):
6 changes: 3 additions & 3 deletions newrelic/core/infinite_tracing_pb2.py
@@ -21,11 +21,11 @@

# Import appropriate generated pb2 file for protobuf version
if PROTOBUF_VERSION >= (6,):
from newrelic.core.infinite_tracing_v6_pb2 import AttributeValue, RecordStatus, Span, SpanBatch # noqa: F401
from newrelic.core.infinite_tracing_v6_pb2 import AttributeValue, RecordStatus, Span, SpanBatch
elif PROTOBUF_VERSION >= (5,):
from newrelic.core.infinite_tracing_v5_pb2 import AttributeValue, RecordStatus, Span, SpanBatch # noqa: F401
from newrelic.core.infinite_tracing_v5_pb2 import AttributeValue, RecordStatus, Span, SpanBatch
elif PROTOBUF_VERSION >= (4,):
from newrelic.core.infinite_tracing_v4_pb2 import AttributeValue, RecordStatus, Span, SpanBatch # noqa: F401
from newrelic.core.infinite_tracing_v4_pb2 import AttributeValue, RecordStatus, Span, SpanBatch
else:
from newrelic.core.infinite_tracing_v3_pb2 import AttributeValue, RecordStatus, Span, SpanBatch # noqa: F401

2 changes: 1 addition & 1 deletion newrelic/core/node_mixin.py
@@ -50,7 +50,7 @@ def get_trace_segment_params(self, settings, params=None):
return _params

def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict):
i_attrs = base_attrs and base_attrs.copy() or attr_class()
i_attrs = (base_attrs and base_attrs.copy()) or attr_class()
i_attrs["type"] = "Span"
i_attrs["name"] = self.name
i_attrs["guid"] = self.guid
16 changes: 8 additions & 8 deletions newrelic/core/stats_engine.py
@@ -92,7 +92,7 @@ def merge_stats(self, other):
self[1] += other[1]
self[2] += other[2]

self[3] = (self[0] or self[1] or self[2]) and min(self[3], other[3]) or other[3]
self[3] = ((self[0] or self[1] or self[2]) and min(self[3], other[3])) or other[3]
self[4] = max(self[4], other[3])

def merge_apdex_metric(self, metric):
@@ -102,7 +102,7 @@ def merge_apdex_metric(self, metric):
self[1] += metric.tolerating
self[2] += metric.frustrating

self[3] = (self[0] or self[1] or self[2]) and min(self[3], metric.apdex_t) or metric.apdex_t
self[3] = ((self[0] or self[1] or self[2]) and min(self[3], metric.apdex_t)) or metric.apdex_t
self[4] = max(self[4], metric.apdex_t)


@@ -140,7 +140,7 @@ def merge_stats(self, other):

self[1] += other[1]
self[2] += other[2]
self[3] = self[0] and min(self[3], other[3]) or other[3]
self[3] = (self[0] and min(self[3], other[3])) or other[3]
self[4] = max(self[4], other[4])
self[5] += other[5]

@@ -157,7 +157,7 @@ def merge_raw_time_metric(self, duration, exclusive=None):

self[1] += duration
self[2] += exclusive
self[3] = self[0] and min(self[3], duration) or duration
self[3] = (self[0] and min(self[3], duration)) or duration
self[4] = max(self[4], duration)
self[5] += duration**2

@@ -321,7 +321,7 @@ def __str__(self):
return str(self.__stats_table)

def __repr__(self):
return f"{__class__.__name__}({repr(self.__stats_table)})"
return f"{__class__.__name__}({self.__stats_table!r})"

def items(self):
return self.metrics()
@@ -341,7 +341,7 @@ def merge_stats(self, other):
"""Merge data from another instance of this object."""

self[1] += other[1]
self[2] = self[0] and min(self[2], other[2]) or other[2]
self[2] = (self[0] and min(self[2], other[2])) or other[2]
self[3] = max(self[3], other[3])

if self[3] == other[3]:
@@ -358,7 +358,7 @@ def merge_slow_sql_node(self, node):
duration = node.duration

self[1] += duration
self[2] = self[0] and min(self[2], duration) or duration
self[2] = (self[0] and min(self[2], duration)) or duration
self[3] = max(self[3], duration)

if self[3] == duration:
@@ -853,7 +853,7 @@ def notice_error(self, error=None, attributes=None, expected=None, ignore=None,
_, error_group_name = process_user_attribute("error.group.name", error_group_name_raw)
if error_group_name is None or not isinstance(error_group_name, str):
raise ValueError(
f"Invalid attribute value for error.group.name. Expected string, got: {repr(error_group_name_raw)}"
f"Invalid attribute value for error.group.name. Expected string, got: {error_group_name_raw!r}"
)
else:
agent_attributes["error.group.name"] = error_group_name
2 changes: 1 addition & 1 deletion newrelic/core/thread_utilization.py
@@ -113,7 +113,7 @@ def __call__(self):
yield ("Instance/Available", total_threads)
yield ("Instance/Used", utilization)

busy = total_threads and utilization / total_threads or 0.0
busy = (total_threads and utilization / total_threads) or 0.0

yield ("Instance/Busy", busy)

2 changes: 1 addition & 1 deletion newrelic/core/trace_cache.py
@@ -99,7 +99,7 @@ def __init__(self):
self._cache = weakref.WeakValueDictionary()

def __repr__(self):
return f"<{self.__class__.__name__} object at 0x{id(self):x} {str(dict(self.items()))}>"
return f"<{self.__class__.__name__} object at 0x{id(self):x} {dict(self.items())!s}>"

def current_thread_id(self):
"""Returns the thread ID for the caller.
2 changes: 1 addition & 1 deletion newrelic/hooks/adapter_cheroot.py
@@ -19,7 +19,7 @@
def instrument_cheroot_wsgiserver(module):
def wrap_wsgi_application_entry_point(server, bind_addr, wsgi_app, *args, **kwargs):
application = newrelic.api.wsgi_application.WSGIApplicationWrapper(wsgi_app)
args = [server, bind_addr, application] + list(args)
args = [server, bind_addr, application, *args]
return (args, kwargs)

newrelic.api.in_function.wrap_in_function(module, "Server.__init__", wrap_wsgi_application_entry_point)
2 changes: 1 addition & 1 deletion newrelic/hooks/adapter_cherrypy.py
@@ -19,7 +19,7 @@
def instrument_cherrypy_wsgiserver(module):
def wrap_wsgi_application_entry_point(server, bind_addr, wsgi_app, *args, **kwargs):
application = newrelic.api.wsgi_application.WSGIApplicationWrapper(wsgi_app)
args = [server, bind_addr, application] + list(args)
args = [server, bind_addr, application, *args]
return (args, kwargs)

newrelic.api.in_function.wrap_in_function(module, "CherryPyWSGIServer.__init__", wrap_wsgi_application_entry_point)
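
The cheroot and cherrypy hook changes above, like _connect_payload in agent_protocol.py, replace list concatenation with star unpacking inside the literal; the resulting list is the same. A quick equivalence check with placeholder values:

    def rebuild_args(server, bind_addr, wsgi_app, *args):
        # Concatenation and in-literal unpacking build the same list.
        assert [server, bind_addr, wsgi_app] + list(args) == [server, bind_addr, wsgi_app, *args]
        return [server, bind_addr, wsgi_app, *args]

    rebuild_args("server", ("127.0.0.1", 8080), object(), "extra-positional-arg")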