Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions postgres/changelog.d/22999.added
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Adds logical database breakdown for connection metrics
13 changes: 13 additions & 0 deletions postgres/datadog_checks/postgres/postgres.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@
BUFFERCACHE_METRICS,
CLUSTER_VACUUM_PROGRESS_METRICS,
CONNECTION_METRICS,
CONNECTION_METRICS_BY_DB,
COUNT_METRICS,
FUNCTION_METRICS,
IDLE_TX_LOCK_AGE_METRICS,
Expand Down Expand Up @@ -858,6 +859,18 @@ def _collect_stats(self, instance_tags):
archiver_instance_metrics = self.metrics_cache.get_archiver_metrics(self.version)

metric_scope = [CONNECTION_METRICS]

connection_metrics_by_db = copy.deepcopy(CONNECTION_METRICS_BY_DB)
databases_to_ignore = ""
if len(self._config.ignore_databases) > 0:
escaped_databases = ["'{}'".format(db.replace("'", "''")) for db in self._config.ignore_databases]
databases_to_ignore = "AND datname NOT IN ({})".format(", ".join(escaped_databases))
connection_metrics_by_db["query"] = connection_metrics_by_db["query"].format(
ignore_database_filter=databases_to_ignore
)
metric_scope.append(connection_metrics_by_db)
self.log.debug("Connection Metrics by DB query [%s]", connection_metrics_by_db["query"])

per_database_metric_scope = []

if self._config.collect_function_metrics:
Expand Down
23 changes: 23 additions & 0 deletions postgres/datadog_checks/postgres/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -588,6 +588,29 @@ def trim_leading_set_stmts(sql):
""".strip(),
}

# Per-logical-database connection metrics (one result row per database,
# tagged with `db`). The query carries an `{ignore_database_filter}`
# placeholder that the check fills in at collection time with an
# `AND datname NOT IN (...)` clause built from `ignore_databases`
# (see `_collect_stats` in postgres.py).
CONNECTION_METRICS_BY_DB = {
    # First selected column (datname) is emitted as the `db` tag.
    'descriptors': [('database_name', 'db')],
    'metrics': {
        # numbackends -> postgresql.database_connections
        'connections': ('database_connections', AgentCheck.gauge),
        # numbackends / max_connections -> postgresql.percent_database_usage_connections
        'pct_connections': ('percent_database_usage_connections', AgentCheck.gauge),
    },
    'relation': False,
    # NOTE(review): GROUP BY includes numbackends, so each group is a single
    # pg_stat_database row; SUM(numbackends)/MAX(setting) therefore reduces to
    # numbackends / max_connections for that one database.
    'query': """
WITH max_con AS (
SELECT setting::float
FROM pg_settings
WHERE name = 'max_connections'
)
SELECT datname,
numbackends,
SUM(numbackends)/MAX(setting)
FROM pg_stat_database, max_con
WHERE datname IS NOT NULL {ignore_database_filter}
GROUP BY datname, numbackends
""",
    'name': 'connections_by_database',
}

CONNECTION_METRICS = {
'descriptors': [],
'metrics': {
Expand Down
2 changes: 2 additions & 0 deletions postgres/metadata.csv
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ postgresql.create_index.partitions_done,gauge,,,,"When creating an index on a pa
postgresql.create_index.partitions_total,gauge,,,,"When creating an index on a partitioned table, this column is set to the total number of partitions on which the index is to be created. This field is 0 during a REINDEX. Only available with PostgreSQL 12 and newer. This metric is tagged with db, table, index, command, phase.",0,postgres,postgres index partitions total,,
postgresql.create_index.tuples_done,gauge,,,,"Number of tuples already processed in the current phase. Only available with PostgreSQL 12 and newer. This metric is tagged with db, table, index, command, phase.",0,postgres,postgres index tuples done,,
postgresql.create_index.tuples_total,gauge,,,,"Total number of tuples to be processed in the current phase. Only available with PostgreSQL 12 and newer. This metric is tagged with db, table, index, command, phase.",0,postgres,postgres index tuples total,,
postgresql.database_connections,gauge,,connection,,The number of connections being made to this logical database. This metric is tagged with db.,0,postgres,db conns,,
postgresql.database_size,gauge,,byte,,The disk space used by this database. This metric is tagged with db.,0,postgres,db size,,
postgresql.db.count,gauge,,item,,The number of available databases.,0,postgres,db cnt,,
postgresql.dead_rows,gauge,,row,,"Enabled with `relations`. The estimated number of dead rows. This metric is tagged with db, schema, table.",0,postgres,dead rows,,
Expand Down Expand Up @@ -103,6 +104,7 @@ postgresql.live_rows,gauge,,row,,"Enabled with `relations`. The estimated number
postgresql.locks,gauge,,lock,,"Enabled with `relations`. The number of locks active for this database. This metric is tagged with db, lock_mode, lock_type, schema, table, granted.",0,postgres,locks,,
postgresql.locks.idle_in_transaction_age,gauge,,second,,Transaction age of idle in transaction sessions holding exclusive relation locks.,0,postgres,idle tx age with locks,,"pid,db,session_user,app,client_hostname,lock_mode,relation,relation_owner"
postgresql.max_connections,gauge,,connection,, The maximum number of client connections allowed to this database.,0,postgres,max conns,,
postgresql.percent_database_usage_connections,gauge,,fraction,,The number of connections being made to this database as a fraction of the maximum number of allowed connections per logical database. This metric is tagged with db.,0,postgres,pct db usg conns,,
postgresql.percent_usage_connections,gauge,,fraction,,The number of connections to this database as a fraction of the maximum number of allowed connections.,0,postgres,pct usg conns,,
postgresql.pg_stat_statements.dealloc,count,,,,The number of times pg_stat_statements had to evict least executed queries because pg_stat_statements.max was reached.,-1,postgres,pgss dealloc,,
postgresql.queries.blk_read_time,count,,nanosecond,,"Total time spent reading blocks per query_signature, db, and user. (DBM only)",0,postgres, postgres queries block read time,,
Expand Down
10 changes: 8 additions & 2 deletions postgres/tests/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,8 +107,14 @@

COMMON_BGW_METRICS_PG_ABOVE_94 = ['postgresql.archiver.archived_count', 'postgresql.archiver.failed_count']
COMMON_BGW_METRICS_PG_BELOW_17 = ['postgresql.bgwriter.buffers_backend', 'postgresql.bgwriter.buffers_backend_fsync']

# Instance-level connection metrics (no db tag).
CONNECTION_METRICS = ['postgresql.max_connections', 'postgresql.percent_usage_connections']
# Legacy per-database metric name kept for existing assertions.
CONNECTION_METRICS_DB = ['postgresql.connections']
# Per-database connection metrics, asserted once per database with a `db:` tag.
CONNECTION_METRICS_BY_DB = [
    'postgresql.connections',
    'postgresql.database_connections',
    'postgresql.percent_database_usage_connections',
]

COMMON_DBS = ['dogs', 'postgres', 'dogs_nofunc', 'dogs_noschema', DB_NAME]

CHECK_PERFORMANCE_METRICS = [
Expand Down Expand Up @@ -252,7 +258,7 @@ def check_connection_metrics(aggregator, expected_tags, count=1):
aggregator.assert_metric(name, count=count, tags=expected_tags)
for db in COMMON_DBS:
db_tags = expected_tags + ['db:{}'.format(db)]
for name in CONNECTION_METRICS_DB:
for name in CONNECTION_METRICS_BY_DB:
aggregator.assert_metric(name, count=count, tags=db_tags)


Expand Down
14 changes: 12 additions & 2 deletions postgres/tests/test_pg_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,7 @@
)

# Instance-level connection metrics (tagged without db).
CONNECTION_METRICS = ['postgresql.max_connections', 'postgresql.percent_usage_connections']
# Per-logical-database connection metrics (tagged with db).
CONNECTION_METRICS_BY_DB = ['postgresql.database_connections', 'postgresql.percent_database_usage_connections']

pytestmark = [pytest.mark.integration, pytest.mark.usefixtures('dd_environment')]

Expand Down Expand Up @@ -425,10 +426,16 @@ def test_connections_metrics(aggregator, integration_check, pg_instance):
check.run()

expected_tags = _get_expected_tags(check, pg_instance)
expected_tags_with_db = expected_tags + ['db:datadog_test']

for name in CONNECTION_METRICS:
aggregator.assert_metric(name, count=1, tags=expected_tags)
expected_tags += ['db:datadog_test']
aggregator.assert_metric('postgresql.connections', count=1, tags=expected_tags)
aggregator.assert_metric('postgresql.connections', count=1, tags=expected_tags_with_db)

for name in CONNECTION_METRICS_BY_DB:
aggregator.assert_metric(name, count=1, tags=expected_tags_with_db)
aggregator.assert_metric('postgresql.database_connections', count=1, tags=expected_tags_with_db)
aggregator.assert_metric('postgresql.percent_database_usage_connections', count=1, tags=expected_tags_with_db)


@requires_over_10
Expand Down Expand Up @@ -811,6 +818,9 @@ def test_correct_hostname(
for name in CONNECTION_METRICS:
aggregator.assert_metric(name, count=1, tags=expected_tags_no_db, hostname=expected_hostname)

for name in CONNECTION_METRICS_BY_DB:
aggregator.assert_metric(name, count=1, tags=expected_tags_with_db, hostname=expected_hostname)

aggregator.assert_service_check(
'postgres.can_connect',
count=1,
Expand Down
Loading