Test do NOT merge #1050

Closed · wants to merge 5 commits
17 changes: 0 additions & 17 deletions lib/charms/postgresql_k8s/v1/postgresql.py
@@ -1216,23 +1216,6 @@ def update_user_password(
if connection is not None:
connection.close()

def is_restart_pending(self) -> bool:
"""Query pg_settings for pending restart."""
connection = None
try:
with self._connect_to_database() as connection, connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM pg_settings WHERE pending_restart=True;")
return cursor.fetchone()[0] > 0
except psycopg2.OperationalError:
logger.warning("Failed to connect to PostgreSQL.")
return False
except psycopg2.Error as e:
logger.error(f"Failed to check if restart is pending: {e}")
return False
finally:
if connection:
connection.close()

@staticmethod
def build_postgresql_group_map(group_map: Optional[str]) -> List[Tuple]:
"""Build the PostgreSQL authorization group-map.
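For reference, a minimal standalone sketch of the SQL-side check removed above, assuming psycopg2 is installed and that `dsn` points at a reachable PostgreSQL instance (the function name and DSN are illustrative, not part of this PR):

import psycopg2

def restart_pending_via_sql(dsn: str) -> bool:
    """Return True when any pg_settings entry is flagged pending_restart."""
    connection = None
    try:
        connection = psycopg2.connect(dsn)
        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM pg_settings WHERE pending_restart;")
            return cursor.fetchone()[0] > 0
    except psycopg2.Error:
        # Mirror the removed helper: treat connection or query failures
        # as "no restart pending".
        return False
    finally:
        if connection is not None:
            connection.close()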
18 changes: 6 additions & 12 deletions src/charm.py
@@ -2401,18 +2401,12 @@ def _handle_postgresql_restart_need(self) -> None:
self._patroni.reload_patroni_configuration()
except Exception as e:
logger.error(f"Reload patroni call failed! error: {e!s}")
# Wait somewhat longer than Patroni's loop_wait default value (10 seconds),
# which determines how long Patroni waits before re-checking the
# configuration file in order to reload it.
try:
for attempt in Retrying(stop=stop_after_attempt(5), wait=wait_fixed(3)):
with attempt:
restart_postgresql = restart_postgresql or self.postgresql.is_restart_pending()
if not restart_postgresql:
raise Exception
except RetryError:
# Ignore the error, as it happens only to indicate that the configuration has not changed.
pass

time.sleep(2)  # Patroni needs a moment to set the flag on the API
restart_pending = self._patroni.is_restart_pending()
logger.debug(f"Checking if restart pending: {restart_postgresql} or {restart_pending}")
restart_postgresql = restart_postgresql or restart_pending

self.unit_peer_data.update({"tls": "enabled" if self.is_tls_enabled else ""})
self.postgresql_client_relation.update_endpoints()

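In short, the tenacity retry loop gives way to a fixed short wait plus a single check against Patroni's API. A rough sketch of the new flow, assuming a `patroni` helper that exposes `is_restart_pending()` as added in src/cluster.py below (names are illustrative):

import time

def needs_restart(patroni, config_changed: bool) -> bool:
    # Give Patroni a moment to publish the pending_restart flag on its REST API.
    time.sleep(2)
    return config_changed or patroni.is_restart_pending()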
17 changes: 17 additions & 0 deletions src/cluster.py
@@ -438,6 +438,23 @@ def get_patroni_health(self) -> dict[str, str]:

return r.json()

def is_restart_pending(self) -> bool:
"""Returns whether the Patroni/PostgreSQL restart pending."""
patroni_status = requests.get(
f"{self._patroni_url}/patroni",
verify=self.verify,
timeout=API_REQUEST_TIMEOUT,
auth=self._patroni_auth,
)
try:
pending_restart = patroni_status.json()["pending_restart"]
except KeyError:
pending_restart = False
logger.debug(f"Patroni API is_restart_pending: {pending_restart}")

return pending_restart

@property
def is_creating_backup(self) -> bool:
"""Returns whether a backup is being created."""
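Outside the charm, the same flag can be read directly from Patroni's REST API. A minimal sketch using requests, where the base URL, credentials, and CA bundle are placeholders:

import requests

def patroni_restart_pending(base_url: str, auth=None, verify=True) -> bool:
    response = requests.get(f"{base_url}/patroni", auth=auth, verify=verify, timeout=10)
    response.raise_for_status()
    # Patroni omits "pending_restart" from the member status when no restart is needed.
    return bool(response.json().get("pending_restart", False))

# Example call (values are illustrative):
# patroni_restart_pending("https://10.1.2.3:8008", auth=("patroni", "secret"), verify="/path/to/ca.pem")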
55 changes: 1 addition & 54 deletions tests/unit/test_charm.py
@@ -31,7 +31,7 @@
)
from ops.testing import Harness
from psycopg2 import OperationalError
from tenacity import RetryError, wait_fixed
from tenacity import RetryError

from backups import CANNOT_RESTORE_PITR
from charm import (
@@ -2028,59 +2028,6 @@ def test_migration_from_single_secret(harness, scope, is_leader):
)


def test_handle_postgresql_restart_need(harness):
with (
patch("charms.rolling_ops.v0.rollingops.RollingOpsManager._on_acquire_lock") as _restart,
patch("charm.wait_fixed", return_value=wait_fixed(0)),
patch("charm.Patroni.reload_patroni_configuration") as _reload_patroni_configuration,
patch("charm.PostgresqlOperatorCharm._unit_ip"),
patch(
"charm.PostgresqlOperatorCharm.is_tls_enabled", new_callable=PropertyMock
) as _is_tls_enabled,
patch.object(PostgresqlOperatorCharm, "postgresql", Mock()) as postgresql_mock,
):
rel_id = harness.model.get_relation(PEER).id
for values in itertools.product(
[True, False], [True, False], [True, False], [True, False]
):
_reload_patroni_configuration.reset_mock()
_restart.reset_mock()
with harness.hooks_disabled():
harness.update_relation_data(rel_id, harness.charm.unit.name, {"tls": ""})
harness.update_relation_data(
rel_id,
harness.charm.unit.name,
{"postgresql_restarted": ("True" if values[3] else "")},
)

_is_tls_enabled.return_value = values[0]
postgresql_mock.is_tls_enabled.return_value = values[1]
postgresql_mock.is_restart_pending = PropertyMock(return_value=values[2])

harness.charm._handle_postgresql_restart_need()
_reload_patroni_configuration.assert_called_once()
if values[0]:
assert "tls" in harness.get_relation_data(rel_id, harness.charm.unit)
else:
assert "tls" not in harness.get_relation_data(rel_id, harness.charm.unit)

if (values[0] != values[1]) or values[2]:
assert "postgresql_restarted" not in harness.get_relation_data(
rel_id, harness.charm.unit
)
_restart.assert_called_once()
else:
if values[3]:
assert "postgresql_restarted" in harness.get_relation_data(
rel_id, harness.charm.unit
)
else:
assert "postgresql_restarted" not in harness.get_relation_data(
rel_id, harness.charm.unit
)
_restart.assert_not_called()


def test_on_peer_relation_departed(harness):
with (
patch(