Skip to content

Commit 699e5c2

Browse files
authored
[MISC] Conditional checksum calculation (#901)
* Conditional checksum calculation
* Converge s3 resource creation
* Tactically deployed sleep
* Early fail
1 parent 5041520 commit 699e5c2

File tree

5 files changed

+92
-70
lines changed

5 files changed

+92
-70
lines changed

poetry.lock

Lines changed: 37 additions & 37 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -8,7 +8,7 @@ requires-poetry = ">=2.0.0"
88
[tool.poetry.dependencies]
99
python = "^3.10"
1010
ops = "^2.18.1"
11-
boto3 = "^1.35.99"
11+
boto3 = "^1.37.22"
1212
pgconnstr = "^1.0.1"
1313
requests = "^2.32.3"
1414
tenacity = "^9.0.0"

src/backups.py

Lines changed: 21 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
from datetime import datetime, timezone
1313
from io import BytesIO
1414

15-
import boto3 as boto3
15+
import boto3
1616
import botocore
1717
from botocore.exceptions import ClientError
1818
from charms.data_platform_libs.v0.s3 import CredentialsChangedEvent, S3Requirer
@@ -88,6 +88,23 @@ def _tls_ca_chain_filename(self) -> str:
8888
return f"{self.charm._storage_path}/pgbackrest-tls-ca-chain.crt"
8989
return ""
9090

91+
def _get_s3_session_resource(self, s3_parameters: dict):
    """Create a boto3 S3 resource from the charm's S3 connection parameters.

    Args:
        s3_parameters: mapping with the relation-provided connection data;
            reads the "access-key", "secret-key" and "region" keys, and is
            forwarded to ``self._construct_endpoint`` for the endpoint URL.

    Returns:
        A boto3 S3 service resource configured with the custom endpoint,
        optional CA-chain verification, and relaxed checksum handling.
    """
    # Only compute/validate checksums when the operation requires it —
    # some S3-compatible backends reject boto3's newer default checksum
    # headers. See:
    # https://github.com/boto/boto3/issues/4400#issuecomment-2600742103
    client_config = botocore.client.Config(
        request_checksum_calculation="when_required",
        response_checksum_validation="when_required",
    )

    session = boto3.session.Session(
        aws_access_key_id=s3_parameters["access-key"],
        aws_secret_access_key=s3_parameters["secret-key"],
        region_name=s3_parameters["region"],
    )

    # Fall back to default certificate verification when no CA chain file
    # is configured (empty string → None).
    ca_chain_file = self._tls_ca_chain_filename or None
    return session.resource(
        "s3",
        endpoint_url=self._construct_endpoint(s3_parameters),
        verify=ca_chain_file,
        config=client_config,
    )
107+
91108
def _are_backup_settings_ok(self) -> tuple[bool, str | None]:
92109
"""Validates whether backup settings are OK."""
93110
if self.model.get_relation(self.relation_name) is None:
@@ -227,18 +244,9 @@ def _create_bucket_if_not_exists(self) -> None:
227244

228245
bucket_name = s3_parameters["bucket"]
229246
region = s3_parameters.get("region")
230-
session = boto3.session.Session(
231-
aws_access_key_id=s3_parameters["access-key"],
232-
aws_secret_access_key=s3_parameters["secret-key"],
233-
region_name=s3_parameters["region"],
234-
)
235247

236248
try:
237-
s3 = session.resource(
238-
"s3",
239-
endpoint_url=self._construct_endpoint(s3_parameters),
240-
verify=(self._tls_ca_chain_filename or None),
241-
)
249+
s3 = self._get_s3_session_resource(s3_parameters)
242250
except ValueError as e:
243251
logger.exception("Failed to create a session '%s' in region=%s.", bucket_name, region)
244252
raise e
@@ -1316,17 +1324,8 @@ def _upload_content_to_s3(
13161324
processed_s3_path = os.path.join(s3_parameters["path"], s3_path).lstrip("/")
13171325
try:
13181326
logger.info(f"Uploading content to bucket={bucket_name}, path={processed_s3_path}")
1319-
session = boto3.session.Session(
1320-
aws_access_key_id=s3_parameters["access-key"],
1321-
aws_secret_access_key=s3_parameters["secret-key"],
1322-
region_name=s3_parameters["region"],
1323-
)
13241327

1325-
s3 = session.resource(
1326-
"s3",
1327-
endpoint_url=self._construct_endpoint(s3_parameters),
1328-
verify=(self._tls_ca_chain_filename or None),
1329-
)
1328+
s3 = self._get_s3_session_resource(s3_parameters)
13301329
bucket = s3.Bucket(bucket_name)
13311330

13321331
with tempfile.NamedTemporaryFile() as temp_file:
@@ -1359,16 +1358,7 @@ def _read_content_from_s3(self, s3_path: str, s3_parameters: dict) -> str | None
13591358
processed_s3_path = os.path.join(s3_parameters["path"], s3_path).lstrip("/")
13601359
try:
13611360
logger.info(f"Reading content from bucket={bucket_name}, path={processed_s3_path}")
1362-
session = boto3.session.Session(
1363-
aws_access_key_id=s3_parameters["access-key"],
1364-
aws_secret_access_key=s3_parameters["secret-key"],
1365-
region_name=s3_parameters["region"],
1366-
)
1367-
s3 = session.resource(
1368-
"s3",
1369-
endpoint_url=self._construct_endpoint(s3_parameters),
1370-
verify=(self._tls_ca_chain_filename or None),
1371-
)
1361+
s3 = self._get_s3_session_resource(s3_parameters)
13721362
bucket = s3.Bucket(bucket_name)
13731363
with BytesIO() as buf:
13741364
bucket.download_fileobj(processed_s3_path, buf)

tests/integration/test_backups_gcp.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -72,6 +72,7 @@ async def test_backup_gcp(ops_test: OpsTest, charm, gcp_cloud_configs: tuple[dic
7272
)
7373

7474

75+
@pytest.mark.abort_on_fail
7576
async def test_restore_on_new_cluster(
7677
ops_test: OpsTest, charm, gcp_cloud_configs: tuple[dict, dict]
7778
) -> None:

0 commit comments

Comments
 (0)