 # Copyright 2023 Canonical Ltd.
 # See LICENSE file for licensing details.
 import logging
-import os
 import uuid
 
-import boto3
-import pytest as pytest
+import pytest
 from lightkube.core.client import Client
 from lightkube.resources.core_v1 import Pod
 from pytest_operator.plugin import OpsTest
 from tenacity import Retrying, stop_after_attempt, wait_exponential
 
 from . import architecture
+from .conftest import GCP
 from .helpers import (
     DATABASE_APP_NAME,
     backup_operations,
     build_and_deploy,
     cat_file_from_unit,
-    construct_endpoint,
     db_connect,
     get_password,
     get_unit_address,
@@ -43,60 +41,12 @@
 
 logger = logging.getLogger(__name__)
 
-AWS = "AWS"
-GCP = "GCP"
-
-
-@pytest.fixture(scope="module")
-async def cloud_configs(ops_test: OpsTest) -> None:
-    # Define some configurations and credentials.
-    configs = {
-        AWS: {
-            "endpoint": "https://s3.amazonaws.com",
-            "bucket": "data-charms-testing",
-            "path": f"/postgresql-k8s/{uuid.uuid1()}",
-            "region": "us-east-1",
-        },
-        GCP: {
-            "endpoint": "https://storage.googleapis.com",
-            "bucket": "data-charms-testing",
-            "path": f"/postgresql-k8s/{uuid.uuid1()}",
-            "region": "",
-        },
-    }
-    credentials = {
-        AWS: {
-            "access-key": os.environ["AWS_ACCESS_KEY"],
-            "secret-key": os.environ["AWS_SECRET_KEY"],
-        },
-        GCP: {
-            "access-key": os.environ["GCP_ACCESS_KEY"],
-            "secret-key": os.environ["GCP_SECRET_KEY"],
-        },
-    }
-    yield configs, credentials
-    # Delete the previously created objects.
-    logger.info("deleting the previously created backups")
-    for cloud, config in configs.items():
-        session = boto3.session.Session(
-            aws_access_key_id=credentials[cloud]["access-key"],
-            aws_secret_access_key=credentials[cloud]["secret-key"],
-            region_name=config["region"],
-        )
-        s3 = session.resource(
-            "s3", endpoint_url=construct_endpoint(config["endpoint"], config["region"])
-        )
-        bucket = s3.Bucket(config["bucket"])
-        # GCS doesn't support batch delete operation, so delete the objects one by one.
-        for bucket_object in bucket.objects.filter(Prefix=config["path"].lstrip("/")):
-            bucket_object.delete()
-
 
 @pytest.mark.abort_on_fail
-async def test_backup_gcp(ops_test: OpsTest, charm, cloud_configs: tuple[dict, dict]) -> None:
+async def test_backup_gcp(ops_test: OpsTest, charm, gcp_cloud_configs: tuple[dict, dict]) -> None:
     """Build and deploy two units of PostgreSQL in GCP and then test the backup and restore actions."""
-    config = cloud_configs[0][GCP]
-    credentials = cloud_configs[1][GCP]
+    config = gcp_cloud_configs[0]
+    credentials = gcp_cloud_configs[1]
 
     await backup_operations(
         ops_test,
@@ -123,7 +73,9 @@ async def test_backup_gcp(ops_test: OpsTest, charm, cloud_configs: tuple[dict, d
     )
 
 
-async def test_restore_on_new_cluster(ops_test: OpsTest, charm) -> None:
+async def test_restore_on_new_cluster(
+    ops_test: OpsTest, charm, gcp_cloud_configs: tuple[dict, dict]
+) -> None:
     """Test that is possible to restore a backup to another PostgreSQL cluster."""
     previous_database_app_name = f"{DATABASE_APP_NAME}-gcp"
     database_app_name = f"new-{DATABASE_APP_NAME}"
@@ -217,7 +169,7 @@ async def test_restore_on_new_cluster(ops_test: OpsTest, charm) -> None:
 
 
 async def test_invalid_config_and_recovery_after_fixing_it(
-    ops_test: OpsTest, cloud_configs: tuple[dict, dict]
+    ops_test: OpsTest, gcp_cloud_configs: tuple[dict, dict]
 ) -> None:
     """Test that the charm can handle invalid and valid backup configurations."""
     database_app_name = f"new-{DATABASE_APP_NAME}"
@@ -251,10 +203,10 @@ async def test_invalid_config_and_recovery_after_fixing_it(
     logger.info(
         "configuring S3 integrator for a valid cloud, but with the path of another cluster repository"
     )
-    await ops_test.model.applications[S3_INTEGRATOR_APP_NAME].set_config(cloud_configs[0][GCP])
+    await ops_test.model.applications[S3_INTEGRATOR_APP_NAME].set_config(gcp_cloud_configs[0])
     action = await ops_test.model.units.get(f"{S3_INTEGRATOR_APP_NAME}/0").run_action(
         "sync-s3-credentials",
-        **cloud_configs[1][GCP],
+        **gcp_cloud_configs[1],
     )
     await action.wait()
     await wait_for_idle_on_blocked(
@@ -267,7 +219,7 @@ async def test_invalid_config_and_recovery_after_fixing_it(
 
     # Provide valid backup configurations, with another path in the S3 bucket.
     logger.info("configuring S3 integrator for a valid cloud")
-    config = cloud_configs[0][GCP].copy()
+    config = gcp_cloud_configs[0].copy()
     config["path"] = f"/postgresql-k8s/{uuid.uuid1()}"
     await ops_test.model.applications[S3_INTEGRATOR_APP_NAME].set_config(config)
     logger.info("waiting for the database charm to become active")
@@ -276,7 +228,7 @@ async def test_invalid_config_and_recovery_after_fixing_it(
     )
 
 
-async def test_delete_pod(ops_test: OpsTest) -> None:
+async def test_delete_pod(ops_test: OpsTest, gcp_cloud_configs: tuple[dict, dict]) -> None:
     logger.info("Getting original backup config")
     database_app_name = f"new-{DATABASE_APP_NAME}"
     original_pgbackrest_config = await cat_file_from_unit(
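The module-scoped `gcp_cloud_configs` fixture that these tests now consume is not shown in this diff; it is imported from `.conftest`. A minimal sketch of what such a fixture could look like follows, assuming it simply mirrors the removed `cloud_configs` fixture restricted to GCP (the fixture name matches the tests above, but the exact conftest implementation, signature, and use of the `GCP_ACCESS_KEY`/`GCP_SECRET_KEY` environment variables are assumptions, not taken from this change):

# Hypothetical sketch of a GCP-only fixture in tests/integration/conftest.py.
# It reuses the configuration and cleanup logic of the removed cloud_configs
# fixture; construct_endpoint is dropped because the GCS endpoint needs no
# region-based rewriting in this simplified version.
import os
import uuid

import boto3
import pytest

GCP = "GCP"


@pytest.fixture(scope="module")
def gcp_cloud_configs():
    """Yield (config, credentials) for GCS, then delete the objects created by the tests."""
    config = {
        "endpoint": "https://storage.googleapis.com",
        "bucket": "data-charms-testing",
        "path": f"/postgresql-k8s/{uuid.uuid1()}",
        "region": "",
    }
    credentials = {
        "access-key": os.environ["GCP_ACCESS_KEY"],
        "secret-key": os.environ["GCP_SECRET_KEY"],
    }
    yield config, credentials
    # GCS doesn't support batch delete through the S3 API, so delete objects one by one.
    session = boto3.session.Session(
        aws_access_key_id=credentials["access-key"],
        aws_secret_access_key=credentials["secret-key"],
        region_name=config["region"],
    )
    s3 = session.resource("s3", endpoint_url=config["endpoint"])
    bucket = s3.Bucket(config["bucket"])
    for bucket_object in bucket.objects.filter(Prefix=config["path"].lstrip("/")):
        bucket_object.delete()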