Skip to content

Commit 01db8d8

Browse files
authored
Add filter option for backup (#141)
* Add filter option for backup
* Black linting
1 parent d74ce90 commit 01db8d8

File tree

2 files changed

+38
-4
lines changed

2 files changed

+38
-4
lines changed

README.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ usage: dynamodump [-h] [-a {zip,tar}] [-b BUCKET]
4444
[--writeCapacity WRITECAPACITY] [--schemaOnly]
4545
[--dataOnly] [--noConfirm] [--skipThroughputUpdate]
4646
[--billingMode BILLING_MODE] [--dumpPath DUMPPATH] [--log LOG]
47+
[-f FILTEROPTION]
4748
4849
Simple DynamoDB backup/restore/empty.
4950
@@ -110,6 +111,8 @@ optional arguments:
110111
backups (defaults to use 'dump') [optional]
111112
--log LOG Logging level - DEBUG|INFO|WARNING|ERROR|CRITICAL
112113
[optional]
114+
-f FILTEROPTION, --filterOption FILTEROPTION
115+
Filter option for backup, JSON file of which keys are ['FilterExpression', 'ExpressionAttributeNames', 'ExpressionAttributeValues']
113116
```
114117

115118
Backup files are stored in a 'dump' subdirectory, and are restored from there as well by default.

dynamodump/dynamodump.py

Lines changed: 35 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -646,7 +646,7 @@ def do_empty(dynamo, table_name, billing_mode):
646646
)
647647

648648

649-
def do_backup(dynamo, read_capacity, tableQueue=None, srcTable=None):
649+
def do_backup(dynamo, read_capacity, tableQueue=None, srcTable=None, filterOption=None):
650650
"""
651651
Connect to DynamoDB and perform the backup for srcTable or each table in tableQueue
652652
"""
@@ -703,6 +703,8 @@ def do_backup(dynamo, read_capacity, tableQueue=None, srcTable=None):
703703
optional_args = {}
704704
if last_evaluated_key is not None:
705705
optional_args["ExclusiveStartKey"] = last_evaluated_key
706+
if filterOption is not None:
707+
optional_args.update(filterOption)
706708
scanned_table = dynamo.scan(
707709
TableName=table_name, **optional_args
708710
)
@@ -1230,6 +1232,11 @@ def main():
12301232
parser.add_argument(
12311233
"--log", help="Logging level - DEBUG|INFO|WARNING|ERROR|CRITICAL " "[optional]"
12321234
)
1235+
parser.add_argument(
1236+
"-f",
1237+
"--filterOption",
1238+
help="Filter option for backup, JSON file of which keys are ['FilterExpression', 'ExpressionAttributeNames', 'ExpressionAttributeValues']",
1239+
)
12331240
args = parser.parse_args()
12341241

12351242
# set log level
@@ -1282,6 +1289,20 @@ def main():
12821289
if args.noSeparator is True:
12831290
prefix_separator = None
12841291

1292+
# set filter options
1293+
filter_option = None
1294+
if args.filterOption is not None:
1295+
with open(args.filterOption, "r") as f:
1296+
filter_option = json.load(f)
1297+
if filter_option.keys() != set(
1298+
(
1299+
"FilterExpression",
1300+
"ExpressionAttributeNames",
1301+
"ExpressionAttributeValues",
1302+
)
1303+
):
1304+
raise Exception("Invalid filter option format")
1305+
12851306
# do backup/restore
12861307
start_time = datetime.datetime.now().replace(microsecond=0)
12871308
if args.mode == "backup":
@@ -1311,9 +1332,19 @@ def main():
13111332

13121333
try:
13131334
if args.srcTable.find("*") == -1:
1314-
do_backup(conn, args.read_capacity, tableQueue=None)
1335+
do_backup(
1336+
conn,
1337+
args.read_capacity,
1338+
tableQueue=None,
1339+
filterOption=filter_option,
1340+
)
13151341
else:
1316-
do_backup(conn, args.read_capacity, matching_backup_tables)
1342+
do_backup(
1343+
conn,
1344+
args.read_capacity,
1345+
matching_backup_tables,
1346+
filterOption=filter_option,
1347+
)
13171348
except AttributeError:
13181349
# Didn't specify srcTable if we get here
13191350

@@ -1324,7 +1355,7 @@ def main():
13241355
t = threading.Thread(
13251356
target=do_backup,
13261357
args=(conn, args.readCapacity),
1327-
kwargs={"tableQueue": q},
1358+
kwargs={"tableQueue": q, "filterOption": filter_option},
13281359
)
13291360
t.start()
13301361
threads.append(t)

0 commit comments

Comments (0)