Skip to content

Commit 19eafc1

Browse files
committed
Merge pull request #18 from bchew/revert-17-master
Revert "Support unicode value"
2 parents 371dd37 + 11e215f commit 19eafc1

File tree

1 file changed

+10
-12
lines changed

1 file changed

+10
-12
lines changed

dynamodump.py

Lines changed: 10 additions & 12 deletions
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,4 @@
11
#!/usr/bin/env python
2-
#coding: utf-8
3-
42
import boto.dynamodb2.layer1, json, sys, time, shutil, os, argparse, logging, datetime, threading
53
from boto.dynamodb2.layer1 import DynamoDBConnection
64

@@ -29,7 +27,7 @@ def get_table_name_matches(conn, table_name_wildcard, separator):
2927

3028
try:
3129
last_evaluated_table_name = table_list["LastEvaluatedTableName"]
32-
except KeyError as e:
30+
except KeyError, e:
3331
break
3432

3533
matching_tables = []
@@ -78,7 +76,7 @@ def delete_table(conn, sleep_interval, table_name):
7876
table_exist = True
7977
try:
8078
conn.delete_table(table_name)
81-
except boto.exception.JSONResponseError as e:
79+
except boto.exception.JSONResponseError, e:
8280
if e.body["__type"] == "com.amazonaws.dynamodb.v20120810#ResourceNotFoundException":
8381
table_exist = False
8482
logging.info(table_name + " table deleted!")
@@ -102,7 +100,7 @@ def delete_table(conn, sleep_interval, table_name):
102100
while True:
103101
logging.info("Waiting for " + table_name + " table to be deleted.. [" + conn.describe_table(table_name)["Table"]["TableStatus"] +"]")
104102
time.sleep(sleep_interval)
105-
except boto.exception.JSONResponseError as e:
103+
except boto.exception.JSONResponseError, e:
106104
if e.body["__type"] == "com.amazonaws.dynamodb.v20120810#ResourceNotFoundException":
107105
logging.info(table_name + " table deleted.")
108106
pass
@@ -133,7 +131,7 @@ def batch_write(conn, sleep_interval, table_name, put_requests):
133131
request_items = unprocessed_items
134132
i += 1
135133
else:
136-
logging.info("Max retries reached, failed to processed batch write: " + json.dumps(unprocessed_items, indent=JSON_INDENT, ensure_ascii=False))
134+
logging.info("Max retries reached, failed to processed batch write: " + json.dumps(unprocessed_items, indent=JSON_INDENT))
137135
logging.info("Ignoring and continuing..")
138136
break
139137

@@ -152,7 +150,7 @@ def update_provisioned_throughput(conn, table_name, read_capacity, write_capacit
152150
try:
153151
conn.update_table(table_name, {"ReadCapacityUnits": int(read_capacity), "WriteCapacityUnits": int(write_capacity)})
154152
break
155-
except boto.exception.JSONResponseError as e:
153+
except boto.exception.JSONResponseError, e:
156154
if e.body["__type"] == "com.amazonaws.dynamodb.v20120810#LimitExceededException":
157155
logging.info("Limit exceeded, retrying updating throughput of " + table_name + "..")
158156
time.sleep(sleep_interval)
@@ -176,7 +174,7 @@ def do_backup(conn, table_name, read_capacity):
176174
logging.info("Dumping table schema for " + table_name)
177175
f = open(DUMP_PATH + "/" + table_name + "/" + SCHEMA_FILE, "w+")
178176
table_desc = conn.describe_table(table_name)
179-
f.write(json.dumps(table_desc, indent=JSON_INDENT, ensure_ascii=False))
177+
f.write(json.dumps(table_desc, indent=JSON_INDENT))
180178
f.close()
181179

182180
original_read_capacity = table_desc["Table"]["ProvisionedThroughput"]["ReadCapacityUnits"]
@@ -197,14 +195,14 @@ def do_backup(conn, table_name, read_capacity):
197195
scanned_table = conn.scan(table_name, exclusive_start_key=last_evaluated_key)
198196

199197
f = open(DUMP_PATH + "/" + table_name + "/" + DATA_DIR + "/" + str(i).zfill(4) + ".json", "w+")
200-
f.write(json.dumps(scanned_table, indent=JSON_INDENT, ensure_ascii=False))
198+
f.write(json.dumps(scanned_table, indent=JSON_INDENT))
201199
f.close()
202200

203201
i += 1
204202

205203
try:
206204
last_evaluated_key = scanned_table["LastEvaluatedKey"]
207-
except KeyError as e:
205+
except KeyError, e:
208206
break
209207

210208
# revert back to original table read capacity if specified
@@ -263,7 +261,7 @@ def do_restore(conn, sleep_interval, source_table, destination_table, write_capa
263261
try:
264262
conn.create_table(table_attribute_definitions, table_table_name, table_key_schema, table_provisioned_throughput, table_local_secondary_indexes, table_global_secondary_indexes)
265263
break
266-
except boto.exception.JSONResponseError as e:
264+
except boto.exception.JSONResponseError, e:
267265
if e.body["__type"] == "com.amazonaws.dynamodb.v20120810#LimitExceededException":
268266
logging.info("Limit exceeded, retrying creation of " + destination_table + "..")
269267
time.sleep(sleep_interval)
@@ -321,7 +319,7 @@ def do_restore(conn, sleep_interval, source_table, destination_table, write_capa
321319
try:
322320
conn.update_table(destination_table, global_secondary_index_updates=gsi_data)
323321
break
324-
except boto.exception.JSONResponseError as e:
322+
except boto.exception.JSONResponseError, e:
325323
if e.body["__type"] == "com.amazonaws.dynamodb.v20120810#LimitExceededException":
326324
logging.info("Limit exceeded, retrying updating throughput of GlobalSecondaryIndexes in " + destination_table + "..")
327325
time.sleep(sleep_interval)

0 commit comments

Comments (0)