forked from Jonpro03/jonprobot
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path6_migrate_portfolio_db.py
More file actions
114 lines (100 loc) · 3.3 KB
/
6_migrate_portfolio_db.py
File metadata and controls
114 lines (100 loc) · 3.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
import boto3
import tinydb
from datetime import datetime
from decimal import Decimal
from boto3.dynamodb.conditions import Key, Attr
import pytz
# Users who opted out of tracking; posts from these accounts are skipped.
anon_apes = []
# Source TinyDB databases being migrated into DynamoDB.
pdb = tinydb.TinyDB("portfolio_db.json")
sdb = tinydb.TinyDB("new_shares_db.json")
q = tinydb.Query()
# AWS settings — fill in before running.
# NOTE(review): credentials are hard-coded placeholders; prefer environment
# variables or an AWS profile so secrets never land in source control.
aws_region = ""
aws_access_key = ""
aws_secret_access_key = ""
BUCKET = ""
session = boto3.Session(aws_access_key_id=aws_access_key, aws_secret_access_key=aws_secret_access_key)
ddb = session.resource("dynamodb", region_name=aws_region)
# Destination table; the queries below use 'u' (partition) and 'id' (sort) keys.
posts_table = ddb.Table("computershared_posts")
s3_client = session.client('s3')
# Migration checkpoint: only posts created at/after this epoch timestamp are
# processed.  NOTE(review): opened "r+" but only read — "r" would suffice.
with open("earliest_update.txt", "r+") as f:
    starting_at = int(f.read())
#starting_at = 1642006339
# Migrate portfolio posts created at/after the checkpoint into DynamoDB.
for pd in pdb.search(q.created >= starting_at):
    # Skip users who opted out of tracking.
    if pd['u'] in anon_apes:
        continue
    # Skip worthless or unaudited posts.
    if pd["value"] == 0 or not pd["audited"]:  # or pd["image_path"] in ["", "None"]:
        continue
    # Idempotency check: has this (user, post id) already been migrated?
    already = posts_table.query(
        KeyConditionExpression=Key('u').eq(pd['u']) & Key('id').eq(pd['id'])
    )["Count"]
    if already > 0:
        print(f"{pd['id']} already done.")
        continue
    image_name = pd["image_path"].split('/')[-1]
    # Upload the screenshot to S3 unless it is already there.
    # BUG FIX: head_object takes keyword arguments (Bucket=, Key=); the old
    # positional call raised TypeError, which the bare except swallowed, so an
    # upload was attempted on every run even when the object already existed.
    try:
        s3_client.head_object(Bucket=BUCKET, Key=image_name)
    except Exception:
        try:
            s3_client.upload_file(pd["image_path"], BUCKET, image_name)
        except Exception:
            # Best effort: a missing/unreadable image must not block migration.
            pass
    try:
        record = {
            # DynamoDB rejects floats; route numbers through Decimal(str(...)).
            "timestamp": Decimal(str(pd["created"])),
            "u": pd["u"],
            "id": pd["id"],
            "url": pd["url"],
            "sub": pd["sub"],
            "audited": pd["audited"],
            "post_type": "portfolio",
            "image_url": pd["url"],
            "image_hash": pd["img_hash"],
            "image_text": pd["image_text"],
            "image_path": f"s3://{BUCKET}/{image_name}",
            "dupes": [],
            "shares": Decimal(str(pd["value"])),
        }
        posts_table.put_item(Item=record)
    except Exception:
        # Skip records that fail to serialize/write; keep migrating the rest.
        continue
    print(f"{pd['id']} uploaded.")
# Migrate share-purchase posts created after the checkpoint into DynamoDB.
# NOTE(review): this loop uses a strict '>' while the portfolio loop uses
# '>=' — confirm whether posts created exactly at the checkpoint should be
# included here too.
for sd in sdb.search(q.created > starting_at):
    # Skip users who opted out of tracking.
    if sd['u'] in anon_apes:
        continue
    # Skip worthless or unaudited posts.
    if sd["value"] == 0 or not sd["audited"]:  # or sd["image_path"] in ["", "None"]:
        continue
    # Idempotency check: has this (user, post id) already been migrated?
    already = posts_table.query(
        KeyConditionExpression=Key('u').eq(sd['u']) & Key('id').eq(sd['id'])
    )["Count"]
    if already > 0:
        print(f"{sd['id']} already done.")
        continue
    image_name = sd["image_path"].split('/')[-1]
    # Upload the screenshot to S3 unless it is already there.
    # BUG FIX: head_object takes keyword arguments (Bucket=, Key=); the old
    # positional call raised TypeError, which the bare except swallowed, so an
    # upload was attempted on every run even when the object already existed.
    try:
        s3_client.head_object(Bucket=BUCKET, Key=image_name)
    except Exception:
        try:
            s3_client.upload_file(sd["image_path"], BUCKET, image_name)
        except Exception:
            # Best effort: a missing/unreadable image must not block migration.
            pass
    # Fall back to a fixed GME price when the record lacks one (or has a
    # falsy value).  BUG FIX: the old shares computation did sd["gme_price"]
    # unconditionally, raising KeyError for records without the key — those
    # records were silently dropped by the bare except.
    gme_price = sd.get("gme_price") or 170.0
    try:
        record = {
            # DynamoDB rejects floats; route numbers through Decimal(str(...)).
            "timestamp": Decimal(str(sd["created"])),
            "u": sd["u"],
            "id": sd["id"],
            "url": sd["url"],
            "sub": sd["sub"],
            "audited": sd["audited"],
            "post_type": "purchase",
            "gme_price": Decimal(str(gme_price)),
            "image_url": sd["url"],
            "image_hash": sd["img_hash"],
            "image_text": sd["image_text"],
            "image_path": f"s3://{BUCKET}/{image_name}",
            "dupes": [],
            # Dollar value divided by price-per-share = share count.
            "shares": Decimal(str(sd["value"] / gme_price)),
        }
        posts_table.put_item(Item=record)
    except Exception:
        # Skip records that fail to serialize/write; keep migrating the rest.
        continue
    # Consistency fix: report the post id like the portfolio loop (the old
    # code printed the username here).
    print(f"{sd['id']} uploaded.")