import os, time, json
import posixpath as path

import requests

from api.API_ingest import shelterluv_db
from server.api.API_ingest.shelterluv_db import insert_animals


# from config import engine
# from flask import current_app
# from sqlalchemy.sql import text

BASE_URL = 'http://shelterluv.com/api/'
MAX_COUNT = 100  # Max records the API will return for one call

try:
    from secrets_dict import SHELTERLUV_SECRET_TOKEN
except ImportError:
    # Not running locally
    from os import environ

    try:
        SHELTERLUV_SECRET_TOKEN = environ['SHELTERLUV_SECRET_TOKEN']
    except KeyError:
        # Not in environment
        # You're SOL for now
        print("Couldn't get SHELTERLUV_SECRET_TOKEN from file or environment")


headers = {
    "Accept": "application/json",
    "X-API-Key": SHELTERLUV_SECRET_TOKEN
}

logger = print
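
# The API helpers below share a simple convention: they return the requested
# value on success and a negative sentinel on failure (-2 request exception,
# -3 non-200 status, -4 JSON decode error, -5 the API reported success == false).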


def get_animal_count():
    """Test that server is operational and get total animal count."""
    animals = 'v1/animals&offset=0&limit=1'
    URL = path.join(BASE_URL, animals)

    try:
        response = requests.request("GET", URL, headers=headers)
    except Exception as e:
        logger('get_animal_count failed with ', e)
        return -2

    if response.status_code != 200:
        logger("get_animal_count ", response.status_code, "code")
        return -3

    try:
        decoded = json.loads(response.text)
    except json.decoder.JSONDecodeError as e:
        logger("get_animal_count JSON decode failed with", e)
        return -4

    if decoded['success']:
        return decoded['total_count']
    else:
        return -5  # AFAICT, this means URL was bad
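
# Example (hypothetical values):
#   total = get_animal_count()   # a record count, or a negative code on failure
#   if total < 0: bail out rather than passing the error code downstream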


def get_updated_animal_count(last_update):
    """Get the count of animal records updated since last_update."""
    animals = 'v1/animals&offset=0&limit=1&sort=updated_at&since=' + str(last_update)
    URL = path.join(BASE_URL, animals)

    try:
        response = requests.request("GET", URL, headers=headers)
    except Exception as e:
        logger('get_updated_animal_count failed with ', e)
        return -2

    if response.status_code != 200:
        logger("get_updated_animal_count ", response.status_code, "code")
        return -3

    try:
        decoded = json.loads(response.text)
    except json.decoder.JSONDecodeError as e:
        logger("get_updated_animal_count JSON decode failed with", e)
        return -4

    if decoded['success']:
        return decoded['total_count']
    else:
        return -5  # AFAICT, this means URL was bad
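
# last_update is passed straight through as the 'since' query parameter; judging by
# the LastUpdatedUnixTime field on the records, this is presumably a Unix timestamp.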


def filter_animals(raw_list):
    """Given a list of animal records as returned by SL, return a list of records
    with only the fields we care about."""

    good_keys = ['ID', 'Internal-ID', 'Name', 'Type', 'DOBUnixTime', 'CoverPhoto', 'LastUpdatedUnixTime']

    filtered = []

    for r in raw_list:
        f = {}
        for k in good_keys:
            try:
                f[k] = r[k]
            except KeyError:
                # SL omitted this field; fill in a neutral default
                if k in ('DOBUnixTime', 'LastUpdatedUnixTime'):
                    f[k] = 0
                else:
                    f[k] = ''
        filtered.append(f)

    return filtered
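
# Example: each raw record is trimmed to just the good_keys entries; any extra
# fields SL returns are dropped, and missing keys become 0 (for the Unix-time
# fields) or '' (for everything else).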


def get_animals_bulk(total_count):
    """Pull all animal records from SL, filter them, and insert them into the
    database. Returns the number of records processed, or a negative error code."""

    # 'Great' API design - animal record 0 is the newest, so we need to start at the end,
    # back up MAX_COUNT rows, make our request, then keep backing up. We need to keep checking
    # the total records to ensure one wasn't added in the middle of the process.
    # Good news, the API is robust and won't blow up if you request past the end.
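    #
    # Worked example (assuming total_count == 250 and MAX_COUNT == 100):
    #   pass 1: offset 150, limit 100  -> records 150-249
    #   pass 2: offset  50, limit 100  -> records  50-149
    #   pass 3: offset   0, limit  50  -> records   0-49, then stop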

    raw_url = path.join(BASE_URL, 'v1/animals&offset={0}&limit={1}')

    start_record = int(total_count)
    offset = (start_record - MAX_COUNT) if (start_record - MAX_COUNT) > -1 else 0
    limit = MAX_COUNT
    processed = 0

    while offset > -1:

        logger("getting at offset", offset)
        url = raw_url.format(offset, limit)

        try:
            response = requests.request("GET", url, headers=headers)
        except Exception as e:
            logger('get_animals_bulk failed with ', e)
            return -2

        if response.status_code != 200:
            logger("get_animals_bulk ", response.status_code, "code")
            return -3

        try:
            decoded = json.loads(response.text)
        except json.decoder.JSONDecodeError as e:
            logger("get_animals_bulk JSON decode failed with", e)
            return -4

        if decoded['success']:
            batch = filter_animals(decoded['animals'])
            insert_animals(batch)
            processed += len(batch)
            if offset == 0:
                break
            offset -= MAX_COUNT
            if offset < 0:
                # Fewer than MAX_COUNT records remain; shrink the final request
                limit = limit + offset
                offset = 0
        else:
            return -5  # AFAICT, this means URL was bad

    return processed


def update_animals(last_update):
    """Get the animals inserted or updated since last check, insert/update db records."""

    updated_records = get_updated_animal_count(last_update)
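    # The rest of the sync is not yet implemented: presumably the next step is to
    # page through those updated records (as get_animals_bulk does) and
    # insert/update the matching rows via shelterluv_db.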


def sla_test():
    total_count = get_animal_count()
    print('Total animals:', total_count)

    b = get_animals_bulk(total_count)
    print('Records processed:', b)

    # f = filter_animals(b)
    # print(f)

    # count = shelterluv_db.insert_animals(f)
    return b

# if __name__ == '__main__':

#     total_count = get_animal_count()
#     print('Total animals:',total_count)

#     b = get_animals_bulk(9)
#     print(len(b))

#     f = filter_animals(b)
#     print(f)

#     count = shelterluv_db.insert_animals(f)