
Commit bdccdc7

refactor anomalies (#186)
1 parent 094179c commit bdccdc7

File tree

1 file changed: +38, -84 lines


data-collection/deploy/module-cost-anomaly.yaml

Lines changed: 38 additions & 84 deletions
@@ -117,55 +117,25 @@ Resources:
 import boto3

 BUCKET = os.environ['BUCKET_NAME']
-ROLE_NAME = os.environ['ROLENAME']
+ROLE_NAME = os.environ['ROLE_NAME']
 MODULE_NAME = os.environ['PREFIX']
 TMP_FILE = '/tmp/tmp.json'
-REGIONS = ["us-east-1"]
+REGION = "us-east-1"

 logger = logging.getLogger(__name__)
 logger.setLevel(getattr(logging, os.environ.get('LOG_LEVEL', 'INFO').upper(), logging.INFO))

-# Helper Exception classes
-class CidError(Exception):
-    def __init__(self, message="", exc=None):
-        message = f"({type(exc).__name__}) exception. {message}" if exc else message
-        if type(self) == CidNonCriticalError.__class__:
-            logger.warning(message)
-        else:
-            logger.error(message)
-        super().__init__(message)
-class CidNonCriticalError(CidError):
-    def __init__(self, message="", exc=None):
-        super().__init__(message, exc)
-class CidCriticalError(CidError):
-    def __init__(self, message="", exc=None):
-        super().__init__(message, exc)
-class ClientAccessError(Exception):
-    def __init__(self, exc, role_name, account_id, service, region):
-        message = f"({type(exc).__name__}) exception: '{exc}' when getting '{service}' client with role '{role_name}' from account '{account_id}' in region '{region}'"
-        logger.warning(message)
-        super().__init__(message)
-
-
-def lambda_handler(event, context):
+def lambda_handler(event, context): #pylint: disable=unused-argument
     logger.info(f"Incoming event: {json.dumps(event)}")
     key = "account"
-    try:
-        account = json.loads(event[key])
-        main(account, ROLE_NAME, MODULE_NAME, BUCKET)
-
-    except KeyError as e:
+    if key not in event:
         logger.error(f"Lambda event parameter '{key}' not defined (fatal) in {MODULE_NAME} module. Please do not trigger this Lambda manually. "
            f"Find the corresponding {MODULE_NAME} state machine in Step Functions and trigger from there."
        )
         raise RuntimeError(f"(MissingParameterError) Lambda event missing '{key}' parameter")

-    except CidNonCriticalError as exc:
-        pass
-    except CidCriticalError as exc:
-        raise exc
-    except Exception as exc:
-        raise CidCriticalError(f"(UnhandledExceptionError) in {MODULE_NAME} module", exc)
+    account = json.loads(event[key])
+    main(account, ROLE_NAME, MODULE_NAME, BUCKET)

     return {
         'statusCode': 200
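
Note on the hunk above: the custom CidError/CidCriticalError hierarchy is removed; the handler now validates the "account" key up front and lets any other exception propagate, so failures surface directly to the calling Step Functions state machine. A minimal standalone sketch of the same validation pattern (the payload field names here are illustrative, not taken from the template):

import json
import logging

logger = logging.getLogger(__name__)

def handler(event, _context):
    key = "account"
    if key not in event:
        # fail fast with a plain RuntimeError instead of a custom exception class
        raise RuntimeError(f"(MissingParameterError) Lambda event missing '{key}' parameter")
    account = json.loads(event[key])   # the payload arrives as a JSON-encoded string
    return {"statusCode": 200, "account_id": account.get("account_id")}

print(handler({"account": json.dumps({"account_id": "111122223333"})}, None))
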
@@ -188,16 +158,16 @@ Resources:

 def get_api_data(role_name, account_id, start_date, end_date):
     results = []
-    client = get_client_with_role(role_name, account_id, region=REGIONS[0], service="ce")
+    client = get_client_with_role(role_name, account_id, region=REGION, service="ce")
     next_token = None
-    while True:
-        params = dict(
-            DateInterval={
+    while True: # operation get_anomalies cannot be paginated
+        params = {
+            "DateInterval": {
                 'StartDate': str(start_date),
                 'EndDate': str(end_date)
             },
-            MaxResults=100,
-        )
+            "MaxResults": 100,
+        }
         if next_token:
             params['NextPageToken'] = next_token
         response = client.get_anomalies(**params)
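
The loop above pages through Cost Explorer anomalies manually via NextPageToken, since boto3 exposes no paginator for get_anomalies. A self-contained sketch of that pattern (client construction and dates are placeholders):

import boto3
from datetime import date, timedelta

def fetch_anomalies(client, start_date, end_date):
    results = []
    next_token = None
    while True:
        params = {
            "DateInterval": {"StartDate": str(start_date), "EndDate": str(end_date)},
            "MaxResults": 100,
        }
        if next_token:
            params["NextPageToken"] = next_token
        response = client.get_anomalies(**params)
        results += response.get("Anomalies", [])
        next_token = response.get("NextPageToken")
        if not next_token:
            break   # last page reached
    return results

# ce = boto3.client("ce", region_name="us-east-1")   # the module pins the ce client to us-east-1
# anomalies = fetch_anomalies(ce, date.today() - timedelta(days=90), date.today())
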
@@ -212,16 +182,11 @@ Resources:

 def process_records(records, tmp_file):
     count = 0
-    try:
-        with open(tmp_file, "w", encoding='utf-8') as f:
-            for record in records:
-                data = parse_record(record)
-                f.write(to_json(data))
-                f.write("\n")
-                count += 1
-    except Exception as exc:
-        raise CidCriticalError(f"Unhandled exception in process_records", exc)
-
+    with open(tmp_file, "w", encoding='utf-8') as f:
+        for record in records:
+            data = parse_record(record)
+            f.write(to_json(data) + "\n")
+            count += 1
     logger.info(f"Processed a total of {count} new records for account")
     return count

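
process_records now writes plain JSON Lines (one serialized record per line) and lets any I/O error propagate instead of re-wrapping it. A trimmed, runnable sketch with a stand-in serializer:

import json

def write_jsonl(records, tmp_file):
    count = 0
    with open(tmp_file, "w", encoding="utf-8") as f:
        for record in records:
            f.write(json.dumps(record, default=str) + "\n")   # one JSON object per line
            count += 1
    return count

print(write_jsonl([{"AnomalyId": "anomaly-1"}, {"AnomalyId": "anomaly-2"}], "/tmp/example.jsonl"))
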
@@ -244,18 +209,14 @@ Resources:
         'Service': get_value_by_path(record, 'RootCauses/0/Service'),
         'UsageType': get_value_by_path(record, 'RootCauses/0/UsageType')
     }
-    logger.debug(f"Processing record complete")
+    logger.debug("Processing record complete")
     return result


-def upload_to_s3(payer_id, bucket, module_name, tmp_file, sub_name="-"):
-    try:
-        key = datetime.now().strftime(f"{module_name}/{module_name}-data/payer_id={payer_id}/year=%Y/month=%m/day=%d/%Y-%m-%d.json")
-        res = boto3.client('s3').upload_file(tmp_file, bucket, key)
-        logger.info(f"Data stored to s3://{bucket}/{key}")
-
-    except Exception as exc:
-        raise CidFatalError(exc)
+def upload_to_s3(payer_id, bucket, module_name, tmp_file):
+    key = datetime.now().strftime(f"{module_name}/{module_name}-data/payer_id={payer_id}/year=%Y/month=%m/day=%d/%Y-%m-%d.json")
+    boto3.client('s3').upload_file(tmp_file, bucket, key)
+    logger.info(f"Data stored to s3://{bucket}/{key}")


 def get_value_by_path(data, path, default=None):
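
The S3 key in upload_to_s3 is built by first expanding the f-string (module_name, payer_id) and then letting strftime fill in the %Y/%m/%d parts, which yields a Hive-style partitioned path. Illustrative values below; the real module_name and payer_id come from the stack parameters:

from datetime import datetime

module_name, payer_id = "cost-anomaly", "111122223333"   # illustrative values
key = datetime.now().strftime(
    f"{module_name}/{module_name}-data/payer_id={payer_id}/year=%Y/month=%m/day=%d/%Y-%m-%d.json"
)
print(key)
# e.g. cost-anomaly/cost-anomaly-data/payer_id=111122223333/year=2025/month=06/day=01/2025-06-01.json
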
@@ -264,13 +225,11 @@ Resources:
     current = data
     for key in keys:
         if isinstance(current, dict) and key in current:
-            logger.debug(f"Found key {key}")
             current = current.get(key, default)
         elif isinstance(current, list) and key.isdigit():
             try:
                 current = current[int(key)]
-                logger.debug(f"Found index {key}")
-            except (IndexError):
+            except IndexError:
                 logger.debug(f"Index value {key} within path {path} is not valid in get_value_by_path for data {data}, returning default of {default}")
                 return default
         else:
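
get_value_by_path walks a slash-separated path through nested dicts and lists (numeric segments index into lists) and falls back to the default on any miss. A simplified standalone version for illustration; the final else branch is assumed to return the default, as the diff cuts off before it:

def get_value_by_path(data, path, default=None):
    current = data
    for key in path.split('/'):
        if isinstance(current, dict) and key in current:
            current = current.get(key, default)
        elif isinstance(current, list) and key.isdigit():
            try:
                current = current[int(key)]
            except IndexError:
                return default
        else:
            return default   # assumption: unmatched segments fall back to the default
    return current

record = {"RootCauses": [{"Service": "Amazon S3", "UsageType": "Requests-Tier1"}]}
print(get_value_by_path(record, "RootCauses/0/Service"))        # Amazon S3
print(get_value_by_path(record, "RootCauses/1/Service", "-"))   # -
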
@@ -281,24 +240,20 @@ Resources:

 def get_client_with_role(role_name, account_id, service, region):
     logger.debug(f"Attempting to get '{service}' client with role '{role_name}' from account '{account_id}' in region '{region}'")
-    try:
-        credentials = boto3.client('sts').assume_role(
-            RoleArn=f"arn:aws:iam::{account_id}:role/{role_name}",
-            RoleSessionName="data_collection"
-        )['Credentials']
-        logger.debug("Successfully assumed role, now getting client")
-        client = boto3.client(
-            service,
-            region_name = region,
-            aws_access_key_id=credentials['AccessKeyId'],
-            aws_secret_access_key=credentials['SecretAccessKey'],
-            aws_session_token=credentials['SessionToken'],
-        )
-        logger.debug(f"Successfully created '{service}' client with role '{role_name}' from account '{account_id}' in region '{region}'")
-        return client
-
-    except Exception as exc:
-        raise ClientAccessError(exc, role_name, account_id, service, region)
+    credentials = boto3.client('sts').assume_role(
+        RoleArn=f"arn:aws:iam::{account_id}:role/{role_name}",
+        RoleSessionName="data_collection"
+    )['Credentials']
+    logger.debug("Successfully assumed role, now getting client")
+    client = boto3.client(
+        service,
+        region_name = region,
+        aws_access_key_id=credentials['AccessKeyId'],
+        aws_secret_access_key=credentials['SecretAccessKey'],
+        aws_session_token=credentials['SessionToken'],
+    )
+    logger.debug(f"Successfully created '{service}' client with role '{role_name}' from account '{account_id}' in region '{region}'")
+    return client

 def to_json(obj):
     return json.dumps(
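
With the ClientAccessError wrapper removed, a failed AssumeRole in get_client_with_role now surfaces directly as botocore's ClientError and fails the Lambda. If a caller ever needed to catch it explicitly, it would look roughly like this (role name and account id are placeholders):

import boto3
from botocore.exceptions import ClientError

def assume_role_credentials(role_name, account_id):
    try:
        return boto3.client('sts').assume_role(
            RoleArn=f"arn:aws:iam::{account_id}:role/{role_name}",
            RoleSessionName="data_collection",
        )['Credentials']
    except ClientError as exc:
        # e.g. AccessDenied when the target role's trust policy does not allow this Lambda
        print(f"AssumeRole into {account_id} as {role_name} failed: {exc}")
        raise

# creds = assume_role_credentials("DataCollectionManagementRole", "111122223333")
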
@@ -307,7 +262,6 @@ Resources:
         x.isoformat() if isinstance(x, (date, datetime)) else None
     )

-
 def calculate_dates(bucket, s3_path):
     end_date = datetime.now().date()
     start_date = datetime.now().date() - timedelta(days=90) #Cost anomalies are available for last 90days
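
to_json serializes records with a default hook so date/datetime values become ISO-8601 strings instead of raising TypeError, and calculate_dates bounds the query to the 90-day window for which cost anomalies are available. A small sketch reconstructing the likely shape of the serializer from the visible lines (the default= wrapper is inferred, not shown in the diff):

import json
from datetime import date, datetime

def to_json(obj):
    return json.dumps(
        obj,
        default=lambda x: x.isoformat() if isinstance(x, (date, datetime)) else None
    )

print(to_json({"AnomalyStartDate": datetime(2025, 1, 15, 8, 30), "TotalImpact": 42.5}))
# {"AnomalyStartDate": "2025-01-15T08:30:00", "TotalImpact": 42.5}
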
@@ -333,7 +287,7 @@ Resources:
        Variables:
          BUCKET_NAME: !Ref DestinationBucket
          PREFIX: !Ref CFDataName
-         ROLENAME: !Ref ManagementRoleName
+         ROLE_NAME: !Ref ManagementRoleName
    Metadata:
      cfn_nag:
        rules_to_suppress:
