
Commit 40af4bf

fix: multi language support for support cases using the API (#368)
1 parent 5377959 commit 40af4bf

1 file changed: +89 -84 lines changed

data-collection/deploy/module-support-cases.yaml

Lines changed: 89 additions & 84 deletions
@@ -146,7 +146,6 @@ Resources:
 BUCKET = os.environ['BUCKET_NAME']
 ROLE_NAME = os.environ['ROLE_NAME']
 MODULE_NAME = os.environ['MODULE_NAME']
-LANGUAGES = {'en', 'es', 'fr', 'ja', 'ko', 'pt', 'zh'}

 logger = logging.getLogger(__name__)
 logger.setLevel(getattr(logging, os.environ.get('LOG_LEVEL', 'INFO').upper(), logging.INFO))
@@ -161,6 +160,7 @@ Resources:
         raise RuntimeError(f"(MissingParameterError) Lambda event missing '{key}' parameter")

     account = json.loads(event[key])
+
     try:
         main(account, ROLE_NAME, MODULE_NAME, BUCKET)
     except Exception as exc: #pylint: disable=broad-exception-caught
@@ -201,18 +201,23 @@ Resources:

 def main(account, role_name, module_name, bucket): #pylint: disable=too-many-locals
     account_id = account["account_id"]
+    logger.debug(f"==> account_id: '{account["account_id"]}'")
     payer_id = account["payer_id"]
+    logger.debug(f"==> payer_id: '{account["payer_id"]}'")
     account_name = account.get("account_name", None)
+    logger.debug(f"==> account_name: '{account.get("account_name", None)}'")
     support = get_client_with_role(role_name, account_id, region="us-east-1", service="support")
     s3 = boto3.client('s3')

     default_start_date = (datetime.now().date() - timedelta(days=365)).strftime('%Y-%m-%d') # Case communications are available for 12 months after creation.
-
+    logger.debug(f"==> default_start_date: '{default_start_date}'")
     status = {
         "last_read": default_start_date,
         "account_id": account_id,
     }
+    logger.debug(f"==> status: '{status}'")
     status_key = f"{module_name}/{module_name}-status/payer_id={payer_id}/{account_id}.json"
+    logger.debug(f"==> status_key: '{status_key}'")
     try:
         status = json.loads(
             s3.get_object(
@@ -223,96 +228,96 @@ Resources:
     except s3.exceptions.NoSuchKey as exc:
         if exc.response['Error']['Code'] != 'NoSuchKey': # this is fine if there no status file
             raise
-
-    for lang in LANGUAGES:
-        case_iterator = (
+
+    case_iterator = (
+        support
+        .get_paginator('describe_cases')
+        .paginate(
+            afterTime=status["last_read"],
+            includeCommunications=False,
+            includeResolvedCases=True
+        )
+        .search("""cases[].{
+            CaseId: caseId,
+            DisplayId: displayId,
+            Subject: subject,
+            Status: status,
+            ServiceCode: serviceCode,
+            CategoryCode: categoryCode,
+            SeverityCode: severityCode,
+            SubmittedBy: submittedBy,
+            TimeCreated: timeCreated,
+            CCEmailAddresses: ccEmailAddresses,
+            Language: language
+        }""")
+    )
+    for index, data in enumerate(case_iterator):
+        case_id = data['CaseId']
+        logger.debug(f"==> case_id: '{data['CaseId']}'")
+        case_date = datetime.strptime(data["TimeCreated"], '%Y-%m-%dT%H:%M:%S.%fZ')
+        logger.debug(f"==> case_date: '{case_date}'")
+        with open("/tmp/tmp.json", "w", encoding='utf-8') as f:
+            data['AccountAlias'] = account_name
+            data['Summary'] = ''
+            f.write(to_json(data)) # single line per file
+        key = case_date.strftime(
+            f"{module_name}/" +
+            f"{module_name}-data/" +
+            f"payer_id={payer_id}/" +
+            f"account_id={account_id}/" +
+            f"year=%Y/month=%m/day=%d/{case_id}.json"
+        )
+        s3.upload_file("/tmp/tmp.json", bucket, key)
+        logger.debug(f"Data stored to s3://{bucket}/{key}")
+
+        communication_iterator = (
             support
-            .get_paginator('describe_cases')
-            .paginate(
-                afterTime=status["last_read"],
-                includeCommunications=False,
-                includeResolvedCases=True,
-                language=lang,
-            )
-            .search("""cases[].{
+            .get_paginator('describe_communications')
+            .paginate(caseId=case_id)
+            .search("""communications[].{
                 CaseId: caseId,
-                DisplayId: displayId,
-                Subject: subject,
-                Status: status,
-                ServiceCode: serviceCode,
-                CategoryCode: categoryCode,
-                SeverityCode: severityCode,
+                Body: body,
                 SubmittedBy: submittedBy,
                 TimeCreated: timeCreated,
-                CCEmailAddresses: ccEmailAddresses,
-                Language: language
+                AttachmentSet: attachmentSet[0]
             }""")
         )
-
-        for index, data in enumerate(case_iterator):
-            case_id = data['CaseId']
-            case_date = datetime.strptime(data["TimeCreated"], '%Y-%m-%dT%H:%M:%S.%fZ')
-            with open("/tmp/tmp.json", "w", encoding='utf-8') as f:
-                data['AccountAlias'] = account_name
-                data['Summary'] = ''
-                f.write(to_json(data)) # single line per file
-            key = case_date.strftime(
-                f"{module_name}/" +
-                f"{module_name}-data/" +
-                f"payer_id={payer_id}/" +
-                f"account_id={account_id}/" +
-                f"year=%Y/month=%m/day=%d/{case_id}.json"
-            )
-            s3.upload_file("/tmp/tmp.json", bucket, key)
-            logger.debug(f"Data stored to s3://{bucket}/{key}")
-
-            communication_iterator = (
-                support
-                .get_paginator('describe_communications')
-                .paginate(caseId=case_id)
-                .search("""communications[].{
-                    CaseId: caseId,
-                    Body: body,
-                    SubmittedBy: submittedBy,
-                    TimeCreated: timeCreated,
-                    AttachmentSet: attachmentSet[0]
-                }""")
-            )
-            with open("/tmp/tmp.json", "w", encoding='utf-8') as f:
-                for communication in communication_iterator:
-                    communication['AccountAlias'] = account_name
-                    f.write(to_json(communication) + '\n')
-            key = case_date.strftime(
-                f"{module_name}/" +
-                f"{module_name}-communications/" +
-                f"payer_id={payer_id}/" +
-                f"account_id={account_id}/" +
-                f"year=%Y/month=%m/day=%d/{case_id}.json"
-            )
-            boto3.client('s3').upload_file("/tmp/tmp.json", bucket, key)
-            logger.info(f"Processed a total of {index+1} support cases")
-            logger.info(f"Sending Support case {data['CaseId']} for summarization ...")
-            message = {
-                'Bucket': bucket,
-                'CommunicationsKey': key
-            }
-            eventbridge = boto3.client('events')
-            response = eventbridge.put_events(
-                Entries=[
-                    {
-                        'Source': 'supportcases.datacollection.cid.aws',
-                        'DetailType': 'Event',
-                        'Detail': json.dumps(message)
-                    }
-                ]
-            )
-            failed_entry_count = response['FailedEntryCount']
-            if failed_entry_count > 0:
-                logger.info(f"Failed to send support case event for {case_id} to Eventbridge default bus.")
-            else:
-                logger.info(f"Support case event for {case_id} successfully sent to Eventbridge default bus and has Event ID: {response['Entries'][0]['EventId']}")
+        with open("/tmp/tmp.json", "w", encoding='utf-8') as f:
+            for communication in communication_iterator:
+                communication['AccountAlias'] = account_name
+                f.write(to_json(communication) + '\n')
+        key = case_date.strftime(
+            f"{module_name}/" +
+            f"{module_name}-communications/" +
+            f"payer_id={payer_id}/" +
+            f"account_id={account_id}/" +
+            f"year=%Y/month=%m/day=%d/{case_id}.json"
+        )
+        boto3.client('s3').upload_file("/tmp/tmp.json", bucket, key)
+        logger.info(f"Processed a total of {index+1} support cases")
+        logger.info(f"Sending Support case {data['CaseId']} for summarization ...")
+        message = {
+            'Bucket': bucket,
+            'CommunicationsKey': key
+        }
+        eventbridge = boto3.client('events')
+        response = eventbridge.put_events(
+            Entries=[
+                {
+                    'Source': 'supportcases.datacollection.cid.aws',
+                    'DetailType': 'Event',
+                    'Detail': json.dumps(message)
+                }
+            ]
+        )
+        failed_entry_count = response['FailedEntryCount']
+        if failed_entry_count > 0:
+            logger.info(f"Failed to send support case event for {case_id} to Eventbridge default bus.")
+        else:
+            logger.info(f"Support case event for {case_id} successfully sent to Eventbridge default bus and has Event ID: {response['Entries'][0]['EventId']}")

     status["last_read"] = datetime.now().strftime('%Y-%m-%d')
+    logger.debug(f"==> last_read: '{status["last_read"]}'")
     s3.put_object(
         Bucket=bucket,
         Key=status_key,
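
What the diff boils down to: the old code looped over a hard-coded LANGUAGES set and called describe_cases once per language with language=lang; the new code makes a single describe_cases pass with no language argument and instead keeps each case's own language field (projected as Language) in the stored JSON. Below is a minimal standalone sketch of that pattern, not part of the commit: it assumes boto3 credentials for an account with a Business or Enterprise Support plan (the AWS Support API requires one) and, like the module, talks to the Support endpoint in us-east-1.

    # Sketch only: one paginated describe_cases pass, no per-language calls.
    # Each returned case carries its own 'language' attribute, which the
    # JMESPath projection exposes alongside the case metadata.
    import boto3

    support = boto3.client('support', region_name='us-east-1')  # Support API is served from us-east-1

    cases = (
        support
        .get_paginator('describe_cases')
        .paginate(includeCommunications=False, includeResolvedCases=True)
        .search('cases[].{CaseId: caseId, Subject: subject, Language: language}')
    )
    for case in cases:
        print(case['CaseId'], case['Language'], case['Subject'])

This mirrors the module's approach of carrying the case language through to the stored JSON rather than issuing one API pass per language.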
