Skip to content

Commit 07c7e2b

Browse files
authored
Merge pull request #98 from OpenLXP/dev
Dev
2 parents 6d399f6 + bb1bcee commit 07c7e2b

File tree

84 files changed

+4055
-936
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

84 files changed

+4055
-936
lines changed

.github/workflows/cd-workflows.yml

Lines changed: 0 additions & 48 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@ on:
77
pull_request:
88
branches:
99
- main
10-
- dev
1110
jobs:
1211
code-test:
1312
# Kicks off the workflow and prepares Github to run coverage test using a ubuntu-latest container.
@@ -51,50 +50,3 @@ jobs:
5150
echo "Docker network successfully created"
5251
echo "Running coverage unit test"
5352
docker compose --env-file ./.env run app_xis sh -c "python manage.py waitdb && coverage run manage.py test --tag=unit && flake8 && coverage report && coverage report --fail-under=80"
54-
55-
sonarcloud:
56-
name: SonarCloud
57-
runs-on: ubuntu-latest
58-
steps:
59-
- uses: actions/checkout@v2
60-
with:
61-
fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
62-
- name: SonarCloud Scan
63-
uses: SonarSource/sonarcloud-github-action@master
64-
env:
65-
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any
66-
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
67-
68-
build:
69-
# requires dependency from step above
70-
needs: code-test
71-
name: Build Docker Image
72-
runs-on: ubuntu-latest
73-
74-
steps:
75-
- name: Checkout Code
76-
uses: actions/checkout@v2
77-
78-
- name: Configure AWS credentials
79-
uses: aws-actions/configure-aws-credentials@v1
80-
with:
81-
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
82-
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
83-
aws-region: ${{ secrets.AWS_REGION }}
84-
85-
- name: Login to Amazon ECR
86-
id: login-ecr
87-
uses: aws-actions/amazon-ecr-login@v1
88-
with:
89-
mask-password: 'true'
90-
91-
- name: Build, tag, and push image to Amazon ECR
92-
env:
93-
ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
94-
ECR_REPOSITORY: ${{ secrets.ECR_REPO }}
95-
IMAGE_TAG: xis
96-
run: |
97-
echo "Starting docker build"
98-
docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG .
99-
echo "Pushing image to ECR..."
100-
docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG

Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
# Dockerfile
22

3-
FROM python:3.9-buster
3+
FROM python:3.9-bookworm
44

55
# install nginx
66

app/api/management/utils/api_helper_functions.py

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,10 @@ def add_metadata_ledger(data, experience_id):
3131
data['unique_record_identifier'] = str(uuid.uuid4())
3232

3333
# sorting the metadata for consistency
34-
data['metadata'] = multi_dict_sort(data['metadata'])
34+
if 'metadata' in data:
35+
data['metadata'] = multi_dict_sort(data['metadata'])
36+
else:
37+
data['metadata'] = {}
3538

3639
# create hash values of metadata and supplemental data
3740
metadata_hash = hashlib.sha512(str(data['metadata']).encode(
@@ -78,7 +81,10 @@ def add_supplemental_ledger(data, experience_id):
7881
data['unique_record_identifier'] = str(uuid.uuid4())
7982

8083
# sorting the metadata for consistency
81-
data['metadata'] = multi_dict_sort(data['metadata'])
84+
if 'metadata' in data:
85+
data['metadata'] = multi_dict_sort(data['metadata'])
86+
else:
87+
data['metadata'] = {}
8288

8389
# create hash values of metadata and supplemental data
8490
supplemental_hash = hashlib.sha512(str(data['metadata'])

app/api/serializers.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,6 @@ def validate(self, data):
102102
# validate for recommended values in data
103103
validate_recommended(
104104
data, recommended_column_list, flattened_source_data)
105-
106105
# Type checking for values in metadata
107106
for item in flattened_source_data:
108107
# check if datatype has been assigned to field
@@ -139,6 +138,7 @@ def validate(self, data):
139138
raise serializers.ValidationError("Metadata has missing fields. "
140139
"Data did not pass validation."
141140
"Check logs for more details")
141+
142142
return data
143143

144144
def update(self, instance, validated_data):

app/api/tests/test_setup.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,13 +12,19 @@
1212
from core.models import (CompositeLedger, MetadataLedger, SupplementalLedger,
1313
XISConfiguration)
1414

15+
from django.test import override_settings
16+
1517

1618
class TestSetUp(APITestCase):
1719
"""Class with setup and teardown for tests in XIS"""
1820

1921
def setUp(self):
2022
"""Function to set up necessary data for testing"""
2123

24+
settings_manager = override_settings(SECURE_SSL_REDIRECT=False)
25+
settings_manager.enable()
26+
self.addCleanup(settings_manager.disable)
27+
2228
self.su_username = "super@test.com"
2329
self.su_password = "1234"
2430

app/api/views.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -113,6 +113,8 @@ def filter_queryset(self, queryset):
113113
def post(self, request):
114114
"""This method defines the APIs to save data to the
115115
metadata ledger in the XIS"""
116+
logger.info("Start processing")
117+
logger.error("Incoming experience")
116118

117119
# Add optional/recommended fields to the metadata
118120
extra_fields = get_optional_and_recommended_fields_for_validation()
@@ -346,31 +348,31 @@ def post(self, request, provider_id, experience_id):
346348

347349

348350
@api_view(['GET'])
349-
@permission_classes((permissions.AllowAny,))
351+
@permission_classes((permissions.IsAdminUser,))
350352
def xis_workflow_api(request):
351353
logger.info('XIS workflow api')
352354
task = xis_workflow.delay()
353355
return JsonResponse({"task_id": task.id}, status=status.HTTP_202_ACCEPTED)
354356

355357

356358
@api_view(['GET'])
357-
@permission_classes((permissions.AllowAny,))
359+
@permission_classes((permissions.IsAdminUser,))
358360
def xis_downstream_workflow_api(request):
359361
logger.info('Downstream workflow api')
360362
task = xis_downstream_workflow.delay()
361363
return JsonResponse({"task_id": task.id}, status=status.HTTP_202_ACCEPTED)
362364

363365

364366
@api_view(['GET'])
365-
@permission_classes((permissions.AllowAny,))
367+
@permission_classes((permissions.IsAdminUser,))
366368
def xis_upstream_workflow_api(request):
367369
logger.info('Upstream workflow api')
368370
task = xis_upstream_workflow.delay()
369371
return JsonResponse({"task_id": task.id}, status=status.HTTP_202_ACCEPTED)
370372

371373

372374
@api_view(['GET'])
373-
@permission_classes((permissions.AllowAny,))
375+
@permission_classes((permissions.IsAdminUser,))
374376
def get_status(request, task_id):
375377
task_result = AsyncResult(task_id)
376378
result = {

app/core/fixtures/admin_theme_data.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@
4141
"css_delete_button_background_color": "#BA2121",
4242
"css_delete_button_background_hover_color": "#A41515",
4343
"css_delete_button_text_color": "#FFFFFF",
44-
"related_modal_active": true,
44+
"related_modal_active": false,
4545
"related_modal_background_color": "#000000",
4646
"related_modal_background_opacity": "0.3",
4747
"related_modal_rounded_corners": true,

app/core/management/commands/load_metadata_from_xis.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -35,20 +35,21 @@ def retrieve_records(self, upstream):
3535

3636
xis_response = requests.get(
3737
url=upstream.xis_api_endpoint + 'metadata/',
38-
headers=headers)
38+
headers=headers, timeout=3.0)
3939

4040
while (xis_response.status_code//10 == 20):
4141
for record in xis_response.json()['results']:
4242
self.save_record(upstream, bleach_data_to_json(record))
4343

4444
if (xis_response.json()['next'] is not None):
4545
xis_response = requests.get(
46-
url=xis_response.json()['next'], headers=headers)
46+
url=xis_response.json()['next'], headers=headers,
47+
timeout=3.0)
4748
else:
4849
return
4950

5051
logger.error(
51-
f"HTTP Error {xis_response.status_code} from {upstream}")
52+
"HTTP Error %s from %s", xis_response.status_code, upstream)
5253

5354
def save_record(self, upstream, record):
5455
"""saves record to metadata and supplemental ledgers as needed"""

app/core/management/commands/load_metadata_into_xis.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -76,9 +76,9 @@ def send_record(self, downstream, record):
7676
url=f'{downstream.xis_api_endpoint}managed-data/catalogs/'
7777
f'{record["provider_name"]}/{record["metadata_key_hash"]}',
7878
data=json.dumps(CompositeLedgerSerializer(record).data),
79-
headers=headers)
79+
headers=headers, timeout=3.0)
8080

81-
if (xis_response.status_code//10 == 20):
81+
if(xis_response.status_code//10 == 20):
8282
downstream.composite_experiences.add(
8383
record['unique_record_identifier'])
8484
else:
@@ -96,10 +96,10 @@ def handle(self, *args, **options):
9696
downstream_apis = XISDownstream.objects.all().filter(
9797
xis_api_endpoint_status=XISDownstream.ACTIVE)
9898
# if there are ids as an arg, filter to only those ids
99-
if ('id' in options and options['id']):
99+
if('id' in options and options['id']):
100100
downstream_apis = downstream_apis.filter(pk__in=options['id'])
101101
# if there are apis as an arg, filter to only those apis
102-
if ('api' in options and options['api']):
102+
if('api' in options and options['api']):
103103
downstream_apis = downstream_apis.filter(
104104
xis_api_endpoint__in=options['api'])
105105

@@ -109,7 +109,7 @@ def handle(self, *args, **options):
109109
queryset = ds.apply_filter().values()
110110
# get the fields that should be included/excluded in records
111111
include, exclude = ds.determine_fields()
112-
if (include):
112+
if(include):
113113
for record in queryset:
114114
metadata = self.__add_fields(include, record)
115115
metadata = self.__remove_fields(exclude, metadata)

app/core/management/utils/xis_internal.py

Lines changed: 28 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
import html
21
import logging
32

43
import bleach
@@ -11,35 +10,44 @@
1110
def required_recommended_logs(id_num, category, field):
1211
"""logs the missing required and recommended """
1312

13+
RECORD = "Record"
14+
1415
# Logs the missing required columns
1516
if category == 'Required':
1617
logger.error(
17-
"Record " + str(
18-
id_num) + " does not have all " + category +
19-
" fields. "
20-
+ field + " field is empty")
21-
18+
"%s %s does not have all %s fields. %s field is empty",
19+
RECORD,
20+
id_num,
21+
category,
22+
field
23+
)
2224
# Logs the missing recommended columns
2325
if category == 'Recommended':
2426
logger.warning(
25-
"Record " + str(
26-
id_num) + " does not have all " + category +
27-
" fields. "
28-
+ field + " field is empty")
29-
27+
"%s %s does not have all %s fields. %s field is empty",
28+
RECORD,
29+
id_num,
30+
category,
31+
field
32+
)
3033
# Logs the inaccurate datatype columns
3134
if category == 'datatype':
3235
logger.warning(
33-
"Record " + str(
34-
id_num) + " does not have the expected " + category +
35-
" for the field " + field)
36-
36+
"%s %s does not have the expected %s for the field %s",
37+
RECORD,
38+
id_num,
39+
category,
40+
field
41+
)
42+
# Logs the preferred alias during homoglyph check
3743
if category == 'homoglyphs':
3844
logger.error(
39-
"Record " + str(
40-
id_num) + " does not have the expected " + "preferred aliases "
41-
"for the field " +
42-
field)
45+
"%s %s does not have the expected "
46+
"preferred aliases for the field %s",
47+
RECORD,
48+
id_num,
49+
field
50+
)
4351

4452

4553
def dict_flatten(data_dict, required_column_list):
@@ -213,10 +221,10 @@ def bleach_data_to_json(rdata):
213221
# if string, clean
214222
if isinstance(rdata[key], str):
215223
rdata[key] = bleach.clean(rdata[key], tags={}, strip=True)
216-
rdata[key] = html.unescape(rdata[key])
217224
# if dict, enter dict
218225
if isinstance(rdata[key], dict):
219226
rdata[key] = bleach_data_to_json(rdata[key])
227+
220228
return rdata
221229

222230

0 commit comments

Comments
 (0)