Skip to content

Commit 42bbc13

Browse files
authored
Merge pull request #99 from OpenLXP/code-sync
Latest sync with P1 master branch
2 parents 3209c91 + f201705 commit 42bbc13

File tree

78 files changed

+4013
-924
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

78 files changed

+4013
-924
lines changed

.github/workflows/cd-workflows.yml

Lines changed: 0 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -51,50 +51,3 @@ jobs:
5151
echo "Docker network successfully created"
5252
echo "Running coverage unit test"
5353
docker compose --env-file ./.env run app_xis sh -c "python manage.py waitdb && coverage run manage.py test --tag=unit && flake8 && coverage report && coverage report --fail-under=80"
54-
55-
# sonarcloud:
56-
# name: SonarCloud
57-
# runs-on: ubuntu-latest
58-
# steps:
59-
# - uses: actions/checkout@v2
60-
# with:
61-
# fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
62-
# - name: SonarCloud Scan
63-
# uses: SonarSource/sonarcloud-github-action@master
64-
# env:
65-
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any
66-
# SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
67-
68-
build:
69-
# requires dependency from step above
70-
needs: code-test
71-
name: Build Docker Image
72-
runs-on: ubuntu-latest
73-
74-
steps:
75-
- name: Checkout Code
76-
uses: actions/checkout@v2
77-
78-
- name: Configure AWS credentials
79-
uses: aws-actions/configure-aws-credentials@v1
80-
with:
81-
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
82-
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
83-
aws-region: ${{ secrets.AWS_REGION }}
84-
85-
- name: Login to Amazon ECR
86-
id: login-ecr
87-
uses: aws-actions/amazon-ecr-login@v1
88-
with:
89-
mask-password: 'true'
90-
91-
- name: Build, tag, and push image to Amazon ECR
92-
env:
93-
ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
94-
ECR_REPOSITORY: ${{ secrets.ECR_REPO }}
95-
IMAGE_TAG: latest
96-
run: |
97-
echo "Starting docker build"
98-
docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG .
99-
echo "Pushing image to ECR..."
100-
docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG

app/api/serializers.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,6 @@ def validate(self, data):
102102
# validate for recommended values in data
103103
validate_recommended(
104104
data, recommended_column_list, flattened_source_data)
105-
106105
# Type checking for values in metadata
107106
for item in flattened_source_data:
108107
# check if datatype has been assigned to field
@@ -139,6 +138,7 @@ def validate(self, data):
139138
raise serializers.ValidationError("Metadata has missing fields. "
140139
"Data did not pass validation."
141140
"Check logs for more details")
141+
142142
return data
143143

144144
def update(self, instance, validated_data):

app/api/views.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -113,6 +113,8 @@ def filter_queryset(self, queryset):
113113
def post(self, request):
114114
"""This method defines the API's to save data to the
115115
metadata ledger in the XIS"""
116+
logger.info("Start processing")
117+
logger.error("Incoming experience")
116118

117119
# Add optional/recommended fields to the metadata
118120
extra_fields = get_optional_and_recommended_fields_for_validation()
@@ -346,31 +348,31 @@ def post(self, request, provider_id, experience_id):
346348

347349

348350
@api_view(['GET'])
349-
@permission_classes((permissions.AllowAny,))
351+
@permission_classes((permissions.IsAdminUser,))
350352
def xis_workflow_api(request):
351353
logger.info('XIS workflow api')
352354
task = xis_workflow.delay()
353355
return JsonResponse({"task_id": task.id}, status=status.HTTP_202_ACCEPTED)
354356

355357

356358
@api_view(['GET'])
357-
@permission_classes((permissions.AllowAny,))
359+
@permission_classes((permissions.IsAdminUser,))
358360
def xis_downstream_workflow_api(request):
359361
logger.info('Downstream workflow api')
360362
task = xis_downstream_workflow.delay()
361363
return JsonResponse({"task_id": task.id}, status=status.HTTP_202_ACCEPTED)
362364

363365

364366
@api_view(['GET'])
365-
@permission_classes((permissions.AllowAny,))
367+
@permission_classes((permissions.IsAdminUser,))
366368
def xis_upstream_workflow_api(request):
367369
logger.info('Upstream workflow api')
368370
task = xis_upstream_workflow.delay()
369371
return JsonResponse({"task_id": task.id}, status=status.HTTP_202_ACCEPTED)
370372

371373

372374
@api_view(['GET'])
373-
@permission_classes((permissions.AllowAny,))
375+
@permission_classes((permissions.IsAdminUser,))
374376
def get_status(request, task_id):
375377
task_result = AsyncResult(task_id)
376378
result = {

app/core/fixtures/admin_theme_data.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@
4141
"css_delete_button_background_color": "#BA2121",
4242
"css_delete_button_background_hover_color": "#A41515",
4343
"css_delete_button_text_color": "#FFFFFF",
44-
"related_modal_active": true,
44+
"related_modal_active": false,
4545
"related_modal_background_color": "#000000",
4646
"related_modal_background_opacity": "0.3",
4747
"related_modal_rounded_corners": true,

app/core/management/commands/load_metadata_from_xis.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -35,20 +35,21 @@ def retrieve_records(self, upstream):
3535

3636
xis_response = requests.get(
3737
url=upstream.xis_api_endpoint + 'metadata/',
38-
headers=headers)
38+
headers=headers, timeout=3.0)
3939

4040
while (xis_response.status_code//10 == 20):
4141
for record in xis_response.json()['results']:
4242
self.save_record(upstream, bleach_data_to_json(record))
4343

4444
if (xis_response.json()['next'] is not None):
4545
xis_response = requests.get(
46-
url=xis_response.json()['next'], headers=headers)
46+
url=xis_response.json()['next'], headers=headers,
47+
timeout=3.0)
4748
else:
4849
return
4950

5051
logger.error(
51-
f"HTTP Error {xis_response.status_code} from {upstream}")
52+
"HTTP Error %s from %s", xis_response.status_code, upstream)
5253

5354
def save_record(self, upstream, record):
5455
"""saves record to metadata and supplemental ledgers as needed"""

app/core/management/commands/load_metadata_into_xis.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -76,9 +76,9 @@ def send_record(self, downstream, record):
7676
url=f'{downstream.xis_api_endpoint}managed-data/catalogs/'
7777
f'{record["provider_name"]}/{record["metadata_key_hash"]}',
7878
data=json.dumps(CompositeLedgerSerializer(record).data),
79-
headers=headers)
79+
headers=headers, timeout=3.0)
8080

81-
if (xis_response.status_code//10 == 20):
81+
if(xis_response.status_code//10 == 20):
8282
downstream.composite_experiences.add(
8383
record['unique_record_identifier'])
8484
else:
@@ -96,10 +96,10 @@ def handle(self, *args, **options):
9696
downstream_apis = XISDownstream.objects.all().filter(
9797
xis_api_endpoint_status=XISDownstream.ACTIVE)
9898
# if there are ids as an arg, filter to only those ids
99-
if ('id' in options and options['id']):
99+
if('id' in options and options['id']):
100100
downstream_apis = downstream_apis.filter(pk__in=options['id'])
101101
# if there are apis as an arg, filter to only those apis
102-
if ('api' in options and options['api']):
102+
if('api' in options and options['api']):
103103
downstream_apis = downstream_apis.filter(
104104
xis_api_endpoint__in=options['api'])
105105

@@ -109,7 +109,7 @@ def handle(self, *args, **options):
109109
queryset = ds.apply_filter().values()
110110
# get the fields that should be included/excluded in records
111111
include, exclude = ds.determine_fields()
112-
if (include):
112+
if(include):
113113
for record in queryset:
114114
metadata = self.__add_fields(include, record)
115115
metadata = self.__remove_fields(exclude, metadata)

app/core/management/utils/xis_internal.py

Lines changed: 28 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
import html
21
import logging
32

43
import bleach
@@ -11,35 +10,44 @@
1110
def required_recommended_logs(id_num, category, field):
1211
"""logs the missing required and recommended """
1312

13+
RECORD = "Record"
14+
1415
# Logs the missing required columns
1516
if category == 'Required':
1617
logger.error(
17-
"Record " + str(
18-
id_num) + " does not have all " + category +
19-
" fields. "
20-
+ field + " field is empty")
21-
18+
"%s %s does not have all %s fields. %s field is empty",
19+
RECORD,
20+
id_num,
21+
category,
22+
field
23+
)
2224
# Logs the missing recommended columns
2325
if category == 'Recommended':
2426
logger.warning(
25-
"Record " + str(
26-
id_num) + " does not have all " + category +
27-
" fields. "
28-
+ field + " field is empty")
29-
27+
"%s %s does not have all %s fields. %s field is empty",
28+
RECORD,
29+
id_num,
30+
category,
31+
field
32+
)
3033
# Logs the inaccurate datatype columns
3134
if category == 'datatype':
3235
logger.warning(
33-
"Record " + str(
34-
id_num) + " does not have the expected " + category +
35-
" for the field " + field)
36-
36+
"%s %s does not have the expected %s for the field %s",
37+
RECORD,
38+
id_num,
39+
category,
40+
field
41+
)
42+
# Logs the preferred alias during homoglyph check
3743
if category == 'homoglyphs':
3844
logger.error(
39-
"Record " + str(
40-
id_num) + " does not have the expected " + "preferred aliases "
41-
"for the field " +
42-
field)
45+
"%s %s does not have the expected "
46+
"preferred aliases for the field %s",
47+
RECORD,
48+
id_num,
49+
field
50+
)
4351

4452

4553
def dict_flatten(data_dict, required_column_list):
@@ -213,10 +221,10 @@ def bleach_data_to_json(rdata):
213221
# if string, clean
214222
if isinstance(rdata[key], str):
215223
rdata[key] = bleach.clean(rdata[key], tags={}, strip=True)
216-
rdata[key] = html.unescape(rdata[key])
217224
# if dict, enter dict
218225
if isinstance(rdata[key], dict):
219226
rdata[key] = bleach_data_to_json(rdata[key])
227+
220228
return rdata
221229

222230

app/core/management/utils/xss_client.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -39,10 +39,10 @@ def read_json_data(schema_ref):
3939

4040
def get_target_validation_schema():
4141
"""Retrieve target validation schema from XIS configuration """
42-
logger.info("Configuration of schemas and files")
42+
logger.error("Configuration of schemas and files")
4343
data = XISConfiguration.objects.first()
4444
target_validation_schema = data.target_schema
45-
logger.info("Reading schema for validation")
45+
logger.error("Reading schema for validation")
4646
# Read source validation schema as dictionary
4747
schema_data_dict = read_json_data(target_validation_schema)
4848
return schema_data_dict

app/core/migrations/0012_auto_20230517_1537.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
# Generated by Django 3.2.19 on 2023-05-17 15:37
22

3-
import django.db.models.deletion
43
from django.conf import settings
54
from django.db import migrations, models
5+
import django.db.models.deletion
66

77

88
class Migration(migrations.Migration):

app/core/tests/test_commands_integration.py

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -3,14 +3,12 @@
33

44
from ddt import ddt
55
from django.test import tag
6-
from elasticsearch import Elasticsearch
76

87
from core.management.commands.consolidate_ledgers import (
98
check_metadata_ledger_transmission_ready_record,
109
put_metadata_ledger_into_composite_ledger)
1110
from core.management.commands.load_metadata_into_xse import (
1211
check_records_to_load_into_xse, post_data_to_xse)
13-
from core.management.utils.xse_client import get_elasticsearch_endpoint
1412
from core.models import CompositeLedger, MetadataLedger
1513

1614
from .test_setup import TestSetUp
@@ -103,11 +101,6 @@ def test_post_data_to_xse_created(self):
103101
"""Test for POSTing XIS composite_ledger to XSE in JSON format when
104102
record gets created in XSE"""
105103

106-
es = Elasticsearch(get_elasticsearch_endpoint())
107-
108-
if(es.indices.exists('testing_index')):
109-
es.indices.delete('testing_index')
110-
111104
self.composite_ledger.save()
112105

113106
data = CompositeLedger.objects.filter(

0 commit comments

Comments
 (0)