Commit 115b366

Merge pull request #154 from ImagingDataCommons/idc-prod-sp
Sprint 15 Release
2 parents 508bce9 + e325468 commit 115b366

13 files changed (+838 / -874 lines)


cohorts/decorators.py

Lines changed: 17 additions & 9 deletions
@@ -29,22 +29,30 @@
 def api_auth(function):
     def wrap(request, *args, **kwargs):
         try:
-            auth_header = request.META.get('HTTP_AUTHORIZATION',b'')
+            auth_header = request.META.get('HTTP_AUTHORIZATION',None)
+            if not auth_header:
+                logger.error("No Authorization header found for API call!")
+                return JsonResponse({'message': 'No API authorization header - please be sure to provide an API token for API calls.'}, status=403)
+
             # Force local dev to behave like deployed system
-            if settings.DEBUG:
-                auth_header = auth_header.encode('iso-8859-1')
+            # if settings.DEBUG:
+            #     if isinstance(auth_header,str):
+            #         auth_header = auth_header.encode('iso-8859-1')
             auth_header = auth_header.split()
 
-            # Check for our Auth Header Token key, whatever that is.
-            if not auth_header or auth_header[0].lower() != settings.API_AUTH_KEY.lower().encode():
-                return JsonResponse({'message':'API access token not provided, or the wrong key was used.'},status=403)
-
             # Make sure our Auth Header is the expected size
             if len(auth_header) == 1 or len(auth_header) > 2:
-                return JsonResponse({'message': 'API access token not provided, or the wrong key was used.'},status=403)
+                logger.error("Malformed Authorization header: {}".format(auth_header))
+                return JsonResponse({'message': 'Received malformed API authorization header.'},status=403)
+
+            # Check for our Auth Header Token key
+            if auth_header[0].lower() != settings.API_AUTH_KEY.lower():
+                logger.error("Invalid API Token key; received: {} - expected {}".format(
+                    auth_header[0].lower(), settings.API_AUTH_KEY.lower()))
+                return JsonResponse({'message':'API Auth token key not recognized.'},status=403)
 
             # Now actually validate with the token
-            token = auth_header[1].decode()
+            token = auth_header[1]
             Token.objects.select_related('user').get(key=token)
 
             # If a user was found, we've received a valid API call, and can proceed.
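
With this change, api_auth() works on the raw Authorization header string (no byte-decoding) and requires it to split into exactly two parts: the key configured in settings.API_AUTH_KEY, then the DRF token validated against Token.objects. A minimal sketch of a conforming client call; the endpoint URL and the "Token" key value are assumptions, not taken from this diff:

import requests

API_TOKEN = "0123456789abcdef0123456789abcdef01234567"  # hypothetical DRF token

# Hypothetical endpoint guarded by @api_auth; the header key must match
# settings.API_AUTH_KEY ("Token" is assumed here).
response = requests.get(
    "https://example.org/cohorts/api/v1/cohorts/",
    headers={"Authorization": "Token {}".format(API_TOKEN)},
)

# Per the decorator above: a missing header returns 403 ("No API authorization
# header..."), a header with other than two parts returns 403 ("Received
# malformed API authorization header."), and an unrecognized key returns 403
# ("API Auth token key not recognized.").
print(response.status_code)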

cohorts/models.py

Lines changed: 18 additions & 5 deletions
@@ -100,20 +100,21 @@ def is_public(self):
 
     # Returns the data versions identified in the filter groups for this cohort
     # Returns a DataVersion QuerySet
-    def get_data_versions(self):
+    def get_data_versions(self, active=None):
 
-        data_versions = ImagingDataCommonsVersion.objects.filter(id__in=self.filter_group_set.all().values_list('data_version',flat=True))
+        data_versions = ImagingDataCommonsVersion.objects.filter(id__in=self.filter_group_set.all().values_list('data_version',flat=True)) \
+            if active is None else ImagingDataCommonsVersion.objects.filter(active=active, id__in=self.filter_group_set.all().values_list('data_version',flat=True))
 
         return data_versions.distinct()
 
     # Returns the list of data sources used by this cohort, as a function of the filters which define it
     # Return values can be
-    def get_data_sources(self, source_type=DataSource.SOLR):
+    def get_data_sources(self, source_type=DataSource.SOLR, active=None):
 
         cohort_filters = Filter.objects.select_related('attribute').filter(resulting_cohort=self)
         attributes = Attribute.objects.filter(id__in=cohort_filters.values_list('attribute', flat=True))
 
-        data_versions = self.get_data_versions()
+        data_versions = self.get_data_versions(active=active)
 
         sources = attributes.get_data_sources(data_versions, source_type)
 
@@ -136,6 +137,18 @@ def get_filters_by_data_source(self, source_type=None):
 
         return result
 
+
+    # Returns a dict of the filters defining this cohort organized by filter group
+    def get_filters_as_dict_simple(self):
+        result = []
+
+        filter_groups = self.filter_group_set.all()
+
+        for fg in filter_groups:
+            result.append(fg.filter_set.all().get_filter_set())
+        return result
+
+
     # Returns a dict of the filters defining this cohort organized by filter group
     def get_filters_as_dict(self):
         result = []
@@ -195,7 +208,7 @@ def get_filters_for_bq(self, prefix=None, suffix=None, counts=False, schema=None
         group_filter_dict = self.get_filters_as_dict()
 
         for group in group_filter_dict:
-            group_filters = {x: [y for y in x['values']] for x in group['filters']}
+            group_filters = {x['name']: [y for y in x['values']] for x in group['filters']}
             filter_sets.append(BigQuerySupport.build_bq_filter_and_params(
                 group_filters, field_prefix=prefix, param_suffix=suffix, with_count_toggle=counts,
                 type_schema=schema
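
The new active keyword narrows a cohort's versions and sources to active ImagingDataCommonsVersion rows, and get_filters_as_dict_simple() returns one flat filter set per filter group. A short usage sketch; the import path, the model name Cohort, the cohort id, and the example filter values are assumptions for illustration:

from cohorts.models import Cohort  # assumes the model class is named Cohort

cohort = Cohort.objects.get(id=1)  # hypothetical cohort id

all_versions = cohort.get_data_versions()                # default: versions from all filter groups
active_versions = cohort.get_data_versions(active=True)  # new: only active IDC versions
active_sources = cohort.get_data_sources(active=True)    # 'active' is passed through to get_data_versions()

# One entry per filter group, each the result of filter_set.all().get_filter_set(),
# e.g. [{'collection_id': ['tcga_luad']}] (example values are illustrative only).
simple_filters = cohort.get_filters_as_dict_simple()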

cohorts/utils.py

Lines changed: 2 additions & 2 deletions
@@ -148,7 +148,7 @@ def _save_cohort(user, filters=None, name=None, cohort_id=None, version=None, de
 
     return cohort_info
 
-def cohort_manifest(cohort, user, fields, limit):
+def cohort_manifest(cohort, user, fields, limit, offset):
     try:
         sources = cohort.get_data_sources()
         versions = cohort.get_data_versions()
@@ -158,7 +158,7 @@ def cohort_manifest(cohort, user, fields, limit):
         filters = {x['name']: x['values'] for x in group_filters[0]['filters']}
 
         cohort_records = get_collex_metadata(
-            filters, fields, limit, sources=sources, versions=versions, counts_only=False,
+            filters, fields, limit, offset, sources=sources, versions=versions, counts_only=False,
             collapse_on='SOPInstanceUID', records_only=True, sort="PatientID asc, StudyInstanceUID asc, SeriesInstanceUID asc, SOPInstanceUID asc")
 
         return cohort_records
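
cohort_manifest() now forwards an offset to get_collex_metadata(), which allows paging through a manifest rather than only capping it with limit. A paging sketch under stated assumptions: the page size is arbitrary, and the shape of the returned records is not shown in this diff, so the docs handling below is a guess:

from cohorts.utils import cohort_manifest

PAGE_SIZE = 500  # arbitrary page size

def fetch_full_manifest(cohort, user, fields):
    """Collect a cohort manifest page by page using the new offset argument."""
    offset = 0
    records = []
    while True:
        page = cohort_manifest(cohort, user, fields, limit=PAGE_SIZE, offset=offset)
        # Assumed shape: either a plain list of records or a dict with a 'docs' list.
        docs = page.get('docs', []) if isinstance(page, dict) else (page or [])
        if not docs:
            break
        records.extend(docs)
        offset += PAGE_SIZE
    return records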
