2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
-2.4.7
+2.4.8
10 changes: 8 additions & 2 deletions entity-api-spec.yaml
@@ -434,7 +434,7 @@ components:
             - block
             - section
             - suspension
-          description: 'A code representing the type of specimen. Must be an organ, block, section, or suspension'
+          description: 'A code representing the type of specimen. Must be an organ, block, section, or suspension, in all lower case.'
         protocol_url:
           type: string
           description: 'The protocols.io DOI URL pointing to the protocol under which the sample was obtained and/or prepared.'
@@ -482,7 +482,7 @@ components:
             - TR
             - UR
             - UT
-          description: 'Organ code specifier, only set if sample_category == organ. Valid values found in: [organ types](https://github.com/hubmapconsortium/search-api/blob/main/src/search-schema/data/definitions/enums/organ_types.yaml)'
+          description: 'Organ code specifier, only set if sample_category == organ. Valid values found in: [organ types](https://ontology.api.hubmapconsortium.org/organs/by-code?application_context=HUBMAP)'
         organ_other:
           type: string
           description: The organ type provided by the user if "other" organ type is selected
@@ -903,6 +903,12 @@ components:
             $ref: '#/components/schemas/Dataset'
           readOnly: true
           description: 'The datasets that are contained in this Upload.'
+        anticipated_complete_upload_month:
+          type: string
+          description: 'The month that the Upload is anticipated to have all required data uploaded, in the format YYYY-MM.'
+        anticipated_dataset_count:
+          type: integer
+          description: 'The total number of datasets that this Upload will eventually contain.'
     Collection:
       type: object
       properties:
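The `organ` description above now points at the ontology API instead of a YAML file in search-api. A minimal sketch of retrieving the valid organ codes from that endpoint; the URL and query parameter come from the diff, while the response layout is an assumption to verify against the live service:

import requests

# Fetch the valid organ codes referenced by the `organ` description above.
resp = requests.get(
    "https://ontology.api.hubmapconsortium.org/organs/by-code",
    params={"application_context": "HUBMAP"},
    timeout=10,
)
resp.raise_for_status()
organ_codes = resp.json()
print(organ_codes)  # expected to include codes such as TR, UR, UT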
12 changes: 10 additions & 2 deletions src/app.py
@@ -1272,7 +1272,15 @@ def create_multiple_samples(count):
         # No need to log the validation errors
         bad_request_error(str(e))
 
-    # `direct_ancestor_uuid` is required on create
+    try:
+        schema_manager.execute_property_level_validators('before_property_create_validators', normalized_entity_type, request, {}, json_data_dict)
+    # Currently only ValueError
+    except ValueError as e:
+        bad_request_error(e)
+    except schema_errors.UnimplementedValidatorException as uve:
+        internal_server_error(uve)
+
+    # `direct_ancestor_uuid` is required on create for a Sample.
     # Check existence of the direct ancestor (either another Sample or Donor)
     direct_ancestor_dict = query_target_entity(json_data_dict['direct_ancestor_uuid'], user_token)
 
@@ -1288,7 +1296,7 @@ def create_multiple_samples(count):
         if ('organ' not in json_data_dict) or (not json_data_dict['organ']):
             bad_request_error("A valid organ code is required since the direct ancestor is a Donor")
 
-    # Generate 'before_create_triiger' data and create the entity details in Neo4j
+    # Generate 'before_create_trigger' data and create the entity details in Neo4j
     generated_ids_dict_list = create_multiple_samples_details(request, normalized_entity_type, user_token, json_data_dict, count)
 
     # Also index each new Sample node in elasticsearch via search-api
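For context, a rough sketch of what the `execute_property_level_validators` call added above plausibly does: walk the submitted properties, look up any validator names registered under `before_property_create_validators` in provenance_schema.yaml, and invoke the matching functions from schema_validators.py. Only the call site and the exception handling are taken from the diff; the internals of `schema_manager` shown here are an assumption.

from schema import schema_validators  # import path assumed; module extended later in this PR

def execute_property_level_validators_sketch(validator_type, normalized_entity_type,
                                             request, existing_data_dict, new_data_dict, schema):
    # `schema` is the parsed provenance_schema.yaml; validator names configured per
    # property are resolved by name and run, raising ValueError on bad input.
    properties = schema['ENTITIES'][normalized_entity_type]['properties']
    for property_key in new_data_dict:
        for validator_name in properties.get(property_key, {}).get(validator_type, []):
            validator = getattr(schema_validators, validator_name)
            validator(property_key, normalized_entity_type, request, existing_data_dict, new_data_dict)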
18 changes: 18 additions & 0 deletions src/schema/provenance_schema.yaml
@@ -1241,6 +1241,24 @@ ENTITIES:
         type: string
         indexed: true
         description: The organ code representing the organ type that the data contained in the upload will be registered/associated with.
+      anticipated_complete_upload_month:
+        type: string
+        indexed: true
+        description: The specific month the Upload is anticipated to have all required data uploaded, in the format YYYY-MM.
+        required_on_create: false
+        before_property_create_validators:
+          - validate_anticipated_complete_date
+        before_property_update_validators:
+          - validate_anticipated_complete_date
+      anticipated_dataset_count:
+        type: integer
+        indexed: true
+        description: The total number of datasets that this Upload will eventually contain.
+        required_on_create: false
+        before_property_create_validators:
+          - validate_anticipated_dataset_count
+        before_property_update_validators:
+          - validate_anticipated_dataset_count
 
 ############################################# EPICollection #############################################
   Epicollection:
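An illustrative payload exercising the two new Upload properties defined above. The field names and the YYYY-MM format come from this schema; the endpoint URL, placeholder UUID, and auth header are assumptions made only for the example:

import requests

payload = {
    "anticipated_complete_upload_month": "2026-06",  # YYYY-MM, not in the past, not after 2026-12
    "anticipated_dataset_count": 12,                 # must be a positive integer
}
# Hypothetical update of an existing Upload; adjust the URL, uuid and token for a real call.
resp = requests.put(
    "https://entity.api.hubmapconsortium.org/entities/<upload-uuid>",
    headers={"Authorization": "Bearer <globus-token>"},
    json=payload,
    timeout=10,
)
print(resp.status_code, resp.json())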
67 changes: 65 additions & 2 deletions src/schema/schema_validators.py
@@ -542,6 +542,63 @@ def validate_upload_status_value(property_key, normalized_entity_type, request,
     if new_status not in accepted_status_values:
         raise ValueError(f"Invalid status value: {new_status}")
 
+"""
+Validate the anticipated_complete_upload_month string provided for an Upload
+
+Parameters
+----------
+property_key : str
+    The target property key
+normalized_entity_type : str
+    One of the normalized entity types defined in the schema yaml, here always Upload
+request: Flask request object
+    The instance of Flask request passed in from application request
+existing_data_dict : dict
+    A dictionary that contains all existing entity properties
+new_data_dict : dict
+    The json data in request body, already after the regular validations
+"""
+def validate_anticipated_complete_date(property_key, normalized_entity_type, request, existing_data_dict, new_data_dict):
+    MAX_ANTICIPATED_COMPLETE_DATE = '2026-12'
+    anticipated_complete_date_str = new_data_dict[property_key]
+    if not re.fullmatch(pattern=r'^\d{4}-\d{2}$', string=anticipated_complete_date_str):
+        raise ValueError(f"Format of '{anticipated_complete_date_str}' does not match the format YYYY-MM")
+    anticipated_year, anticipated_month = map(int, anticipated_complete_date_str.split("-"))
+    if anticipated_month < 1 or anticipated_month > 12:
+        raise ValueError(f"Anticipated completion month of '{anticipated_complete_date_str[5:]}' is not valid")
+    now = datetime.now()
+    current_year = now.year
+    current_month = now.month
+    if anticipated_year < current_year or \
+            (anticipated_year == current_year and anticipated_month < current_month):
+        raise ValueError(f"Anticipated complete date '{anticipated_complete_date_str}'"
+                         f" cannot be before the current month.")
+    max_anticipated_year, max_anticipated_month = map(int, MAX_ANTICIPATED_COMPLETE_DATE.split("-"))
+    if anticipated_year > max_anticipated_year or \
+            (anticipated_year == max_anticipated_year and anticipated_month > max_anticipated_month):
+        raise ValueError(f"Anticipated complete date '{anticipated_complete_date_str}'"
+                         f" cannot be after '{MAX_ANTICIPATED_COMPLETE_DATE}'.")
 
+"""
+Validate the anticipated_dataset_count integer provided for an Upload
+
+Parameters
+----------
+property_key : str
+    The target property key
+normalized_entity_type : str
+    One of the normalized entity types defined in the schema yaml, here always Upload
+request: Flask request object
+    The instance of Flask request passed in from application request
+existing_data_dict : dict
+    A dictionary that contains all existing entity properties
+new_data_dict : dict
+    The json data in request body, already after the regular validations
+"""
+def validate_anticipated_dataset_count(property_key, normalized_entity_type, request, existing_data_dict, new_data_dict):
+    # anticipated_dataset_count is an int, assured by provenance_schema.yaml "type: integer"
+    anticipated_dataset_count = new_data_dict[property_key]
+    if anticipated_dataset_count <= 0:
+        raise ValueError(f"{property_key} must be a positive integer when specified.")
+
 """
 Validate the provided value of Sample.sample_category on create via POST and update via PUT
@@ -564,8 +621,14 @@ def validate_sample_category(property_key, normalized_entity_type, request, exis
     sample_category = new_data_dict[property_key].lower()
 
     if sample_category not in defined_tissue_types:
-        raise ValueError(f"Invalid sample_category: {sample_category}")
-
+        raise ValueError(f"Invalid sample_category: {sample_category}."
+                         f" Should be one of {', '.join(defined_tissue_types)}.")
+
+    # Given the sample_category is a defined_tissue_types element, assure the request has
+    # the proper case for storage
+    if new_data_dict[property_key] != sample_category:
+        raise ValueError(f"The case of sample_category '{new_data_dict[property_key]}'"
+                         f" must be specified as '{sample_category}'.")
 
 """
 Validate the provided value of Publication.publication_date is in the correct format against ISO 8601 Format:
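A few direct calls showing how the validators added or changed above behave; these are informal sanity checks, not tests from the PR. The import path is an assumption, and `request`/`existing_data_dict` are passed as placeholders on the assumption that the unseen parts of these validators do not require a real request object.

from schema import schema_validators  # import path assumed

# Passes if run before June 2026; '2027-01' or '2020-05' would raise ValueError.
schema_validators.validate_anticipated_complete_date(
    "anticipated_complete_upload_month", "Upload", None, {},
    {"anticipated_complete_upload_month": "2026-06"})

# Passes; 0 or a negative count would raise ValueError.
schema_validators.validate_anticipated_dataset_count(
    "anticipated_dataset_count", "Upload", None, {},
    {"anticipated_dataset_count": 5})

# Passes; "Organ" (wrong case) or "tissue" (not a defined category) would raise ValueError.
schema_validators.validate_sample_category(
    "sample_category", "Sample", None, {},
    {"sample_category": "organ"})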
@@ -81,4 +81,10 @@ Upload:
       items:
         $ref: '#/components/schemas/Dataset'
       readOnly: true
-      description: "The datasets that are contained in this Upload."
+      description: "The datasets that are contained in this Upload."
+    anticipated_complete_upload_month:
+      type: string
+      description: The month that the Upload is anticipated to have all required data uploaded, in the format YYYY-MM.
+    anticipated_dataset_count:
+      type: integer
+      description: The total number of datasets that this Upload will eventually contain.