diff --git a/VERSION b/VERSION
index e30309f7..f041bc6d 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-2.4.7
+2.4.8
diff --git a/entity-api-spec.yaml b/entity-api-spec.yaml
index 7cd58bfe..647b4aed 100644
--- a/entity-api-spec.yaml
+++ b/entity-api-spec.yaml
@@ -434,7 +434,7 @@ components:
          - block
          - section
          - suspension
-        description: 'A code representing the type of specimen. Must be an organ, block, section, or suspension'
+        description: 'A code representing the type of specimen. Must be an organ, block, section, or suspension, in all lower case.'
      protocol_url:
        type: string
        description: 'The protocols.io doi url pointing the protocol under wich the sample was obtained and/or prepared.'
@@ -482,7 +482,7 @@ components:
          - TR
          - UR
          - UT
-        description: 'Organ code specifier, only set if sample_category == organ. Valid values found in: [organ types](https://github.com/hubmapconsortium/search-api/blob/main/src/search-schema/data/definitions/enums/organ_types.yaml)'
+        description: 'Organ code specifier, only set if sample_category == organ. Valid values found in: [organ types](https://ontology.api.hubmapconsortium.org/organs/by-code?application_context=HUBMAP)'
      organ_other:
        type: string
        description: The organ type provided by the user if "other" organ type is selected
@@ -903,6 +903,12 @@ components:
            $ref: '#/components/schemas/Dataset'
          readOnly: true
          description: 'The datasets that are contained in this Upload.'
+        anticipated_complete_upload_month:
+          type: string
+          description: 'The month that the Upload is anticipated to have all required data uploaded, in the format YYYY-MM.'
+        anticipated_dataset_count:
+          type: integer
+          description: 'The total number of datasets that this Upload will eventually contain.'
    Collection:
      type: object
      properties:
diff --git a/src/app.py b/src/app.py
index 23f16fc1..e0abe58e 100644
--- a/src/app.py
+++ b/src/app.py
@@ -89,6 +89,10 @@
MEMCACHED_MODE = False
MEMCACHED_PREFIX = 'NONE'

+# Read the secret key which may be submitted in HTTP Request Headers to override the lockout of
+# updates to entities with characteristics prohibiting their modification.
+LOCKED_ENTITY_UPDATE_OVERRIDE_KEY = app.config['LOCKED_ENTITY_UPDATE_OVERRIDE_KEY']
+
# Suppress InsecureRequestWarning warning when requesting status on https with ssl cert verify disabled
requests.packages.urllib3.disable_warnings(category = InsecureRequestWarning)

@@ -1272,7 +1276,15 @@ def create_multiple_samples(count):
        # No need to log the validation errors
        bad_request_error(str(e))

-    # `direct_ancestor_uuid` is required on create
+    try:
+        schema_manager.execute_property_level_validators('before_property_create_validators', normalized_entity_type, request, {}, json_data_dict)
+    # Currently only ValueError
+    except ValueError as e:
+        bad_request_error(e)
+    except schema_errors.UnimplementedValidatorException as uve:
+        internal_server_error(uve)
+
+    # `direct_ancestor_uuid` is required on create for a Sample.
    # Check existence of the direct ancestor (either another Sample or Donor)
    direct_ancestor_dict = query_target_entity(json_data_dict['direct_ancestor_uuid'], user_token)

@@ -1288,7 +1300,7 @@ def create_multiple_samples(count):
        if ('organ' not in json_data_dict) or (not json_data_dict['organ']):
            bad_request_error("A valid organ code is required since the direct ancestor is a Donor")

-    # Generate 'before_create_triiger' data and create the entity details in Neo4j
+    # Generate 'before_create_trigger' data and create the entity details in Neo4j
    generated_ids_dict_list = create_multiple_samples_details(request, normalized_entity_type, user_token, json_data_dict, count)

    # Also index the each new Sample node in elasticsearch via search-api
@@ -1355,8 +1367,29 @@ def update_entity(id):
    # Normalize user provided entity_type
    normalized_entity_type = schema_manager.normalize_entity_type(entity_dict['entity_type'])

-    # Note, we don't support entity level validators on entity update via PUT
-    # Only entity create via POST is supported at the entity level
+    # Execute entity level validator defined in schema yaml before entity modification.
+    lockout_overridden = False
+    try:
+        schema_manager.execute_entity_level_validator(validator_type='before_entity_update_validator'
+                                                      , normalized_entity_type=normalized_entity_type
+                                                      , request=request
+                                                      , existing_entity_dict=entity_dict)
+    except schema_errors.MissingApplicationHeaderException as e:
+        bad_request_error(e)
+    except schema_errors.InvalidApplicationHeaderException as e:
+        bad_request_error(e)
+    except schema_errors.LockedEntityUpdateException as leue:
+        # HTTP header names are case-insensitive, and request.headers.get() returns None if the header doesn't exist
+        locked_entity_update_header = request.headers.get(SchemaConstants.LOCKED_ENTITY_UPDATE_HEADER)
+        if locked_entity_update_header and (LOCKED_ENTITY_UPDATE_OVERRIDE_KEY == locked_entity_update_header):
+            lockout_overridden = True
+            logger.info(f"For {entity_dict['entity_type']} {entity_dict['uuid']}"
+                        f" update prohibited due to {str(leue)},"
+                        f" but being overridden by valid {SchemaConstants.LOCKED_ENTITY_UPDATE_HEADER} in request.")
+        else:
+            forbidden_error(leue)
+    except Exception as e:
+        internal_server_error(e)

    # Validate request json against the yaml schema
    # Pass in the entity_dict for missing required key check, this is different from creating new entity
@@ -1375,6 +1408,9 @@ def update_entity(id):
            ValueError) as e:
        bad_request_error(e)

+    # Proceed with per-entity updates after passing any entity-level or property-level validations which
+    # would have locked out updates.
+    #
    # Sample, Dataset, and Upload: additional validation, update entity, after_update_trigger
    # Collection and Donor: update entity
    if normalized_entity_type == 'Sample':
@@ -1459,13 +1495,6 @@ def update_entity(id):
        if has_dataset_uuids_to_link or has_dataset_uuids_to_unlink or has_updated_status:
            after_update(normalized_entity_type, user_token, merged_updated_dict)
    elif schema_manager.entity_type_instanceof(normalized_entity_type, 'Collection'):
-        entity_visibility = _get_entity_visibility( normalized_entity_type=normalized_entity_type
-                                                   ,entity_dict=entity_dict)
-        # Prohibit update of an existing Collection if it meets criteria of being visible to public e.g. has DOI.
-        if entity_visibility == DataVisibilityEnum.PUBLIC:
-            logger.info(f"Attempt to update {normalized_entity_type} with id={id} which has visibility {entity_visibility}.")
-            bad_request_error(f"Cannot update {normalized_entity_type} due '{entity_visibility.value}' visibility.")
-
        # Generate 'before_update_trigger' data and update the entity details in Neo4j
        merged_updated_dict = update_entity_details(request, normalized_entity_type, user_token, json_data_dict, entity_dict)

@@ -1531,7 +1560,8 @@ def update_entity(id):
    # Do not return the updated dict to avoid computing overhead - 7/14/2023 by Zhou
    # return jsonify(normalized_complete_dict)

-    return jsonify({'message': f"{normalized_entity_type} of {id} has been updated"})
+    override_msg = 'Lockout overridden. ' if lockout_overridden else ''
+    return jsonify({'message': f"{override_msg}{normalized_entity_type} of {id} has been updated"})

"""
diff --git a/src/instance/app.cfg.example b/src/instance/app.cfg.example
index c0ae7302..595ee728 100644
--- a/src/instance/app.cfg.example
+++ b/src/instance/app.cfg.example
@@ -26,6 +26,11 @@ NEO4J_URI = 'bolt://hubmap-neo4j-localhost:7687'
NEO4J_USERNAME = 'neo4j'
NEO4J_PASSWORD = '123'

+# Secret value presented with the request header value named by
+# SchemaConstants.LOCKED_ENTITY_UPDATE_HEADER, expected to be of the form
+# X-HuBMAP-Update-Override: 
+LOCKED_ENTITY_UPDATE_OVERRIDE_KEY = 'set during deployment'
+
# Set MEMCACHED_MODE to False to disable the caching for local development
MEMCACHED_MODE = True
MEMCACHED_SERVER = 'host:11211'
diff --git a/src/schema/provenance_schema.yaml b/src/schema/provenance_schema.yaml
index 43c53dad..dbb3351b 100644
--- a/src/schema/provenance_schema.yaml
+++ b/src/schema/provenance_schema.yaml
@@ -10,8 +10,9 @@
# - trigger types: before_create_trigger|after_create_trigger|before_update_trigger|after_update_trigger|on_read_trigger|on_index_trigger, one property can have none (default) or more than one triggers
# - updated_peripherally: a temporary measure to correctly handle any attributes which are potentially updated by multiple triggers

-# Entity level validator:
-# - types: before_entity_create_validator, a single validation method needed for creating or updating the entity
+# Entity level validators:
+# - types: before_entity_create_validator - validation method needed for creating an entity.
+#          before_entity_update_validator - validation method needed for updating an entity.
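# (Reviewer sketch, not part of the schema: the new update validators enforce the lockout
#  that the LOCKED_ENTITY_UPDATE_OVERRIDE_KEY configured in app.cfg can bypass. A caller
#  holding the key would send, for example:
#      PUT /entities/<uuid>
#      Authorization: Bearer <groups-token>
#      X-HuBMAP-Update-Override: <value of LOCKED_ENTITY_UPDATE_OVERRIDE_KEY>
#  and the success message is then prefixed with 'Lockout overridden. '.)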
# Property level validators:
# - types: before_property_create_validators|before_property_update_validators, a list of validation methods

@@ -192,6 +193,9 @@ shared_entity_properties: &shared_entity_properties
ENTITIES:
############################################# Collection #############################################
    Collection:
+        before_entity_update_validator:
+            # Halt modification of entities which are "locked", such as a Collection with a DOI
+            - validate_entity_not_locked_before_update
        excluded_properties_from_public_response:
            - datasets:
                - lab_dataset_id
@@ -304,6 +308,9 @@ ENTITIES:
    Dataset:
        # Only allowed applications can create new Dataset via POST
        before_entity_create_validator: validate_application_header_before_entity_create
+        before_entity_update_validator:
+            # Halt modification of entities which are "locked", such as a Dataset with status == 'Published'
+            - validate_entity_not_locked_before_update
        # Dataset can be either derivation source or target
        excluded_properties_from_public_response:
            - lab_dataset_id
@@ -659,6 +666,9 @@ ENTITIES:
        superclass: Dataset
        # Only allowed applications can create new Publication via POST
        before_entity_create_validator: validate_application_header_before_entity_create
+        before_entity_update_validator:
+            # Halt modification of entities which are "locked", such as a Publication with status == 'Published'
+            - validate_entity_not_locked_before_update
        # Publications can be either derivation source or target
        derivation:
            source: true
@@ -763,6 +773,9 @@ ENTITIES:
                - lab_donor_id
                - submission_id
                - label
+        before_entity_update_validator:
+            # Halt modification of entities which are "locked", such as a Donor with data_access_level == 'public'
+            - validate_entity_not_locked_before_update
        properties:
            <<: *shared_properties
            <<: *shared_entity_properties
@@ -896,6 +909,9 @@ ENTITIES:
                - lab_id
            # Both Sample and Donor ancestors of a Sample must have these fields removed
            - submission_id
+        before_entity_update_validator:
+            # Halt modification of entities which are "locked", such as a Sample with data_access_level == 'public'
+            - validate_entity_not_locked_before_update
        properties:
            <<: *shared_properties
            <<: *shared_entity_properties
@@ -1120,7 +1136,10 @@ ENTITIES:
    Upload:
        # Only allowed applications can create new Upload via POST
        before_entity_create_validator: validate_application_header_before_entity_create
-        # Upload requires an ancestor of Lab, and and has no allowed decesndents
+        # No before_entity_update_validator needed for Upload because the entity is
+        # always considered "non-public", and therefore not blocked from update/PUT.
+        #
+        # Upload requires a Lab entity as an ancestor, and has no allowed descendants
        derivation:
            source: false
            target: false # Set to false since the schema doesn't handle Lab currently
@@ -1241,6 +1260,24 @@ ENTITIES:
                type: string
                indexed: true
                description: The organ code representing the organ type that the data contained in the upload will be registered/associated with.
+            anticipated_complete_upload_month:
+                type: string
+                indexed: true
+                description: The specific month the Upload is anticipated to have all required data uploaded, in the format YYYY-MM.
+                required_on_create: false
+                before_property_create_validators:
+                    - validate_anticipated_complete_date
+                before_property_update_validators:
+                    - validate_anticipated_complete_date
+            anticipated_dataset_count:
+                type: integer
+                indexed: true
+                description: The total number of datasets that this Upload will eventually contain.
+                required_on_create: false
+                before_property_create_validators:
+                    - validate_anticipated_dataset_count
+                before_property_update_validators:
+                    - validate_anticipated_dataset_count
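# (Reviewer sketch, not part of the schema: schema_manager resolves each validator name
#  listed above to a function in schema_validators.py and calls it with the standard
#  five-argument property-validator signature, e.g. on create:
#      validate_anticipated_complete_date('anticipated_complete_upload_month',
#                                         'Upload', request, {}, json_data_dict)
#  a ValueError raised by the validator is returned to the API caller as a 400 Bad Request.)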
+ required_on_create: false + before_property_create_validators: + - validate_anticipated_dataset_count + before_property_update_validators: + - validate_anticipated_dataset_count ############################################# EPICollection ############################################# Epicollection: @@ -1250,5 +1287,8 @@ ENTITIES: derivation: source: false target: false + before_entity_update_validator: + # Halt modification of entities which are "locked", such as a Dataset with status == 'Published' + - validate_entity_not_locked_before_update properties: <<: *shared_collection_properties diff --git a/src/schema/schema_constants.py b/src/schema/schema_constants.py index 30cc0279..e3cfd1a5 100644 --- a/src/schema/schema_constants.py +++ b/src/schema/schema_constants.py @@ -7,6 +7,7 @@ class SchemaConstants(object): COMPONENT_DATASET = 'component-dataset' INGEST_PIPELINE_APP = 'ingest-pipeline' HUBMAP_APP_HEADER = 'X-Hubmap-Application' + LOCKED_ENTITY_UPDATE_HEADER = 'X-HuBMAP-Update-Override' INTERNAL_TRIGGER = 'X-Internal-Trigger' DATASET_STATUS_PUBLISHED = 'published' diff --git a/src/schema/schema_errors.py b/src/schema/schema_errors.py index 90a900a0..edb4ac63 100644 --- a/src/schema/schema_errors.py +++ b/src/schema/schema_errors.py @@ -39,4 +39,7 @@ class MissingApplicationHeaderException(Exception): pass class InvalidApplicationHeaderException(Exception): + pass + +class LockedEntityUpdateException(Exception): pass \ No newline at end of file diff --git a/src/schema/schema_manager.py b/src/schema/schema_manager.py index a14a75fc..59b11e3a 100644 --- a/src/schema/schema_manager.py +++ b/src/schema/schema_manager.py @@ -1166,13 +1166,15 @@ def validate_json_data_against_schema(json_data_dict, normalized_entity_type, ex Parameters ---------- validator_type : str - One of the validator types: before_entity_create_validator + One of the validator types recognized by the validate_entity_level_validator_type() method. 
normalized_entity_type : str
    One of the normalized entity types defined in the schema yaml: Donor, Sample, Dataset, Upload, Upload, Publication
request: Flask request object
    The instance of Flask request passed in from application request
+existing_entity_dict : dict
+    The dictionary for an entity, retrieved from Neo4j, for use during update/PUT validations
"""
-def execute_entity_level_validator(validator_type, normalized_entity_type, request):
+def execute_entity_level_validator(validator_type, normalized_entity_type, request, existing_entity_dict=None):
    global _schema

    # A bit validation
@@ -1183,23 +1185,41 @@ def execute_entity_level_validator(validator_type, normalized_entity_type, reque
    for key in entity:
        if validator_type == key:
-            validator_method_name = entity[validator_type]
+            if isinstance(entity[validator_type], str):
+                validator_method_names = [entity[validator_type]]
+            else:
+                # default to expecting a list when not a str
+                validator_method_names = entity[validator_type]

-            try:
-                # Get the target validator method defined in the schema_validators.py module
-                validator_method_to_call = getattr(schema_validators, validator_method_name)
-
-                logger.info(f"To run {validator_type}: {validator_method_name} defined for entity {normalized_entity_type}")
-
-                validator_method_to_call(normalized_entity_type, request)
-            except schema_errors.MissingApplicationHeaderException as e:
-                raise schema_errors.MissingApplicationHeaderException(e)
-            except schema_errors.InvalidApplicationHeaderException as e:
-                raise schema_errors.InvalidApplicationHeaderException(e)
-            except Exception as e:
-                msg = f"Failed to call the {validator_type} method: {validator_method_name} defined for entity {normalized_entity_type}"
-                # Log the full stack trace, prepend a line with our message
-                logger.exception(msg)
+            for validator_method_name in validator_method_names:
+                try:
+                    # Get the target validator method defined in the schema_validators.py module
+                    validator_method_to_call = getattr(schema_validators, validator_method_name)
+
+                    logger.info(f"To run {validator_type}: {validator_method_name} defined for entity {normalized_entity_type}")
+
+                    # Create a dictionary to hold data needed by any entity validator, which must be populated
+                    # with validator-specific requirements when the method to be called is determined.
+                    options_dict = {}
+                    if existing_entity_dict is None:
+                        # Execute the entity-level validation for create/POST
+                        options_dict['http_request'] = request
+                        validator_method_to_call(options_dict)
+                    else:
+                        # Execute the entity-level validation for update/PUT
+                        options_dict['existing_entity_dict'] = existing_entity_dict
+                        validator_method_to_call(options_dict)
+                except schema_errors.MissingApplicationHeaderException as e:
+                    raise schema_errors.MissingApplicationHeaderException(e)
+                except schema_errors.InvalidApplicationHeaderException as e:
+                    raise schema_errors.InvalidApplicationHeaderException(e)
+                except schema_errors.LockedEntityUpdateException as leue:
+                    raise leue
+                except Exception as e:
+                    msg = f"Failed to call the {validator_type} method: {validator_method_name} defined for entity {normalized_entity_type}"
+                    # Log the full stack trace, prepend a line with our message
+                    logger.exception(msg)
+                    raise e

"""
@@ -1360,10 +1380,10 @@ def validate_trigger_type(trigger_type:TriggerTypeEnum):
Parameters
----------
validator_type : str
-    One of the validator types: before_entity_create_validator
+    Name of an entity-level validator type, which must be listed in accepted_validator_types and found in this schema manager module.
"""
def validate_entity_level_validator_type(validator_type):
-    accepted_validator_types = ['before_entity_create_validator']
+    accepted_validator_types = ['before_entity_create_validator', 'before_entity_update_validator']

    separator = ', '

    if validator_type.lower() not in accepted_validator_types:
diff --git a/src/schema/schema_validators.py b/src/schema/schema_validators.py
index 5a6fd5e1..75b96b5c 100644
--- a/src/schema/schema_validators.py
+++ b/src/schema/schema_validators.py
@@ -14,14 +14,13 @@

logger = logging.getLogger(__name__)

-
####################################################################################################
## Entity Level Validators
####################################################################################################

"""
Validate the application specified in the custom HTTP header
-for creating a new entity via POST or updating via PUT
+for creating a new entity via POST.

Parameters
----------
@@ -30,7 +29,13 @@ request: Flask request
    The instance of Flask request passed in from application request
"""
-def validate_application_header_before_entity_create(normalized_entity_type, request):
+def validate_application_header_before_entity_create(options_dict):
+    if 'http_request' in options_dict:
+        request = options_dict['http_request']
+    else:
+        logger.error(f"validate_application_header_before_entity_create() expected 'http_request' in"
+                     f" options_dict, but it was missing in {str(options_dict)}.")
+        raise KeyError("Entity validator internal misconfiguration.")
    # A list of applications allowed to create this new entity or update Dataset and Upload
    # Use lowercase for comparison
    applications_allowed = [
@@ -41,6 +46,24 @@ def validate_application_header_before_entity_req

    _validate_application_header(applications_allowed, request.headers)

+"""
+Validate required conditions prior to allowing update of an existing entity via PUT.
+
+Parameters
+----------
+options_dict : dict
+    A dictionary of data needed by this validator. Must contain an 'existing_entity_dict' entry
+    holding the existing entity properties retrieved from Neo4j for the entity being updated.
+"""
+def validate_entity_not_locked_before_update(options_dict):
+    if 'existing_entity_dict' in options_dict:
+        existing_entity_dict = options_dict['existing_entity_dict']
+    else:
+        logger.error(f"validate_entity_not_locked_before_update() expected 'existing_entity_dict' in"
+                     f" options_dict, but it was missing in {str(options_dict)}.")
+        raise KeyError("Entity validator internal misconfiguration.")
+    _is_entity_locked_against_update(existing_entity_dict)

##############################################################################################
## Property Level Validators
##############################################################################################

@@ -74,7 +97,6 @@ def validate_recognized_dataset_type(property_key, normalized_entity_type, reque
        raise ValueError(f"Proposed Dataset dataset_type '{proposed_dataset_type_prefix}'"
                         f" is not recognized in the existing ontology."
                         f" Valid values are: {str(target_list)}.")
-
"""
Validate the specified value for an Upload's intended_dataset_type is in the valueset UBKG recognizes.

@@ -103,7 +125,6 @@ def validate_intended_dataset_type(property_key, normalized_entity_type, request
                         f" is not recognized in the existing ontology."
                         f" Valid values are: {str(target_list)}.")
-
"""
Validate the target list has no duplicated items

@@ -126,7 +147,6 @@ def validate_no_duplicates_in_list(property_key, normalized_entity_type, request
    if len(set(target_list)) != len(target_list):
        raise ValueError(f"The {property_key} field must only contain unique items")
-
"""
Validate that a given dataset is not a component of a multi-assay split parent dataset fore allowing status to be updated. If a component dataset needs to be updated, update it via its parent multi-assay dataset

@@ -144,8 +164,6 @@ def validate_no_duplicates_in_list(property_key, normalized_entity_type, request
new_data_dict : dict
    The json data in request body, already after the regular validations
"""
-
-
def validate_dataset_not_component(property_key, normalized_entity_type, request, existing_data_dict, new_data_dict):
    headers = request.headers
    if not headers.get(SchemaConstants.INTERNAL_TRIGGER) == SchemaConstants.COMPONENT_DATASET:
                         f" {existing_data_dict['uuid']}. Can not change status on component datasets directly. Status"
                         f"change must occur on parent multi-assay split dataset")
-
"""
If the provided previous revision is already a revision of another dataset, disallow
"""
@@ -169,7 +186,6 @@
        raise ValueError(f"Dataset marked as previous revision is already the previous revision of another dataset. "
" f"Each dataset may only be the previous revision of one other dataset") - """ If an entity has a DOI, do not allow it to be updated """ @@ -542,6 +558,63 @@ def validate_upload_status_value(property_key, normalized_entity_type, request, if new_status not in accepted_status_values: raise ValueError(f"Invalid status value: {new_status}") +""" +Validate the anticipated_complete_data string provided for an Upload + +Parameters +---------- +property_key : str + The target property key +normalized_type : str + Submission +request: Flask request object + The instance of Flask request passed in from application request +existing_data_dict : dict + A dictionary that contains all existing entity properties +new_data_dict : dict + The json data in request body, already after the regular validations +""" +def validate_anticipated_complete_date(property_key, normalized_entity_type, request, existing_data_dict, new_data_dict): + MAX_ANTICIPATED_COMPLETE_DATE = '2026-12' + anticipated_complete_date_str = new_data_dict[property_key] + if not re.fullmatch(pattern=r'^\d{4}-\d{2}$', string=anticipated_complete_date_str): + raise ValueError(f"Format of '{anticipated_complete_date_str}' does not match the format YYYY-MM") + anticipated_year, anticipated_month = map(int, anticipated_complete_date_str.split("-")) + if anticipated_month < 1 or anticipated_month > 12: + raise ValueError(f"Anticipated completion month of '{anticipated_complete_date_str[5:]}' is not valid") + now = datetime.now() + current_year = now.year + current_month = now.month + if anticipated_year < current_year or \ + (anticipated_year == current_year and anticipated_month < current_month): + raise ValueError( f"Anticipated complete date '{anticipated_complete_date_str}'" + f" cannot be before the current month.") + max_anticipated_year, max_anticipated_month = map(int, MAX_ANTICIPATED_COMPLETE_DATE.split("-")) + if anticipated_year > max_anticipated_year: + raise ValueError( f"Anticipated complete date '{anticipated_complete_date_str}'" + f" cannot be after '{MAX_ANTICIPATED_COMPLETE_DATE}'.") + +""" +Validate the anticipated_dataset_count integer provided for an Upload + +Parameters +---------- +property_key : str + The target property key +normalized_type : str + Submission +request: Flask request object + The instance of Flask request passed in from application request +existing_data_dict : dict + A dictionary that contains all existing entity properties +new_data_dict : dict + The json data in request body, already after the regular validations +""" +def validate_anticipated_dataset_count(property_key, normalized_entity_type, request, existing_data_dict, new_data_dict): + # anticipated_dataset_count of type int, assured by provenance_schema.yaml "type: integer" + anticipated_dataset_count = new_data_dict[property_key] + if anticipated_dataset_count <= 0: + raise ValueError(f"{property_key} must be positive integer when specified.") """ Validate the provided value of Sample.sample_category on create via POST and update via PUT @@ -564,8 +637,14 @@ def validate_sample_category(property_key, normalized_entity_type, request, exis sample_category = new_data_dict[property_key].lower() if sample_category not in defined_tissue_types: - raise ValueError(f"Invalid sample_category: {sample_category}") + raise ValueError(f"Invalid sample_category: {sample_category}." 
+ f" Should be one of {', '.join(defined_tissue_types)}.") + # Given the sample_category is a defined_tissue_types element, assure the request has + # the proper case for storage + if new_data_dict[property_key] != sample_category: + raise ValueError(f"The case of sample_category '{new_data_dict[property_key]}'" + f" must be specified as '{sample_category}'.") """ Validate the provided value of Publication.publication_date is in the correct format against ISO 8601 Format: @@ -827,9 +906,39 @@ def _validate_application_header(applications_allowed, request_headers): if not app_header: msg = f"Unable to proceed due to missing {SchemaConstants.HUBMAP_APP_HEADER} header from request" - raise schema_errors.MissingApplicationHeaderException(msg) + raise MissingApplicationHeaderException(msg) # Use lowercase for comparing the application header value against the yaml if app_header.lower() not in applications_allowed: msg = f"Unable to proceed due to invalid {SchemaConstants.HUBMAP_APP_HEADER} header value: {app_header}" - raise schema_errors.InvalidApplicationHeaderException(msg) + raise InvalidApplicationHeaderException(msg) + +""" +Indicate if the entity meets a criteria to lock out modification updates + +Parameters +---------- +request_headers: Flask request.headers object, behaves like a dict + The instance of Flask request.headers passed in from application request +""" +def _is_entity_locked_against_update(existing_entity_dict): + entity_type = existing_entity_dict['entity_type'] + if entity_type in ['Publication','Dataset']: + if 'status' in existing_entity_dict and existing_entity_dict['status'] == 'Published': + raise schema_errors.LockedEntityUpdateException(f"{entity_type} cannot be modified, due to" + f" status={existing_entity_dict['status']}.") + elif entity_type in ['Donor','Sample']: + if 'data_access_level' in existing_entity_dict and existing_entity_dict['data_access_level'] == 'public': + raise schema_errors.LockedEntityUpdateException(f"{entity_type} cannot be modified, due to" + f" data_access_level={existing_entity_dict['data_access_level']}.") + elif entity_type in ['Collection','Epicollection']: + if 'doi_url' in existing_entity_dict and existing_entity_dict['doi_url']: + raise schema_errors.LockedEntityUpdateException(f"{entity_type} cannot be modified, due to" + f" doi_url={existing_entity_dict['doi_url']}.") + # Probably never get here, since doi_url and registered_doi must be set as a pair. + if 'registered_doi' in existing_entity_dict and existing_entity_dict['registered_doi']: + raise schema_errors.LockedEntityUpdateException(f"{entity_type} cannot be modified, due to" + f" registered_doi={existing_entity_dict['registered_doi']}.") + else: + entity_uuid = existing_entity_dict['uuid'] + raise schema_errors.LockedEntityUpdateException(f'Unable to check if {entity_type} for {entity_uuid} is locked!') diff --git a/src/schema_templating/example-yaml-templates/upload-schema.yaml b/src/schema_templating/example-yaml-templates/upload-schema.yaml index 9f05e9f1..45a24b44 100644 --- a/src/schema_templating/example-yaml-templates/upload-schema.yaml +++ b/src/schema_templating/example-yaml-templates/upload-schema.yaml @@ -81,4 +81,10 @@ Upload: items: $ref: '#/components/schemas/Dataset' readOnly: true - description: "The datasets that are contained in this Upload." \ No newline at end of file + description: "The datasets that are contained in this Upload." 
diff --git a/src/schema_templating/example-yaml-templates/upload-schema.yaml b/src/schema_templating/example-yaml-templates/upload-schema.yaml
index 9f05e9f1..45a24b44 100644
--- a/src/schema_templating/example-yaml-templates/upload-schema.yaml
+++ b/src/schema_templating/example-yaml-templates/upload-schema.yaml
@@ -81,4 +81,10 @@ Upload:
      items:
        $ref: '#/components/schemas/Dataset'
      readOnly: true
-      description: "The datasets that are contained in this Upload."
\ No newline at end of file
+      description: "The datasets that are contained in this Upload."
+    anticipated_complete_upload_month:
+      type: string
+      description: The month that the Upload is anticipated to have all required data uploaded, in the format YYYY-MM.
+    anticipated_dataset_count:
+      type: integer
+      description: The total number of datasets that this Upload will eventually contain.
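Reviewer note: an end-to-end sketch of an Upload update exercising the two new properties. The base URL, token, and UUID are placeholders, and the `X-Hubmap-Application` header is an assumption about how a pipeline-style client would call this deployment:

```python
# Illustrative only -- placeholders throughout; not a test against a live deployment.
import requests

resp = requests.put(
    "https://entity.api.hubmapconsortium.org/entities/<upload-uuid>",
    headers={
        "Authorization": "Bearer <groups-token>",
        "X-Hubmap-Application": "ingest-pipeline",       # assumption: pipeline-style caller
    },
    json={
        "anticipated_complete_upload_month": "2026-06",  # YYYY-MM, current month through 2026-12
        "anticipated_dataset_count": 12,                 # must be a positive integer
    },
)
print(resp.status_code, resp.json().get("message"))
```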