Skip to content

Commit 39326a7

Browse files
committed
Don't run after_create and after_update triggers if not specified
1 parent 16535f6 commit 39326a7

File tree

3 files changed

+52
-48
lines changed

3 files changed

+52
-48
lines changed

src/app.py

Lines changed: 25 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1204,7 +1204,7 @@ def create_entity(entity_type):
12041204
# For Dataset: link to direct ancestors
12051205
# For Collection: link to member Datasets
12061206
# For Upload: link to parent Lab node
1207-
after_create(normalized_entity_type, request, user_token, merged_dict)
1207+
after_create(normalized_entity_type, request, user_token, merged_dict, json_data_dict)
12081208

12091209
# By default we'll return all the properties but skip these time-consuming ones
12101210
# Donor doesn't need to skip any
@@ -1451,12 +1451,12 @@ def update_entity(id):
14511451
if direct_ancestor_dict['entity_type'] not in ['Donor', 'Sample']:
14521452
bad_request_error(f"The uuid: {direct_ancestor_uuid} is not a Donor neither a Sample, cannot be used as the direct ancestor of this Sample")
14531453

1454-
# Generate 'before_update_triiger' data and update the entity details in Neo4j
1454+
# Generate 'before_update_trigger' data and update the entity details in Neo4j
14551455
merged_updated_dict = update_entity_details(request, normalized_entity_type, user_token, json_data_dict, entity_dict)
14561456

14571457
# Handle linkages update via `after_update_trigger` methods
14581458
if has_direct_ancestor_uuid:
1459-
after_update(normalized_entity_type, request, user_token, merged_updated_dict)
1459+
after_update(normalized_entity_type, request, user_token, merged_updated_dict, json_data_dict)
14601460
# 2/17/23 - Adding direct ancestor checks to publication as well as dataset.
14611461
elif normalized_entity_type in ['Dataset', 'Publication']:
14621462
# A bit more validation if `direct_ancestor_uuids` provided
@@ -1482,10 +1482,15 @@ def update_entity(id):
14821482

14831483
# Generate 'before_update_trigger' data and update the entity details in Neo4j
14841484
merged_updated_dict = update_entity_details(request, normalized_entity_type, user_token, json_data_dict, entity_dict)
1485+
1486+
print("==========DEBUG Begin")
1487+
print(merged_updated_dict)
1488+
print("==========DEBUG End")
1489+
14851490

14861491
# Handle linkages update via `after_update_trigger` methods
14871492
if has_direct_ancestor_uuids or has_associated_collection_uuid or has_updated_status:
1488-
after_update(normalized_entity_type, request, user_token, merged_updated_dict)
1493+
after_update(normalized_entity_type, request, user_token, merged_updated_dict, json_data_dict)
14891494
elif normalized_entity_type == 'Upload':
14901495
has_dataset_uuids_to_link = False
14911496
if ('dataset_uuids_to_link' in json_data_dict) and (json_data_dict['dataset_uuids_to_link']):
@@ -1500,13 +1505,13 @@ def update_entity(id):
15001505

15011506
# Handle linkages update via `after_update_trigger` methods
15021507
if has_dataset_uuids_to_link or has_dataset_uuids_to_unlink or has_updated_status:
1503-
after_update(normalized_entity_type, request, user_token, merged_updated_dict)
1508+
after_update(normalized_entity_type, request, user_token, merged_updated_dict, json_data_dict)
15041509
elif schema_manager.entity_type_instanceof(normalized_entity_type, 'Collection'):
15051510
# Generate 'before_update_trigger' data and update the entity details in Neo4j
15061511
merged_updated_dict = update_entity_details(request, normalized_entity_type, user_token, json_data_dict, entity_dict)
15071512

15081513
# Handle linkages update via `after_update_trigger` methods
1509-
after_update(normalized_entity_type, request, user_token, merged_updated_dict)
1514+
after_update(normalized_entity_type, request, user_token, merged_updated_dict, json_data_dict)
15101515
else:
15111516
# Generate 'before_update_trigger' data and update the entity details in Neo4j
15121517
merged_updated_dict = update_entity_details(request, normalized_entity_type, user_token, json_data_dict, entity_dict)
@@ -4580,7 +4585,7 @@ def _get_dataset_associated_metadata(dataset_dict, dataset_visibility, valid_use
45804585

45814586

45824587
"""
4583-
Generate 'before_create_triiger' data and create the entity details in Neo4j
4588+
Generate 'before_create_trigger' data and create the entity details in Neo4j
45844589
45854590
Parameters
45864591
----------
@@ -5000,7 +5005,7 @@ def create_multiple_component_details(request, normalized_entity_type, user_toke
50005005

50015006

50025007
"""
5003-
Execute 'after_create_triiger' methods
5008+
Execute 'after_create_trigger' methods
50045009
50055010
Parameters
50065011
----------
@@ -5013,8 +5018,10 @@ def create_multiple_component_details(request, normalized_entity_type, user_toke
50135018
merged_data_dict: dict
50145019
The merged dict that contains the entity dict newly created and
50155020
information from user request json that are not stored in Neo4j
5021+
json_data_dict: dict
5022+
The json request dict
50165023
"""
5017-
def after_create(normalized_entity_type, request, user_token, merged_data_dict):
5024+
def after_create(normalized_entity_type, request, user_token, merged_data_dict, json_data_dict):
50185025
try:
50195026
# 'after_create_trigger' and 'after_update_trigger' don't generate property values
50205027
# It just returns the empty dict, no need to assign value
@@ -5024,7 +5031,7 @@ def after_create(normalized_entity_type, request, user_token, merged_data_dict):
50245031
, request=request
50255032
, user_token=user_token
50265033
, existing_data_dict=merged_data_dict
5027-
, new_data_dict={})
5034+
, new_data_dict=json_data_dict)
50285035
except schema_errors.AfterCreateTriggerException:
50295036
# Log the full stack trace, prepend a line with our message
50305037
msg = "The entity has been created, but failed to execute one of the 'after_create_trigger' methods"
@@ -5036,7 +5043,7 @@ def after_create(normalized_entity_type, request, user_token, merged_data_dict):
50365043

50375044

50385045
"""
5039-
Generate 'before_create_triiger' data and create the entity details in Neo4j
5046+
Generate 'before_create_trigger' data and create the entity details in Neo4j
50405047
50415048
Parameters
50425049
----------
@@ -5133,20 +5140,19 @@ def update_entity_details(request, normalized_entity_type, user_token, json_data
51335140
The instance of Flask request passed in from application request
51345141
user_token: str
51355142
The user's globus groups token
5136-
entity_dict: dict
5137-
The entity dict newly updated
5143+
merged_updated_dict: dict
5144+
The merged entity dict containing newly updated values and existing values
5145+
json_data_dict: dict
5146+
The data dict containing new values
51385147
"""
5139-
def after_update(normalized_entity_type, request, user_token, entity_dict):
5148+
def after_update(normalized_entity_type, request, user_token, merged_updated_dict, json_data_dict):
51405149
try:
5141-
# 'after_create_trigger' and 'after_update_trigger' don't generate property values
5142-
# It just returns the empty dict, no need to assign value
5143-
# Use {} sicne no new dict
51445150
schema_manager.generate_triggered_data( trigger_type=TriggerTypeEnum.AFTER_UPDATE
51455151
, normalized_class=normalized_entity_type
51465152
, request=request
51475153
, user_token=user_token
5148-
, existing_data_dict=entity_dict
5149-
, new_data_dict={})
5154+
, existing_data_dict=merged_updated_dict
5155+
, new_data_dict=json_data_dict)
51505156
except schema_errors.AfterUpdateTriggerException:
51515157
# Log the full stack trace, prepend a line with our message
51525158
msg = "The entity information has been updated, but failed to execute one of the 'after_update_trigger' methods"

src/schema/schema_manager.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -410,8 +410,7 @@ def generate_triggered_data(trigger_type: TriggerTypeEnum, normalized_class, req
410410
if trigger_type in [TriggerTypeEnum.AFTER_CREATE, TriggerTypeEnum.AFTER_UPDATE]:
411411
# Only call the triggers if the property key is present in the incoming data
412412
# E.g., 'direct_ancestor_uuid' for Sample, 'dataset_uuids' for Collection
413-
# This `existing_data_dict` is the newly created or updated entity dict
414-
if key in existing_data_dict:
413+
if key in new_data_dict:
415414
trigger_method_name = properties[key][trigger_type.value]
416415

417416
try:
@@ -423,8 +422,7 @@ def generate_triggered_data(trigger_type: TriggerTypeEnum, normalized_class, req
423422
# No return values for 'after_create_trigger' and 'after_update_trigger'
424423
# because the property value is already set and stored in neo4j
425424
# Normally it's building linkages between entity nodes
426-
# Use {} since no incoming new_data_dict
427-
trigger_method_to_call(key, normalized_class, request, user_token, existing_data_dict, {})
425+
trigger_method_to_call(key, normalized_class, request, user_token, existing_data_dict, new_data_dict)
428426
except Exception:
429427
msg = f"Failed to call the {trigger_type.value} method: {trigger_method_name}"
430428
# Log the full stack trace, prepend a line with our message

src/schema/schema_triggers.py

Lines changed: 25 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -848,11 +848,11 @@ def link_dataset_to_direct_ancestors(property_key, normalized_type, request, use
848848
if 'uuid' not in existing_data_dict:
849849
raise KeyError("Missing 'uuid' key in 'existing_data_dict' during calling 'link_dataset_to_direct_ancestors()' trigger method.")
850850

851-
if 'direct_ancestor_uuids' not in existing_data_dict:
852-
raise KeyError("Missing 'direct_ancestor_uuids' key in 'existing_data_dict' during calling 'link_dataset_to_direct_ancestors()' trigger method.")
851+
if 'direct_ancestor_uuids' not in new_data_dict:
852+
raise KeyError("Missing 'direct_ancestor_uuids' key in 'new_data_dict' during calling 'link_dataset_to_direct_ancestors()' trigger method.")
853853

854854
dataset_uuid = existing_data_dict['uuid']
855-
direct_ancestor_uuids = existing_data_dict['direct_ancestor_uuids']
855+
direct_ancestor_uuids = new_data_dict['direct_ancestor_uuids']
856856

857857
# Generate property values for Activity node
858858
activity_data_dict = schema_manager.generate_activity_data(normalized_type, request, user_token, existing_data_dict)
@@ -891,11 +891,11 @@ def link_collection_to_datasets(property_key, normalized_type, request, user_tok
891891
if 'uuid' not in existing_data_dict:
892892
raise KeyError("Missing 'uuid' key in 'existing_data_dict' during calling 'link_collection_to_datasets()' trigger method.")
893893

894-
if 'dataset_uuids' not in existing_data_dict:
895-
raise KeyError("Missing 'dataset_uuids' key in 'existing_data_dict' during calling 'link_collection_to_datasets()' trigger method.")
894+
if 'dataset_uuids' not in new_data_dict:
895+
raise KeyError("Missing 'dataset_uuids' key in 'new_data_dict' during calling 'link_collection_to_datasets()' trigger method.")
896896

897897
collection_uuid = existing_data_dict['uuid']
898-
dataset_uuids = existing_data_dict['dataset_uuids']
898+
dataset_uuids = new_data_dict['dataset_uuids']
899899

900900
try:
901901
# Create a linkage (without an Activity node) between the Collection node and each Dataset it contains.
@@ -1022,14 +1022,14 @@ def link_to_previous_revision(property_key, normalized_type, request, user_token
10221022
if 'uuid' not in existing_data_dict:
10231023
raise KeyError("Missing 'uuid' key in 'existing_data_dict' during calling 'link_to_previous_revision()' trigger method.")
10241024

1025-
if 'previous_revision_uuid' not in existing_data_dict:
1026-
raise KeyError("Missing 'previous_revision_uuid' key in 'existing_data_dict' during calling 'link_to_previous_revision()' trigger method.")
1025+
if 'previous_revision_uuid' not in new_data_dict:
1026+
raise KeyError("Missing 'previous_revision_uuid' key in 'new_data_dict' during calling 'link_to_previous_revision()' trigger method.")
10271027

10281028
entity_uuid = existing_data_dict['uuid']
1029-
if isinstance(existing_data_dict['previous_revision_uuid'], list):
1030-
previous_uuid = existing_data_dict['previous_revision_uuid']
1029+
if isinstance(new_data_dict['previous_revision_uuid'], list):
1030+
previous_uuid = new_data_dict['previous_revision_uuid']
10311031
else:
1032-
previous_uuid = [existing_data_dict['previous_revision_uuid']]
1032+
previous_uuid = [new_data_dict['previous_revision_uuid']]
10331033

10341034
# Create a revision relationship from this new Dataset node to its previous revision Dataset node in Neo4j
10351035
try:
@@ -1743,8 +1743,6 @@ def delete_thumbnail_file(property_key, normalized_type, request, user_token, ex
17431743
A merged dictionary that contains all possible input data to be used
17441744
"""
17451745
def update_status(property_key, normalized_type, request, user_token, existing_data_dict, new_data_dict):
1746-
set_status_history(property_key, normalized_type, request, user_token, existing_data_dict, new_data_dict)
1747-
17481746
if 'uuid' not in existing_data_dict:
17491747
raise KeyError("Missing 'uuid' key in 'existing_data_dict' during calling 'update_status()' trigger method.")
17501748
uuid = existing_data_dict['uuid']
@@ -1753,6 +1751,8 @@ def update_status(property_key, normalized_type, request, user_token, existing_d
17531751
raise KeyError("Missing 'status' key in 'existing_data_dict' during calling 'update_status()' trigger method.")
17541752
status = existing_data_dict['status']
17551753

1754+
set_status_history(property_key, normalized_type, request, user_token, existing_data_dict, new_data_dict)
1755+
17561756
# Only apply to non-published parent datasets
17571757
if status.lower() != 'published':
17581758
# Only sync the child component datasets status for Multi-Assay Split
@@ -1946,14 +1946,14 @@ def link_sample_to_direct_ancestor(property_key, normalized_type, request, user_
19461946
if 'uuid' not in existing_data_dict:
19471947
raise KeyError("Missing 'uuid' key in 'existing_data_dict' during calling 'link_sample_to_direct_ancestor()' trigger method.")
19481948

1949-
if 'direct_ancestor_uuid' not in existing_data_dict:
1950-
raise KeyError("Missing 'direct_ancestor_uuid' key in 'existing_data_dict' during calling 'link_sample_to_direct_ancestor()' trigger method.")
1949+
if 'direct_ancestor_uuid' not in new_data_dict:
1950+
raise KeyError("Missing 'direct_ancestor_uuid' key in 'new_data_dict' during calling 'link_sample_to_direct_ancestor()' trigger method.")
19511951

19521952
sample_uuid = existing_data_dict['uuid']
19531953

19541954
# Build a list of direct ancestor uuids
19551955
# Only one uuid in the list in this case
1956-
direct_ancestor_uuids = [existing_data_dict['direct_ancestor_uuid']]
1956+
direct_ancestor_uuids = [new_data_dict['direct_ancestor_uuid']]
19571957

19581958
# Generate property values for Activity node
19591959
activity_data_dict = schema_manager.generate_activity_data(normalized_type, request, user_token, existing_data_dict)
@@ -1992,10 +1992,10 @@ def link_publication_to_associated_collection(property_key, normalized_type, req
19921992
if 'uuid' not in existing_data_dict:
19931993
raise KeyError("Missing 'uuid' key in 'existing_data_dict' during calling 'link_publication_to_associated_collection()' trigger method.")
19941994

1995-
if 'associated_collection_uuid' not in existing_data_dict:
1996-
raise KeyError("Missing 'associated_collection_uuid' key in 'existing_data_dict' during calling 'link_publication_to_associated_collection()' trigger method.")
1995+
if 'associated_collection_uuid' not in new_data_dict:
1996+
raise KeyError("Missing 'associated_collection_uuid' key in 'new_data_dict' during calling 'link_publication_to_associated_collection()' trigger method.")
19971997

1998-
associated_collection_uuid = existing_data_dict['associated_collection_uuid']
1998+
associated_collection_uuid = new_data_dict['associated_collection_uuid']
19991999

20002000
# No activity node. We are creating a direct link to the associated collection
20012001

@@ -2203,11 +2203,11 @@ def link_datasets_to_upload(property_key, normalized_type, request, user_token,
22032203
if 'uuid' not in existing_data_dict:
22042204
raise KeyError("Missing 'uuid' key in 'existing_data_dict' during calling 'link_datasets_to_upload()' trigger method.")
22052205

2206-
if 'dataset_uuids_to_link' not in existing_data_dict:
2207-
raise KeyError("Missing 'dataset_uuids_to_link' key in 'existing_data_dict' during calling 'link_datasets_to_upload()' trigger method.")
2206+
if 'dataset_uuids_to_link' not in new_data_dict:
2207+
raise KeyError("Missing 'dataset_uuids_to_link' key in 'new_data_dict' during calling 'link_datasets_to_upload()' trigger method.")
22082208

22092209
upload_uuid = existing_data_dict['uuid']
2210-
dataset_uuids = existing_data_dict['dataset_uuids_to_link']
2210+
dataset_uuids = new_data_dict['dataset_uuids_to_link']
22112211

22122212
try:
22132213
# Create a direct linkage (Dataset) - [:IN_UPLOAD] -> (Submission) for each dataset
@@ -2244,11 +2244,11 @@ def unlink_datasets_from_upload(property_key, normalized_type, request, user_tok
22442244
if 'uuid' not in existing_data_dict:
22452245
raise KeyError("Missing 'uuid' key in 'existing_data_dict' during calling 'unlink_datasets_from_upload()' trigger method.")
22462246

2247-
if 'dataset_uuids_to_unlink' not in existing_data_dict:
2248-
raise KeyError("Missing 'dataset_uuids_to_unlink' key in 'existing_data_dict' during calling 'unlink_datasets_from_upload()' trigger method.")
2247+
if 'dataset_uuids_to_unlink' not in new_data_dict:
2248+
raise KeyError("Missing 'dataset_uuids_to_unlink' key in 'new_data_dict' during calling 'unlink_datasets_from_upload()' trigger method.")
22492249

22502250
upload_uuid = existing_data_dict['uuid']
2251-
dataset_uuids = existing_data_dict['dataset_uuids_to_unlink']
2251+
dataset_uuids = new_data_dict['dataset_uuids_to_unlink']
22522252

22532253
try:
22542254
# Delete the linkage (Dataset) - [:IN_UPLOAD] -> (Upload) for each dataset

0 commit comments

Comments
 (0)