@@ -1201,9 +1201,7 @@ def upsert_data(self, files, execution_id, **kwargs):
         exec_obj = orchestrator.get_execution_object(execution_id)
 
         # getting the related model schema for the resource
-        original_resource = ResourceBase.objects.filter(pk=exec_obj.input_params.get("resource_pk")).first()
-        self.real_instance = original_resource.get_real_instance()
-        model = ModelSchema.objects.filter(name=original_resource.alternate.split(":")[-1]).first()
+        original_resource, model = self.___get_dynamic_schema(exec_obj)
         if not model:
             raise UpsertException(
12091207 "This dataset does't support updates. Please upload the dataset again to have the upsert operations enabled"
@@ -1244,6 +1242,12 @@ def upsert_data(self, files, execution_id, **kwargs):
12441242 "layer_name" : original_resource .title ,
12451243 }
12461244
1245+ def ___get_dynamic_schema (self , exec_obj ):
1246+ original_resource = ResourceBase .objects .filter (pk = exec_obj .input_params .get ("resource_pk" )).first ()
1247+ self .real_instance = original_resource .get_real_instance ()
1248+ model = ModelSchema .objects .filter (name = original_resource .alternate .split (":" )[- 1 ]).first ()
1249+ return original_resource , model
1250+
12471251 def _commit_upsert (self , model_obj , OriginalResource , upsert_key , layer_iterator ):
12481252 valid_create = 0
12491253 valid_update = 0
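
The new `___get_dynamic_schema` helper resolves the dynamic `ModelSchema` by the bare layer name, which it derives from the resource's `alternate` identifier (`workspace:layer_name`). A minimal sketch of that derivation; the value used here is illustrative and not taken from the patch:

```python
# Sketch only: "geonode:waterways" is a made-up alternate value.
# The ModelSchema lookup in ___get_dynamic_schema keys off the part
# of the alternate that follows the workspace prefix.
alternate = "geonode:waterways"
layer_name = alternate.split(":")[-1]
print(layer_name)  # -> "waterways"
```
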
@@ -1466,6 +1470,15 @@ def refresh_geonode_resource(self, execution_id, asset=None, dataset=None, creat
         self.__fixup_primary_key(dataset)
         return dataset
 
+    def fixup_dynamic_model_fields(self, _exec, files):
1474+ """
1475+ Utility needed during the replace workflow,
1476+ it will sync all the FieldSchema along with the current resource uploaded.
1477+ This is mandatory in order to have a reliable field structure in the DB
1478+ """
+        fields_schema, needed_field_schema = self.__get_new_and_original_schema(files, str(_exec.exec_id))
+        fields_schema.filter(~Q(name__in=(x["name"] for x in needed_field_schema))).delete()
+
 
 @importer_app.task(
     base=ErrorBaseTaskClass,
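
For clarity, the pruning in `fixup_dynamic_model_fields` amounts to a set difference: any `FieldSchema` name that is no longer present in the re-uploaded layer gets deleted. A standalone sketch of that logic without the Django ORM; all names and values below are illustrative and not part of the patch:

```python
# Minimal sketch of the stale-field pruning in fixup_dynamic_model_fields.
# In the real code, the current fields come from a FieldSchema queryset and the
# delete happens via filter(~Q(name__in=...)).delete(); plain lists stand in here.

def stale_field_names(current_fields, needed_field_schema):
    """Return names of fields stored for the dynamic model but missing
    from the newly uploaded layer, i.e. the ones that should be deleted."""
    needed_names = {field["name"] for field in needed_field_schema}
    return [name for name in current_fields if name not in needed_names]

# Example: the replaced layer dropped the "elevation" attribute.
current = ["name", "geometry", "elevation"]
needed = [
    {"name": "name", "class_name": "django.db.models.CharField"},
    {"name": "geometry", "class_name": "django.contrib.gis.db.models.fields.PointField"},
]
print(stale_field_names(current, needed))  # -> ['elevation']
```
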