Skip to content

Commit dcb6e34

Browse files
Debug and move function to file size
1 parent 36f6365 commit dcb6e34

File tree

1 file changed

+79
-133
lines changed

1 file changed

+79
-133
lines changed

tasks/task_runners.py

Lines changed: 79 additions & 133 deletions
Original file line numberDiff line numberDiff line change
@@ -208,6 +208,19 @@ def stop_task(name):
208208
task.finished_at = timezone.now()
209209
task.save()
210210

211+
def write_file_size(response):
    """Persist the zip-file size of each fetched artifact as an ``.ini`` file.

    For every truthy item in ``response`` (the list of result dicts returned
    by the Raw Data API ``fetch`` calls), writes
    ``<zip filename>_size.ini`` into ``download_dir`` containing a single
    ``[FileInfo]`` section with the ``FileSize`` in bytes.

    Args:
        response: iterable of result dicts, each expected to carry
            ``zip_file_size_bytes`` and ``download_url`` keys; a falsy
            ``response`` or falsy items are skipped silently.

    Note:
        Relies on the enclosing scope for ``LOG`` and ``download_dir``
        (module/closure names not defined here).
    """
    if response:
        LOG.debug(response)
        for item in response:
            if item:
                config = configparser.ConfigParser()
                config["FileInfo"] = {"FileSize": str(item["zip_file_size_bytes"])}
                # Name the .ini after the last path segment of the download URL.
                size_path = join(
                    download_dir, f"{item['download_url'].split('/')[-1]}_size.ini"
                )
                # BUG FIX: the original passed [size_path] (a one-element list)
                # to open(), which raises TypeError — open() needs a path
                # string. The pre-refactor inline copies used the bare path.
                with open(size_path, "w") as configfile:
                    config.write(configfile)
223+
211224
def finish_task(name, created_files=None, response_back=None, planet_file=False):
212225
LOG.debug("Task Finish: {0} for run: {1}".format(name, run_uid))
213226
task = ExportTask.objects.get(run__uid=run_uid, name=name)
@@ -437,18 +450,14 @@ def add_metadata(z, theme):
437450

438451
if geojson:
439452
try:
440-
LOG.debug("Galaxy fetch started geojson for run: {0}".format(run_uid))
453+
LOG.debug(
454+
"Raw Data API fetch started geojson for run: {0}".format(run_uid)
455+
)
441456
response_back = geojson.fetch("geojson", is_hdx_export=True)
442-
for r in response_back:
443-
config = configparser.ConfigParser()
444-
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
445-
size_path = join(
446-
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
447-
)
448-
with open(size_path, "w") as configfile:
449-
config.write(configfile)
450-
451-
LOG.debug("Galaxy fetch ended for geojson run: {0}".format(run_uid))
457+
write_file_size(response_back)
458+
LOG.debug(
459+
"Raw Data API fetch ended for geojson run: {0}".format(run_uid)
460+
)
452461
finish_task("geojson", response_back=response_back)
453462
all_zips += response_back
454463
except Exception as ex:
@@ -457,7 +466,7 @@ def add_metadata(z, theme):
457466

458467
if csv:
459468
try:
460-
LOG.debug("Galaxy fetch started for csv run: {0}".format(run_uid))
469+
LOG.debug("Raw Data API fetch started for csv run: {0}".format(run_uid))
461470
response_back = csv.fetch("csv", is_hdx_export=True)
462471
for r in response_back:
463472
config = configparser.ConfigParser()
@@ -468,7 +477,7 @@ def add_metadata(z, theme):
468477
with open(size_path, "w") as configfile:
469478
config.write(configfile)
470479

471-
LOG.debug("Galaxy fetch ended for csv run: {0}".format(run_uid))
480+
LOG.debug("Raw Data API fetch ended for csv run: {0}".format(run_uid))
472481
finish_task("csv", response_back=response_back)
473482
all_zips += response_back
474483

@@ -480,20 +489,16 @@ def add_metadata(z, theme):
480489
try:
481490
if settings.USE_RAW_DATA_API_FOR_HDX:
482491
LOG.debug(
483-
"Galaxy fetch started for geopackage run: {0}".format(run_uid)
492+
"Raw Data API fetch started for geopackage run: {0}".format(
493+
run_uid
494+
)
484495
)
485496
response_back = geopackage.fetch("gpkg", is_hdx_export=True)
486-
for r in response_back:
487-
config = configparser.ConfigParser()
488-
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
489-
size_path = join(
490-
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
491-
)
492-
with open(size_path, "w") as configfile:
493-
config.write(configfile)
494-
497+
write_file_size(response_back)
495498
LOG.debug(
496-
"Galaxy fetch ended for geopackage run: {0}".format(run_uid)
499+
"Raw Data API fetch ended for geopackage run: {0}".format(
500+
run_uid
501+
)
497502
)
498503
finish_task("geopackage", response_back=response_back)
499504
all_zips += response_back
@@ -532,19 +537,15 @@ def add_metadata(z, theme):
532537
if shp:
533538
try:
534539
if settings.USE_RAW_DATA_API_FOR_HDX:
535-
LOG.debug("Galaxy fetch started for shp run: {0}".format(run_uid))
540+
LOG.debug(
541+
"Raw Data API fetch started for shp run: {0}".format(run_uid)
542+
)
536543

537544
response_back = shp.fetch("shp", is_hdx_export=True)
538-
for r in response_back:
539-
config = configparser.ConfigParser()
540-
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
541-
size_path = join(
542-
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
543-
)
544-
with open(size_path, "w") as configfile:
545-
config.write(configfile)
546-
547-
LOG.debug("Galaxy fetch ended for shp run: {0}".format(run_uid))
545+
write_file_size(response_back)
546+
LOG.debug(
547+
"Raw Data API fetch ended for shp run: {0}".format(run_uid)
548+
)
548549
finish_task("shp", response_back=response_back)
549550
all_zips += response_back
550551
else:
@@ -582,18 +583,14 @@ def add_metadata(z, theme):
582583
if kml:
583584
try:
584585
if settings.USE_RAW_DATA_API_FOR_HDX:
585-
LOG.debug("Galaxy fetch started for kml run: {0}".format(run_uid))
586+
LOG.debug(
587+
"Raw Data API fetch started for kml run: {0}".format(run_uid)
588+
)
586589
response_back = kml.fetch("kml", is_hdx_export=True)
587-
for r in response_back:
588-
config = configparser.ConfigParser()
589-
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
590-
size_path = join(
591-
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
592-
)
593-
with open(size_path, "w") as configfile:
594-
config.write(configfile)
595-
596-
LOG.debug("Galaxy fetch ended for kml run: {0}".format(run_uid))
590+
write_file_size(response_back)
591+
LOG.debug(
592+
"Raw Data API fetch ended for kml run: {0}".format(run_uid)
593+
)
597594
finish_task("kml", response_back=response_back)
598595
all_zips += response_back
599596

@@ -789,95 +786,68 @@ def add_metadata(z, theme):
789786

790787
if geojson:
791788
try:
792-
LOG.debug("Galaxy fetch started for geojson run: {0}".format(run_uid))
789+
LOG.debug(
790+
"Raw Data API fetch started for geojson run: {0}".format(run_uid)
791+
)
793792
all_feature_filter_json = join(
794793
os.getcwd(), "tasks/tests/fixtures/all_features_filters.json"
795794
)
796795
response_back = geojson.fetch(
797796
"geojson", all_feature_filter_json=all_feature_filter_json
798797
)
799-
for r in response_back:
800-
config = configparser.ConfigParser()
801-
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
802-
size_path = join(
803-
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
804-
)
805-
with open(size_path, "w") as configfile:
806-
config.write(configfile)
798+
write_file_size(response_back)
807799

808-
LOG.debug("Galaxy fetch ended for geojson run: {0}".format(run_uid))
800+
LOG.debug(
801+
"Raw Data API fetch ended for geojson run: {0}".format(run_uid)
802+
)
809803
finish_task("geojson", response_back=response_back)
810804
except Exception as ex:
811805
stop_task("geojson")
812806
raise ex
813807

814808
if fgb:
815809
try:
816-
LOG.debug("Galaxy fetch started for fgb run: {0}".format(run_uid))
810+
LOG.debug("Raw Data API fetch started for fgb run: {0}".format(run_uid))
817811
all_feature_filter_json = join(
818812
os.getcwd(), "tasks/tests/fixtures/all_features_filters.json"
819813
)
820814
response_back = fgb.fetch(
821815
"fgb", all_feature_filter_json=all_feature_filter_json
822816
)
823-
for r in response_back:
824-
config = configparser.ConfigParser()
825-
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
826-
size_path = join(
827-
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
828-
)
829-
with open(size_path, "w") as configfile:
830-
config.write(configfile)
831-
832-
LOG.debug("Galaxy fetch ended for fgb run: {0}".format(run_uid))
817+
write_file_size(response_back)
818+
LOG.debug("Raw Data API fetch ended for fgb run: {0}".format(run_uid))
833819
finish_task("fgb", response_back=response_back)
834820
except Exception as ex:
835821
stop_task("fgb")
836822
raise ex
837823

838824
if csv:
839825
try:
840-
LOG.debug("Galaxy fetch started for csv run: {0}".format(run_uid))
826+
LOG.debug("Raw Data API fetch started for csv run: {0}".format(run_uid))
841827
all_feature_filter_json = join(
842828
os.getcwd(), "tasks/tests/fixtures/all_features_filters.json"
843829
)
844830
response_back = csv.fetch(
845831
"csv", all_feature_filter_json=all_feature_filter_json
846832
)
847-
for r in response_back:
848-
config = configparser.ConfigParser()
849-
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
850-
size_path = join(
851-
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
852-
)
853-
with open(size_path, "w") as configfile:
854-
config.write(configfile)
855-
856-
LOG.debug("Galaxy fetch ended for csv run: {0}".format(run_uid))
833+
write_file_size(response_back)
834+
LOG.debug("Raw Data API fetch ended for csv run: {0}".format(run_uid))
857835
finish_task("csv", response_back=response_back)
858836
except Exception as ex:
859837
stop_task("csv")
860838
raise ex
861839

862840
if sql:
863841
try:
864-
LOG.debug("Galaxy fetch started for sql run: {0}".format(run_uid))
842+
LOG.debug("Raw Data API fetch started for sql run: {0}".format(run_uid))
865843
all_feature_filter_json = join(
866844
os.getcwd(), "tasks/tests/fixtures/all_features_filters.json"
867845
)
868846
response_back = sql.fetch(
869847
"sql", all_feature_filter_json=all_feature_filter_json
870848
)
871-
for r in response_back:
872-
config = configparser.ConfigParser()
873-
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
874-
size_path = join(
875-
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
876-
)
877-
with open(size_path, "w") as configfile:
878-
config.write(configfile)
879-
880-
LOG.debug("Galaxy fetch ended for sql run: {0}".format(run_uid))
849+
write_file_size(response_back)
850+
LOG.debug("Raw Data API fetch ended for sql run: {0}".format(run_uid))
881851
finish_task("sql", response_back=response_back)
882852
except Exception as ex:
883853
stop_task("sql")
@@ -886,69 +856,49 @@ def add_metadata(z, theme):
886856
if geopackage:
887857
try:
888858
LOG.debug(
889-
"Galaxy fetch started for geopackage run: {0}".format(run_uid)
859+
"Raw Data API fetch started for geopackage run: {0}".format(run_uid)
890860
)
891861
all_feature_filter_json = join(
892862
os.getcwd(), "tasks/tests/fixtures/all_features_filters.json"
893863
)
894864
response_back = geopackage.fetch(
895865
"gpkg", all_feature_filter_json=all_feature_filter_json
896866
)
897-
for r in response_back:
898-
config = configparser.ConfigParser()
899-
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
900-
size_path = join(
901-
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
902-
)
903-
with open(size_path, "w") as configfile:
904-
config.write(configfile)
905-
906-
LOG.debug("Galaxy fetch ended for geopackage run: {0}".format(run_uid))
867+
write_file_size(response_back)
868+
LOG.debug(
869+
"Raw Data API fetch ended for geopackage run: {0}".format(run_uid)
870+
)
907871
finish_task("geopackage", response_back=response_back)
908872
except Exception as ex:
909873
stop_task("geopackage")
910874
raise ex
911875

912876
if shp:
913877
try:
914-
LOG.debug("Galaxy fetch started for shp run: {0}".format(run_uid))
878+
LOG.debug(
879+
"Raw Data API fetch started for shp run: {0}".format(run_uid)
880+
)
915881
response_back = shp.fetch(
916882
"shp", all_feature_filter_json=all_feature_filter_json
917883
)
918-
for r in response_back:
919-
config = configparser.ConfigParser()
920-
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
921-
size_path = join(
922-
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
923-
)
924-
with open(size_path, "w") as configfile:
925-
config.write(configfile)
926-
927-
LOG.debug("Galaxy fetch ended for shp run: {0}".format(run_uid))
884+
write_file_size(response_back)
885+
LOG.debug("Raw Data API fetch ended for shp run: {0}".format(run_uid))
928886
finish_task("shp", response_back=response_back)
929887
except Exception as ex:
930888
stop_task("shp")
931889
raise ex
932890

933891
if kml:
934892
try:
935-
LOG.debug("Galaxy fetch started for kml run: {0}".format(run_uid))
893+
LOG.debug("Raw Data API fetch started for kml run: {0}".format(run_uid))
936894
all_feature_filter_json = join(
937895
os.getcwd(), "tasks/tests/fixtures/all_features_filters.json"
938896
)
939897
response_back = kml.fetch(
940898
"kml", all_feature_filter_json=all_feature_filter_json
941899
)
942-
for r in response_back:
943-
config = configparser.ConfigParser()
944-
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
945-
size_path = join(
946-
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
947-
)
948-
with open(size_path, "w") as configfile:
949-
config.write(configfile)
950-
951-
LOG.debug("Galaxy fetch ended for kml run: {0}".format(run_uid))
900+
write_file_size(response_back)
901+
LOG.debug("Raw Data API fetch ended for kml run: {0}".format(run_uid))
952902
finish_task("kml", response_back=response_back)
953903

954904
except Exception as ex:
@@ -965,7 +915,9 @@ def add_metadata(z, theme):
965915
access_token=settings.RAW_DATA_ACCESS_TOKEN,
966916
)
967917
start_task("mbtiles")
968-
LOG.debug("Galaxy fetch started for mbtiles run: {0}".format(run_uid))
918+
LOG.debug(
919+
"Raw Data API fetch started for mbtiles run: {0}".format(run_uid)
920+
)
969921
all_feature_filter_json = join(
970922
os.getcwd(), "tasks/tests/fixtures/all_features_filters.json"
971923
)
@@ -975,16 +927,10 @@ def add_metadata(z, theme):
975927
min_zoom=job.mbtiles_minzoom,
976928
max_zoom=job.mbtiles_maxzoom,
977929
)
978-
for r in response_back:
979-
config = configparser.ConfigParser()
980-
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
981-
size_path = join(
982-
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
983-
)
984-
with open(size_path, "w") as configfile:
985-
config.write(configfile)
986-
987-
LOG.debug("Galaxy fetch ended for mbtiles run: {0}".format(run_uid))
930+
write_file_size(response_back)
931+
LOG.debug(
932+
"Raw Data API fetch ended for mbtiles run: {0}".format(run_uid)
933+
)
988934
finish_task("mbtiles", response_back=response_back)
989935

990936
except Exception as ex:

0 commit comments

Comments
 (0)