@@ -208,6 +208,19 @@ def stop_task(name):
208
208
task .finished_at = timezone .now ()
209
209
task .save ()
210
210
211
def write_file_size(response):
    """Persist a ``<zip name>_size.ini`` sidecar file for each fetched export.

    For every item in *response* (the list of result dicts returned by a
    Raw Data API ``fetch`` call), write an INI file in ``download_dir``
    named after the last path segment of the item's ``download_url``,
    containing a ``[FileInfo]`` section with the zip size in bytes.

    Args:
        response: iterable of dicts; each truthy item is expected to carry
            ``zip_file_size_bytes`` and ``download_url`` keys. Falsy
            responses/items are skipped silently (best-effort metadata).

    NOTE(review): ``download_dir`` and ``LOG`` are free variables resolved
    from the enclosing task scope — confirm they are bound before this
    helper is called.
    """
    if not response:
        return
    LOG.debug(response)
    for item in response:
        if item:
            config = configparser.ConfigParser()
            config["FileInfo"] = {"FileSize": str(item["zip_file_size_bytes"])}
            size_path = join(
                download_dir, f"{item['download_url'].split('/')[-1]}_size.ini"
            )
            # Bug fix: open() takes a single path, not a list — the original
            # `open([size_path], "w")` raised TypeError on every call.
            with open(size_path, "w") as configfile:
                config.write(configfile)
211
224
def finish_task (name , created_files = None , response_back = None , planet_file = False ):
212
225
LOG .debug ("Task Finish: {0} for run: {1}" .format (name , run_uid ))
213
226
task = ExportTask .objects .get (run__uid = run_uid , name = name )
@@ -437,18 +450,14 @@ def add_metadata(z, theme):
437
450
438
451
if geojson :
439
452
try :
440
- LOG .debug ("Galaxy fetch started geojson for run: {0}" .format (run_uid ))
453
+ LOG .debug (
454
+ "Raw Data API fetch started geojson for run: {0}" .format (run_uid )
455
+ )
441
456
response_back = geojson .fetch ("geojson" , is_hdx_export = True )
442
- for r in response_back :
443
- config = configparser .ConfigParser ()
444
- config ["FileInfo" ] = {"FileSize" : str (r ["zip_file_size_bytes" ])}
445
- size_path = join (
446
- download_dir , f"{ r ['download_url' ].split ('/' )[- 1 ]} _size.ini"
447
- )
448
- with open (size_path , "w" ) as configfile :
449
- config .write (configfile )
450
-
451
- LOG .debug ("Galaxy fetch ended for geojson run: {0}" .format (run_uid ))
457
+ write_file_size (response_back )
458
+ LOG .debug (
459
+ "Raw Data API fetch ended for geojson run: {0}" .format (run_uid )
460
+ )
452
461
finish_task ("geojson" , response_back = response_back )
453
462
all_zips += response_back
454
463
except Exception as ex :
@@ -457,7 +466,7 @@ def add_metadata(z, theme):
457
466
458
467
if csv :
459
468
try :
460
- LOG .debug ("Galaxy fetch started for csv run: {0}" .format (run_uid ))
469
+ LOG .debug ("Raw Data API fetch started for csv run: {0}" .format (run_uid ))
461
470
response_back = csv .fetch ("csv" , is_hdx_export = True )
462
471
for r in response_back :
463
472
config = configparser .ConfigParser ()
@@ -468,7 +477,7 @@ def add_metadata(z, theme):
468
477
with open (size_path , "w" ) as configfile :
469
478
config .write (configfile )
470
479
471
- LOG .debug ("Galaxy fetch ended for csv run: {0}" .format (run_uid ))
480
+ LOG .debug ("Raw Data API fetch ended for csv run: {0}" .format (run_uid ))
472
481
finish_task ("csv" , response_back = response_back )
473
482
all_zips += response_back
474
483
@@ -480,20 +489,16 @@ def add_metadata(z, theme):
480
489
try :
481
490
if settings .USE_RAW_DATA_API_FOR_HDX :
482
491
LOG .debug (
483
- "Galaxy fetch started for geopackage run: {0}" .format (run_uid )
492
+ "Raw Data API fetch started for geopackage run: {0}" .format (
493
+ run_uid
494
+ )
484
495
)
485
496
response_back = geopackage .fetch ("gpkg" , is_hdx_export = True )
486
- for r in response_back :
487
- config = configparser .ConfigParser ()
488
- config ["FileInfo" ] = {"FileSize" : str (r ["zip_file_size_bytes" ])}
489
- size_path = join (
490
- download_dir , f"{ r ['download_url' ].split ('/' )[- 1 ]} _size.ini"
491
- )
492
- with open (size_path , "w" ) as configfile :
493
- config .write (configfile )
494
-
497
+ write_file_size (response_back )
495
498
LOG .debug (
496
- "Galaxy fetch ended for geopackage run: {0}" .format (run_uid )
499
+ "Raw Data API fetch ended for geopackage run: {0}" .format (
500
+ run_uid
501
+ )
497
502
)
498
503
finish_task ("geopackage" , response_back = response_back )
499
504
all_zips += response_back
@@ -532,19 +537,15 @@ def add_metadata(z, theme):
532
537
if shp :
533
538
try :
534
539
if settings .USE_RAW_DATA_API_FOR_HDX :
535
- LOG .debug ("Galaxy fetch started for shp run: {0}" .format (run_uid ))
540
+ LOG .debug (
541
+ "Raw Data API fetch started for shp run: {0}" .format (run_uid )
542
+ )
536
543
537
544
response_back = shp .fetch ("shp" , is_hdx_export = True )
538
- for r in response_back :
539
- config = configparser .ConfigParser ()
540
- config ["FileInfo" ] = {"FileSize" : str (r ["zip_file_size_bytes" ])}
541
- size_path = join (
542
- download_dir , f"{ r ['download_url' ].split ('/' )[- 1 ]} _size.ini"
543
- )
544
- with open (size_path , "w" ) as configfile :
545
- config .write (configfile )
546
-
547
- LOG .debug ("Galaxy fetch ended for shp run: {0}" .format (run_uid ))
545
+ write_file_size (response_back )
546
+ LOG .debug (
547
+ "Raw Data API fetch ended for shp run: {0}" .format (run_uid )
548
+ )
548
549
finish_task ("shp" , response_back = response_back )
549
550
all_zips += response_back
550
551
else :
@@ -582,18 +583,14 @@ def add_metadata(z, theme):
582
583
if kml :
583
584
try :
584
585
if settings .USE_RAW_DATA_API_FOR_HDX :
585
- LOG .debug ("Galaxy fetch started for kml run: {0}" .format (run_uid ))
586
+ LOG .debug (
587
+ "Raw Data API fetch started for kml run: {0}" .format (run_uid )
588
+ )
586
589
response_back = kml .fetch ("kml" , is_hdx_export = True )
587
- for r in response_back :
588
- config = configparser .ConfigParser ()
589
- config ["FileInfo" ] = {"FileSize" : str (r ["zip_file_size_bytes" ])}
590
- size_path = join (
591
- download_dir , f"{ r ['download_url' ].split ('/' )[- 1 ]} _size.ini"
592
- )
593
- with open (size_path , "w" ) as configfile :
594
- config .write (configfile )
595
-
596
- LOG .debug ("Galaxy fetch ended for kml run: {0}" .format (run_uid ))
590
+ write_file_size (response_back )
591
+ LOG .debug (
592
+ "Raw Data API fetch ended for kml run: {0}" .format (run_uid )
593
+ )
597
594
finish_task ("kml" , response_back = response_back )
598
595
all_zips += response_back
599
596
@@ -789,95 +786,68 @@ def add_metadata(z, theme):
789
786
790
787
if geojson :
791
788
try :
792
- LOG .debug ("Galaxy fetch started for geojson run: {0}" .format (run_uid ))
789
+ LOG .debug (
790
+ "Raw Data API fetch started for geojson run: {0}" .format (run_uid )
791
+ )
793
792
all_feature_filter_json = join (
794
793
os .getcwd (), "tasks/tests/fixtures/all_features_filters.json"
795
794
)
796
795
response_back = geojson .fetch (
797
796
"geojson" , all_feature_filter_json = all_feature_filter_json
798
797
)
799
- for r in response_back :
800
- config = configparser .ConfigParser ()
801
- config ["FileInfo" ] = {"FileSize" : str (r ["zip_file_size_bytes" ])}
802
- size_path = join (
803
- download_dir , f"{ r ['download_url' ].split ('/' )[- 1 ]} _size.ini"
804
- )
805
- with open (size_path , "w" ) as configfile :
806
- config .write (configfile )
798
+ write_file_size (response_back )
807
799
808
- LOG .debug ("Galaxy fetch ended for geojson run: {0}" .format (run_uid ))
800
+ LOG .debug (
801
+ "Raw Data API fetch ended for geojson run: {0}" .format (run_uid )
802
+ )
809
803
finish_task ("geojson" , response_back = response_back )
810
804
except Exception as ex :
811
805
stop_task ("geojson" )
812
806
raise ex
813
807
814
808
if fgb :
815
809
try :
816
- LOG .debug ("Galaxy fetch started for fgb run: {0}" .format (run_uid ))
810
+ LOG .debug ("Raw Data API fetch started for fgb run: {0}" .format (run_uid ))
817
811
all_feature_filter_json = join (
818
812
os .getcwd (), "tasks/tests/fixtures/all_features_filters.json"
819
813
)
820
814
response_back = fgb .fetch (
821
815
"fgb" , all_feature_filter_json = all_feature_filter_json
822
816
)
823
- for r in response_back :
824
- config = configparser .ConfigParser ()
825
- config ["FileInfo" ] = {"FileSize" : str (r ["zip_file_size_bytes" ])}
826
- size_path = join (
827
- download_dir , f"{ r ['download_url' ].split ('/' )[- 1 ]} _size.ini"
828
- )
829
- with open (size_path , "w" ) as configfile :
830
- config .write (configfile )
831
-
832
- LOG .debug ("Galaxy fetch ended for fgb run: {0}" .format (run_uid ))
817
+ write_file_size (response_back )
818
+ LOG .debug ("Raw Data API fetch ended for fgb run: {0}" .format (run_uid ))
833
819
finish_task ("fgb" , response_back = response_back )
834
820
except Exception as ex :
835
821
stop_task ("fgb" )
836
822
raise ex
837
823
838
824
if csv :
839
825
try :
840
- LOG .debug ("Galaxy fetch started for csv run: {0}" .format (run_uid ))
826
+ LOG .debug ("Raw Data API fetch started for csv run: {0}" .format (run_uid ))
841
827
all_feature_filter_json = join (
842
828
os .getcwd (), "tasks/tests/fixtures/all_features_filters.json"
843
829
)
844
830
response_back = csv .fetch (
845
831
"csv" , all_feature_filter_json = all_feature_filter_json
846
832
)
847
- for r in response_back :
848
- config = configparser .ConfigParser ()
849
- config ["FileInfo" ] = {"FileSize" : str (r ["zip_file_size_bytes" ])}
850
- size_path = join (
851
- download_dir , f"{ r ['download_url' ].split ('/' )[- 1 ]} _size.ini"
852
- )
853
- with open (size_path , "w" ) as configfile :
854
- config .write (configfile )
855
-
856
- LOG .debug ("Galaxy fetch ended for csv run: {0}" .format (run_uid ))
833
+ write_file_size (response_back )
834
+ LOG .debug ("Raw Data API fetch ended for csv run: {0}" .format (run_uid ))
857
835
finish_task ("csv" , response_back = response_back )
858
836
except Exception as ex :
859
837
stop_task ("csv" )
860
838
raise ex
861
839
862
840
if sql :
863
841
try :
864
- LOG .debug ("Galaxy fetch started for sql run: {0}" .format (run_uid ))
842
+ LOG .debug ("Raw Data API fetch started for sql run: {0}" .format (run_uid ))
865
843
all_feature_filter_json = join (
866
844
os .getcwd (), "tasks/tests/fixtures/all_features_filters.json"
867
845
)
868
846
response_back = sql .fetch (
869
847
"sql" , all_feature_filter_json = all_feature_filter_json
870
848
)
871
- for r in response_back :
872
- config = configparser .ConfigParser ()
873
- config ["FileInfo" ] = {"FileSize" : str (r ["zip_file_size_bytes" ])}
874
- size_path = join (
875
- download_dir , f"{ r ['download_url' ].split ('/' )[- 1 ]} _size.ini"
876
- )
877
- with open (size_path , "w" ) as configfile :
878
- config .write (configfile )
879
-
880
- LOG .debug ("Galaxy fetch ended for sql run: {0}" .format (run_uid ))
849
+ write_file_size (response_back )
850
+ LOG .debug ("Raw Data API fetch ended for sql run: {0}" .format (run_uid ))
881
851
finish_task ("sql" , response_back = response_back )
882
852
except Exception as ex :
883
853
stop_task ("sql" )
@@ -886,69 +856,49 @@ def add_metadata(z, theme):
886
856
if geopackage :
887
857
try :
888
858
LOG .debug (
889
- "Galaxy fetch started for geopackage run: {0}" .format (run_uid )
859
+ "Raw Data API fetch started for geopackage run: {0}" .format (run_uid )
890
860
)
891
861
all_feature_filter_json = join (
892
862
os .getcwd (), "tasks/tests/fixtures/all_features_filters.json"
893
863
)
894
864
response_back = geopackage .fetch (
895
865
"gpkg" , all_feature_filter_json = all_feature_filter_json
896
866
)
897
- for r in response_back :
898
- config = configparser .ConfigParser ()
899
- config ["FileInfo" ] = {"FileSize" : str (r ["zip_file_size_bytes" ])}
900
- size_path = join (
901
- download_dir , f"{ r ['download_url' ].split ('/' )[- 1 ]} _size.ini"
902
- )
903
- with open (size_path , "w" ) as configfile :
904
- config .write (configfile )
905
-
906
- LOG .debug ("Galaxy fetch ended for geopackage run: {0}" .format (run_uid ))
867
+ write_file_size (response_back )
868
+ LOG .debug (
869
+ "Raw Data API fetch ended for geopackage run: {0}" .format (run_uid )
870
+ )
907
871
finish_task ("geopackage" , response_back = response_back )
908
872
except Exception as ex :
909
873
stop_task ("geopackage" )
910
874
raise ex
911
875
912
876
if shp :
913
877
try :
914
- LOG .debug ("Galaxy fetch started for shp run: {0}" .format (run_uid ))
878
+ LOG .debug (
879
+ "Raw Data API fetch started for shp run: {0}" .format (run_uid )
880
+ )
915
881
response_back = shp .fetch (
916
882
"shp" , all_feature_filter_json = all_feature_filter_json
917
883
)
918
- for r in response_back :
919
- config = configparser .ConfigParser ()
920
- config ["FileInfo" ] = {"FileSize" : str (r ["zip_file_size_bytes" ])}
921
- size_path = join (
922
- download_dir , f"{ r ['download_url' ].split ('/' )[- 1 ]} _size.ini"
923
- )
924
- with open (size_path , "w" ) as configfile :
925
- config .write (configfile )
926
-
927
- LOG .debug ("Galaxy fetch ended for shp run: {0}" .format (run_uid ))
884
+ write_file_size (response_back )
885
+ LOG .debug ("Raw Data API fetch ended for shp run: {0}" .format (run_uid ))
928
886
finish_task ("shp" , response_back = response_back )
929
887
except Exception as ex :
930
888
stop_task ("shp" )
931
889
raise ex
932
890
933
891
if kml :
934
892
try :
935
- LOG .debug ("Galaxy fetch started for kml run: {0}" .format (run_uid ))
893
+ LOG .debug ("Raw Data API fetch started for kml run: {0}" .format (run_uid ))
936
894
all_feature_filter_json = join (
937
895
os .getcwd (), "tasks/tests/fixtures/all_features_filters.json"
938
896
)
939
897
response_back = kml .fetch (
940
898
"kml" , all_feature_filter_json = all_feature_filter_json
941
899
)
942
- for r in response_back :
943
- config = configparser .ConfigParser ()
944
- config ["FileInfo" ] = {"FileSize" : str (r ["zip_file_size_bytes" ])}
945
- size_path = join (
946
- download_dir , f"{ r ['download_url' ].split ('/' )[- 1 ]} _size.ini"
947
- )
948
- with open (size_path , "w" ) as configfile :
949
- config .write (configfile )
950
-
951
- LOG .debug ("Galaxy fetch ended for kml run: {0}" .format (run_uid ))
900
+ write_file_size (response_back )
901
+ LOG .debug ("Raw Data API fetch ended for kml run: {0}" .format (run_uid ))
952
902
finish_task ("kml" , response_back = response_back )
953
903
954
904
except Exception as ex :
@@ -965,7 +915,9 @@ def add_metadata(z, theme):
965
915
access_token = settings .RAW_DATA_ACCESS_TOKEN ,
966
916
)
967
917
start_task ("mbtiles" )
968
- LOG .debug ("Galaxy fetch started for mbtiles run: {0}" .format (run_uid ))
918
+ LOG .debug (
919
+ "Raw Data API fetch started for mbtiles run: {0}" .format (run_uid )
920
+ )
969
921
all_feature_filter_json = join (
970
922
os .getcwd (), "tasks/tests/fixtures/all_features_filters.json"
971
923
)
@@ -975,16 +927,10 @@ def add_metadata(z, theme):
975
927
min_zoom = job .mbtiles_minzoom ,
976
928
max_zoom = job .mbtiles_maxzoom ,
977
929
)
978
- for r in response_back :
979
- config = configparser .ConfigParser ()
980
- config ["FileInfo" ] = {"FileSize" : str (r ["zip_file_size_bytes" ])}
981
- size_path = join (
982
- download_dir , f"{ r ['download_url' ].split ('/' )[- 1 ]} _size.ini"
983
- )
984
- with open (size_path , "w" ) as configfile :
985
- config .write (configfile )
986
-
987
- LOG .debug ("Galaxy fetch ended for mbtiles run: {0}" .format (run_uid ))
930
+ write_file_size (response_back )
931
+ LOG .debug (
932
+ "Raw Data API fetch ended for mbtiles run: {0}" .format (run_uid )
933
+ )
988
934
finish_task ("mbtiles" , response_back = response_back )
989
935
990
936
except Exception as ex :
0 commit comments