@@ -595,6 +595,33 @@ def test_aggregate_spatial_geometry_from_node(con100: Connection, get_geometries
595595 }
596596
597597
598+ def test_aggregate_spatial_geometry_url (con100 : Connection ):
599+ cube = con100 .load_collection ("S2" )
600+ result = cube .aggregate_spatial (geometries = "https://example.com/geometry.json" , reducer = "mean" )
601+ assert get_download_graph (result , drop_save_result = True , drop_load_collection = True ) == {
602+ "loadurl1" : {
603+ "process_id" : "load_url" ,
604+ "arguments" : {"url" : "https://example.com/geometry.json" , "format" : "GeoJSON" },
605+ },
606+ "aggregatespatial1" : {
607+ "process_id" : "aggregate_spatial" ,
608+ "arguments" : {
609+ "data" : {"from_node" : "loadcollection1" },
610+ "geometries" : {"from_node" : "loadurl1" },
611+ "reducer" : {
612+ "process_graph" : {
613+ "mean1" : {
614+ "process_id" : "mean" ,
615+ "arguments" : {"data" : {"from_parameter" : "data" }},
616+ "result" : True ,
617+ }
618+ }
619+ },
620+ },
621+ },
622+ }
623+
624+
598625def test_aggregate_spatial_window (con100 : Connection ):
599626 img = con100 .load_collection ("S2" )
600627 size = [5 , 3 ]
@@ -763,21 +790,19 @@ def test_mask_polygon_parameter(con100: Connection):
763790 }
764791
765792
766- def test_mask_polygon_path (con100 : Connection ):
767- img = con100 .load_collection ("S2" )
768- masked = img .mask_polygon (mask = "path/to/polygon.json" )
769- assert sorted (masked .flat_graph ().keys ()) == ["loadcollection1" , "maskpolygon1" , "readvector1" ]
770- assert masked .flat_graph ()["maskpolygon1" ] == {
771- "process_id" : "mask_polygon" ,
772- "arguments" : {
773- "data" : {"from_node" : "loadcollection1" },
774- "mask" : {"from_node" : "readvector1" },
793+ @pytest .mark .parametrize ("path_factory" , [str , pathlib .Path ])
794+ def test_mask_polygon_path (con100 : Connection , path_factory , test_data ):
795+ path = path_factory (test_data .get_path ("geojson/polygon02.json" ))
796+ cube = con100 .load_collection ("S2" )
797+ masked = cube .mask_polygon (mask = path )
798+ assert get_download_graph (masked , drop_save_result = True , drop_load_collection = True ) == {
799+ "maskpolygon1" : {
800+ "process_id" : "mask_polygon" ,
801+ "arguments" : {
802+ "data" : {"from_node" : "loadcollection1" },
803+ "mask" : {"type" : "Polygon" , "coordinates" : [[[3 , 50 ], [4 , 50 ], [4 , 51 ], [3 , 50 ]]]},
804+ },
775805 },
776- "result" : True ,
777- }
778- assert masked .flat_graph ()["readvector1" ] == {
779- "process_id" : "read_vector" ,
780- "arguments" : {"filename" : "path/to/polygon.json" },
781806 }
782807
783808
@@ -1490,18 +1515,19 @@ def test_chunk_polygon_parameter(con100: Connection):
14901515 }
14911516
14921517
1493- def test_chunk_polygon_path (con100 : Connection ):
1518+ @pytest .mark .parametrize ("path_factory" , [str , pathlib .Path ])
1519+ def test_chunk_polygon_path (con100 : Connection , test_data , path_factory ):
1520+ path = path_factory (test_data .get_path ("geojson/polygon02.json" ))
14941521 cube = con100 .load_collection ("S2" )
14951522 process = lambda data : data .run_udf (udf = "myfancycode" , runtime = "Python" )
14961523 with pytest .warns (UserDeprecationWarning , match = "Use `apply_polygon`" ):
1497- result = cube .chunk_polygon (chunks = "path/to/polygon.json" , process = process )
1524+ result = cube .chunk_polygon (chunks = path , process = process )
14981525 assert get_download_graph (result , drop_save_result = True , drop_load_collection = True ) == {
1499- "readvector1" : {"process_id" : "read_vector" , "arguments" : {"filename" : "path/to/polygon.json" }},
15001526 "chunkpolygon1" : {
15011527 "process_id" : "chunk_polygon" ,
15021528 "arguments" : {
15031529 "data" : {"from_node" : "loadcollection1" },
1504- "chunks" : {"from_node" : "readvector1" },
1530+ "chunks" : {"type" : "Polygon" , "coordinates" : [[[3 , 50 ], [4 , 50 ], [4 , 51 ], [3 , 50 ]]]},
15051531 "process" : {
15061532 "process_graph" : {
15071533 "runudf1" : {
@@ -1704,21 +1730,17 @@ def test_apply_polygon_parameter(con100: Connection, geometries_argument, geomet
17041730 ("geometries" , "geometries" ),
17051731 ],
17061732)
1707- def test_apply_polygon_path (con100 : Connection , geometries_argument , geometries_parameter ):
1733+ def test_apply_polygon_path (con100 : Connection , geometries_argument , geometries_parameter , test_data ):
1734+ path = test_data .get_path ("geojson/polygon02.json" )
17081735 cube = con100 .load_collection ("S2" )
17091736 process = UDF (code = "myfancycode" , runtime = "Python" )
1710- result = cube .apply_polygon (** {geometries_argument : "path/to/polygon.json" }, process = process )
1737+ result = cube .apply_polygon (** {geometries_argument : path }, process = process )
17111738 assert get_download_graph (result , drop_save_result = True , drop_load_collection = True ) == {
1712- "readvector1" : {
1713- # TODO #104 #457 get rid of non-standard read_vector
1714- "process_id" : "read_vector" ,
1715- "arguments" : {"filename" : "path/to/polygon.json" },
1716- },
17171739 "applypolygon1" : {
17181740 "process_id" : "apply_polygon" ,
17191741 "arguments" : {
17201742 "data" : {"from_node" : "loadcollection1" },
1721- geometries_parameter : {"from_node" : "readvector1" },
1743+ geometries_parameter : {"type" : "Polygon" , "coordinates" : [[[3 , 50 ], [4 , 50 ], [4 , 51 ], [3 , 50 ]]]},
17221744 "process" : {
17231745 "process_graph" : {
17241746 "runudf1" : {
0 commit comments