11import unittest
22import os
33import json
4- from shapely .geometry import Polygon , MultiPolygon , Point , LineString , MultiLineString , GeometryCollection
4+ from shapely .geometry import (
5+ Polygon ,
6+ MultiPolygon ,
7+ Point ,
8+ LineString ,
9+ MultiLineString ,
10+ GeometryCollection ,
11+ )
512import pandas as pd
613from unittest .mock import patch , MagicMock
7- from mapswipe_workers .utils .process_mapillary import create_tiles , download_and_process_tile , coordinate_download , geojson_to_polygon , filter_by_timerange , filter_results
14+ from mapswipe_workers .utils .process_mapillary import (
15+ create_tiles ,
16+ download_and_process_tile ,
17+ coordinate_download ,
18+ geojson_to_polygon ,
19+ filter_by_timerange ,
20+ filter_results ,
21+ )
822
923
1024# Assuming create_tiles, download_and_process_tile, and coordinate_download are imported
1125
12- class TestTileGroupingFunctions (unittest .TestCase ):
1326
27+ class TestTileGroupingFunctions (unittest .TestCase ):
1428 @classmethod
1529 def setUpClass (cls ):
1630 with open (
@@ -39,7 +53,9 @@ def setUp(self):
3953 self .token = "test_token"
4054 self .level = 14
4155 self .test_polygon = Polygon ([(0 , 0 ), (1 , 0 ), (1 , 1 ), (0 , 1 )])
42- self .test_multipolygon = MultiPolygon ([self .test_polygon , Polygon ([(2 , 2 ), (3 , 2 ), (3 , 3 ), (2 , 3 )])])
56+ self .test_multipolygon = MultiPolygon (
57+ [self .test_polygon , Polygon ([(2 , 2 ), (3 , 2 ), (3 , 3 ), (2 , 3 )])]
58+ )
4359 self .empty_polygon = Polygon ()
4460 self .empty_geometry = GeometryCollection ()
4561
@@ -67,9 +83,21 @@ def test_geojson_to_polygon_feature_collection_with_multiple_polygons(self):
6783 geojson_data = {
6884 "type" : "FeatureCollection" ,
6985 "features" : [
70- {"type" : "Feature" , "geometry" : {"type" : "Polygon" , "coordinates" : [[(0 , 0 ), (1 , 0 ), (1 , 1 ), (0 , 1 ), (0 , 0 )]]}},
71- {"type" : "Feature" , "geometry" : {"type" : "Polygon" , "coordinates" : [[(2 , 2 ), (3 , 2 ), (3 , 3 ), (2 , 3 ), (2 , 2 )]]}}
72- ]
86+ {
87+ "type" : "Feature" ,
88+ "geometry" : {
89+ "type" : "Polygon" ,
90+ "coordinates" : [[(0 , 0 ), (1 , 0 ), (1 , 1 ), (0 , 1 ), (0 , 0 )]],
91+ },
92+ },
93+ {
94+ "type" : "Feature" ,
95+ "geometry" : {
96+ "type" : "Polygon" ,
97+ "coordinates" : [[(2 , 2 ), (3 , 2 ), (3 , 3 ), (2 , 3 ), (2 , 2 )]],
98+ },
99+ },
100+ ],
73101 }
74102 result = geojson_to_polygon (geojson_data )
75103 self .assertIsInstance (result , MultiPolygon )
@@ -80,8 +108,8 @@ def test_geojson_to_polygon_single_feature_polygon(self):
80108 "type" : "Feature" ,
81109 "geometry" : {
82110 "type" : "Polygon" ,
83- "coordinates" : [[(0 , 0 ), (1 , 0 ), (1 , 1 ), (0 , 1 ), (0 , 0 )]]
84- }
111+ "coordinates" : [[(0 , 0 ), (1 , 0 ), (1 , 1 ), (0 , 1 ), (0 , 0 )]],
112+ },
85113 }
86114 result = geojson_to_polygon (geojson_data )
87115 self .assertIsInstance (result , Polygon )
@@ -93,9 +121,9 @@ def test_geojson_to_polygon_single_feature_multipolygon(self):
93121 "type" : "MultiPolygon" ,
94122 "coordinates" : [
95123 [[(0 , 0 ), (1 , 0 ), (1 , 1 ), (0 , 1 ), (0 , 0 )]],
96- [[(2 , 2 ), (3 , 2 ), (3 , 3 ), (2 , 3 ), (2 , 2 )]]
97- ]
98- }
124+ [[(2 , 2 ), (3 , 2 ), (3 , 3 ), (2 , 3 ), (2 , 2 )]],
125+ ],
126+ },
99127 }
100128 result = geojson_to_polygon (geojson_data )
101129 self .assertIsInstance (result , MultiPolygon )
@@ -105,102 +133,117 @@ def test_geojson_to_polygon_non_polygon_geometry_in_feature_collection(self):
105133 geojson_data = {
106134 "type" : "FeatureCollection" ,
107135 "features" : [
108- {"type" : "Feature" , "geometry" : {"type" : "LineString" , "coordinates" : [(0 , 0 ), (1 , 1 )]}}
109- ]
136+ {
137+ "type" : "Feature" ,
138+ "geometry" : {"type" : "LineString" , "coordinates" : [(0 , 0 ), (1 , 1 )]},
139+ }
140+ ],
110141 }
111142 with self .assertRaises (ValueError ) as context :
112143 geojson_to_polygon (geojson_data )
113144 self .assertEqual (str (context .exception ), "Non-polygon geometries cannot be combined into a MultiPolygon." )
114145
115146 def test_geojson_to_polygon_empty_feature_collection (self ):
116- geojson_data = {
117- "type" : "FeatureCollection" ,
118- "features" : []
119- }
147+ geojson_data = {"type" : "FeatureCollection" , "features" : []}
120148 result = geojson_to_polygon (geojson_data )
121149 self .assertTrue (result .is_empty )
122150
123151 def test_geojson_to_polygon_contribution_geojson (self ):
124152 result = geojson_to_polygon (self .fixture_data )
125153 self .assertIsInstance (result , Polygon )
126154
127- @patch ('mapswipe_workers.utils.process_mapillary.vt2geojson_tools.vt_bytes_to_geojson' )
128- @patch ('mapswipe_workers.utils.process_mapillary.requests.get' )
155+ @patch (
156+ "mapswipe_workers.utils.process_mapillary.vt2geojson_tools.vt_bytes_to_geojson"
157+ )
158+ @patch ("mapswipe_workers.utils.process_mapillary.requests.get" )
129159 def test_download_and_process_tile_success (self , mock_get , mock_vt2geojson ):
130160 # Mock the response from requests.get
131161 mock_response = MagicMock ()
132162 mock_response .status_code = 200
133- mock_response .content = b' mock vector tile data' # Example mock data
163+ mock_response .content = b" mock vector tile data" # Example mock data
134164 mock_get .return_value = mock_response
135165
136166 # Mock the return value of vt_bytes_to_geojson
137167 mock_vt2geojson .return_value = {
138168 "features" : [
139- {"geometry" : {"type" : "Point" , "coordinates" : [0 , 0 ]}, "properties" : {"id" : 1 }}
169+ {
170+ "geometry" : {"type" : "Point" , "coordinates" : [0 , 0 ]},
171+ "properties" : {"id" : 1 },
172+ }
140173 ]
141174 }
142175
143- row = {'x' : 1 , 'y' : 1 , 'z' : 14 }
144- token = ' test_token'
176+ row = {"x" : 1 , "y" : 1 , "z" : 14 }
177+ token = " test_token"
145178
146179 result , failed = download_and_process_tile (row , token )
147180
148181 # Assertions
149182 self .assertIsNone (failed )
150183 self .assertIsInstance (result , pd .DataFrame )
151184 self .assertEqual (len (result ), 1 )
152- self .assertEqual (result [' geometry' ][0 ].wkt , ' POINT (0 0)' )
185+ self .assertEqual (result [" geometry" ][0 ].wkt , " POINT (0 0)" )
153186
154- @patch (' mapswipe_workers.utils.process_mapillary.requests.get' )
187+ @patch (" mapswipe_workers.utils.process_mapillary.requests.get" )
155188 def test_download_and_process_tile_failure (self , mock_get ):
156189 # Mock a failed response
157190 mock_response = MagicMock ()
158191 mock_response .status_code = 500
159192 mock_get .return_value = mock_response
160193
161- row = pd .Series ({'x' : 1 , 'y' : 1 , 'z' : self .level })
194+ row = pd .Series ({"x" : 1 , "y" : 1 , "z" : self .level })
162195 result , failed = download_and_process_tile (row , self .token )
163196
164197 self .assertIsNone (result )
165198 self .assertIsNotNone (failed )
166199
167- @patch (' mapswipe_workers.utils.process_mapillary.download_and_process_tile' )
200+ @patch (" mapswipe_workers.utils.process_mapillary.download_and_process_tile" )
168201 def test_coordinate_download (self , mock_download_and_process_tile ):
169- mock_download_and_process_tile .return_value = (pd .DataFrame ([{"geometry" : None }]), None )
202+ mock_download_and_process_tile .return_value = (
203+ pd .DataFrame ([{"geometry" : None }]),
204+ None ,
205+ )
170206
171- metadata , failed = coordinate_download (self .test_polygon , self .level , self .token )
207+ metadata , failed = coordinate_download (
208+ self .test_polygon , self .level , self .token
209+ )
172210
173211 self .assertIsInstance (metadata , pd .DataFrame )
174212 self .assertTrue (failed .empty )
175213
176- @patch (' mapswipe_workers.utils.process_mapillary.download_and_process_tile' )
214+ @patch (" mapswipe_workers.utils.process_mapillary.download_and_process_tile" )
177215 def test_coordinate_download_with_failures (self , mock_download_and_process_tile ):
178- mock_download_and_process_tile .return_value = (None , pd .Series ({"x" : 1 , "y" : 1 , "z" : self .level }))
216+ mock_download_and_process_tile .return_value = (
217+ None ,
218+ pd .Series ({"x" : 1 , "y" : 1 , "z" : self .level }),
219+ )
179220
180- metadata , failed = coordinate_download (self .test_polygon , self .level , self .token )
221+ metadata , failed = coordinate_download (
222+ self .test_polygon , self .level , self .token
223+ )
181224
182225 self .assertTrue (metadata .empty )
183226 self .assertFalse (failed .empty )
184227
185228 def test_filter_within_time_range (self ):
186- start_time = ' 2016-01-20 00:00:00'
187- end_time = ' 2022-01-21 23:59:59'
229+ start_time = " 2016-01-20 00:00:00"
230+ end_time = " 2022-01-21 23:59:59"
188231 filtered_df = filter_by_timerange (self .fixture_df , start_time , end_time )
189232
190233 self .assertEqual (len (filtered_df ), 3 )
191- self .assertTrue (all (filtered_df [' captured_at' ] >= pd .to_datetime (start_time )))
192- self .assertTrue (all (filtered_df [' captured_at' ] <= pd .to_datetime (end_time )))
234+ self .assertTrue (all (filtered_df [" captured_at" ] >= pd .to_datetime (start_time )))
235+ self .assertTrue (all (filtered_df [" captured_at" ] <= pd .to_datetime (end_time )))
193236
194237 def test_filter_without_end_time (self ):
195- start_time = ' 2020-01-20 00:00:00'
238+ start_time = " 2020-01-20 00:00:00"
196239 filtered_df = filter_by_timerange (self .fixture_df , start_time )
197240
198241 self .assertEqual (len (filtered_df ), 3 )
199- self .assertTrue (all (filtered_df [' captured_at' ] >= pd .to_datetime (start_time )))
242+ self .assertTrue (all (filtered_df [" captured_at" ] >= pd .to_datetime (start_time )))
200243
201244 def test_filter_time_no_data (self ):
202- start_time = ' 2016-01-30 00:00:00'
203- end_time = ' 2016-01-31 00:00:00'
245+ start_time = " 2016-01-30 00:00:00"
246+ end_time = " 2016-01-31 00:00:00"
204247 filtered_df = filter_by_timerange (self .fixture_df , start_time , end_time )
205248 self .assertTrue (filtered_df .empty )
206249
@@ -221,10 +264,13 @@ def test_filter_organization_id(self):
221264 self .assertEqual (len (filtered_df ), 1 )
222265
223266 def test_filter_time_range (self ):
224- start_time = '2016-01-20 00:00:00'
225- end_time = '2022-01-21 23:59:59'
226- filtered_df = filter_results (self .fixture_df , start_time = start_time , end_time = end_time )
267+ start_time = "2016-01-20 00:00:00"
268+ end_time = "2022-01-21 23:59:59"
269+ filtered_df = filter_results (
270+ self .fixture_df , start_time = start_time , end_time = end_time
271+ )
227272 self .assertEqual (len (filtered_df ), 3 )
228273
229- if __name__ == '__main__' :
274+
275+ if __name__ == "__main__" :
230276 unittest .main ()