diff --git a/mapillary_tools/camm/camm_builder.py b/mapillary_tools/camm/camm_builder.py index 4eb62ae5e..6f4c9b08f 100644 --- a/mapillary_tools/camm/camm_builder.py +++ b/mapillary_tools/camm/camm_builder.py @@ -43,12 +43,12 @@ def _create_edit_list( ] break - assert ( - 0 <= points[0].time - ), f"expect non-negative point time but got {points[0]}" - assert ( - points[0].time <= points[-1].time - ), f"expect points to be sorted but got first point {points[0]} and last point {points[-1]}" + assert 0 <= points[0].time, ( + f"expect non-negative point time but got {points[0]}" + ) + assert points[0].time <= points[-1].time, ( + f"expect points to be sorted but got first point {points[0]} and last point {points[-1]}" + ) if idx == 0: if 0 < points[0].time: @@ -92,9 +92,9 @@ def convert_points_to_raw_samples( timedelta = int((points[idx + 1].time - point.time) * timescale) else: timedelta = 0 - assert ( - 0 <= timedelta <= builder.UINT32_MAX - ), f"expected timedelta {timedelta} between {points[idx]} and {points[idx + 1]} with timescale {timescale} to be <= UINT32_MAX" + assert 0 <= timedelta <= builder.UINT32_MAX, ( + f"expected timedelta {timedelta} between {points[idx]} and {points[idx + 1]} with timescale {timescale} to be <= UINT32_MAX" + ) yield sample_parser.RawSample( # will update later diff --git a/mapillary_tools/exiftool_read_video.py b/mapillary_tools/exiftool_read_video.py index 1ec7714cd..a8ee18423 100644 --- a/mapillary_tools/exiftool_read_video.py +++ b/mapillary_tools/exiftool_read_video.py @@ -5,6 +5,7 @@ import xml.etree.ElementTree as ET from . import exif_read, exiftool_read, geo +from .telemetry import GPSFix, GPSPoint MAX_TRACK_ID = 10 @@ -87,7 +88,7 @@ def _aggregate_gps_track( alt_tag: T.Optional[str] = None, direction_tag: T.Optional[str] = None, ground_speed_tag: T.Optional[str] = None, -) -> T.List[geo.PointWithFix]: +) -> T.List[GPSPoint]: """ Aggregate all GPS data by the tags. It requires lat, lon to be present, and their lengths must match. 
@@ -173,15 +174,16 @@ def _aggregate_float_values_same_length( if timestamp is None or lon is None or lat is None: continue track.append( - geo.PointWithFix( + GPSPoint( time=timestamp, lon=lon, lat=lat, alt=alt, angle=direction, - gps_fix=None, - gps_precision=None, - gps_ground_speed=ground_speed, + epoch_time=None, + fix=None, + precision=None, + ground_speed=ground_speed, ) ) @@ -230,8 +232,8 @@ def _aggregate_gps_track_by_sample_time( ground_speed_tag: T.Optional[str] = None, gps_fix_tag: T.Optional[str] = None, gps_precision_tag: T.Optional[str] = None, -) -> T.List[geo.PointWithFix]: - track: T.List[geo.PointWithFix] = [] +) -> T.List[GPSPoint]: + track: T.List[GPSPoint] = [] expanded_gps_fix_tag = None if gps_fix_tag is not None: @@ -249,7 +251,7 @@ def _aggregate_gps_track_by_sample_time( gps_fix_texts = texts_by_tag.get(expanded_gps_fix_tag) if gps_fix_texts: try: - gps_fix = geo.GPSFix(int(gps_fix_texts[0])) + gps_fix = GPSFix(int(gps_fix_texts[0])) except ValueError: gps_fix = None @@ -280,7 +282,7 @@ def _aggregate_gps_track_by_sample_time( for idx, point in enumerate(points): point.time = sample_time + idx * avg_timedelta track.extend( - dataclasses.replace(point, gps_fix=gps_fix, gps_precision=gps_precision) + dataclasses.replace(point, fix=gps_fix, precision=gps_precision) for point in points ) @@ -355,7 +357,7 @@ def extract_model(self) -> T.Optional[str]: _, model = self._extract_make_and_model() return model - def _extract_gps_track_from_track(self) -> T.List[geo.PointWithFix]: + def _extract_gps_track_from_track(self) -> T.List[GPSPoint]: for track_id in range(1, MAX_TRACK_ID + 1): track_ns = f"Track{track_id}" if self._all_tags_exists( @@ -397,7 +399,7 @@ def _all_tags_exists(self, tags: T.Set[str]) -> bool: def _extract_gps_track_from_quicktime( self, namespace: str = "QuickTime" - ) -> T.List[geo.PointWithFix]: + ) -> T.List[GPSPoint]: if not self._all_tags_exists( { expand_tag(f"{namespace}:GPSDateTime"), diff --git a/mapillary_tools/ffmpeg.py b/mapillary_tools/ffmpeg.py index 03d4f40b9..b7215642c 100644 --- a/mapillary_tools/ffmpeg.py +++ b/mapillary_tools/ffmpeg.py @@ -202,10 +202,10 @@ def generate_binary_search(self, sorted_frame_indices: T.Sequence[int]) -> str: return "0" if length == 1: - return f"eq(n\\,{ sorted_frame_indices[0] })" + return f"eq(n\\,{sorted_frame_indices[0]})" middle = length // 2 - return f"if(lt(n\\,{ sorted_frame_indices[middle] })\\,{ self.generate_binary_search(sorted_frame_indices[:middle]) }\\,{ self.generate_binary_search(sorted_frame_indices[middle:]) })" + return f"if(lt(n\\,{sorted_frame_indices[middle]})\\,{self.generate_binary_search(sorted_frame_indices[:middle])}\\,{self.generate_binary_search(sorted_frame_indices[middle:])})" def extract_specified_frames( self, diff --git a/mapillary_tools/geo.py b/mapillary_tools/geo.py index b4e784087..bf5691cdf 100644 --- a/mapillary_tools/geo.py +++ b/mapillary_tools/geo.py @@ -6,7 +6,6 @@ import itertools import math import typing as T -from enum import Enum, unique WGS84_a = 6378137.0 WGS84_a_SQ = WGS84_a**2 @@ -32,45 +31,6 @@ class Point: angle: T.Optional[float] -@unique -class GPSFix(Enum): - NO_FIX = 0 - FIX_2D = 2 - FIX_3D = 3 - - -@dataclasses.dataclass -class PointWithFix(Point): - gps_fix: T.Optional[GPSFix] - gps_precision: T.Optional[float] - gps_ground_speed: T.Optional[float] - - -def _ecef_from_lla_DEPRECATED( - lat: float, lon: float, alt: float -) -> T.Tuple[float, float, float]: - """ - Deprecated because it is slow. Keep here for reference and comparison. 
- Use _ecef_from_lla2 instead. - - Compute ECEF XYZ from latitude, longitude and altitude. - - All using the WGS94 model. - Altitude is the distance to the WGS94 ellipsoid. - Check results here http://www.oc.nps.edu/oc2902w/coord/llhxyz.htm - - """ - a2 = WGS84_a**2 - b2 = WGS84_b**2 - lat = math.radians(lat) - lon = math.radians(lon) - L = 1.0 / math.sqrt(a2 * math.cos(lat) ** 2 + b2 * math.sin(lat) ** 2) - x = (a2 * L + alt) * math.cos(lat) * math.cos(lon) - y = (a2 * L + alt) * math.cos(lat) * math.sin(lon) - z = (b2 * L + alt) * math.sin(lat) - return x, y, z - - def _ecef_from_lla2(lat: float, lon: float) -> T.Tuple[float, float, float]: """ Compute ECEF XYZ from latitude, longitude and altitude. @@ -172,20 +132,6 @@ def pairwise(iterable: T.Iterable[_IT]) -> T.Iterable[T.Tuple[_IT, _IT]]: return zip(a, b) -def group_every( - iterable: T.Iterable[_IT], n: int -) -> T.Generator[T.Generator[_IT, None, None], None, None]: - """ - Return a generator that divides the iterable into groups by N. - """ - - if not (0 < n): - raise ValueError("expect 0 < n but got {0}".format(n)) - - for _, group in itertools.groupby(enumerate(iterable), key=lambda t: t[0] // n): - yield (item for _, item in group) - - def as_unix_time(dt: T.Union[datetime.datetime, int, float]) -> float: if isinstance(dt, (int, float)): return dt diff --git a/mapillary_tools/geotag/geotag_videos_from_exiftool_video.py b/mapillary_tools/geotag/geotag_videos_from_exiftool_video.py index aebe9de4f..3ed8465ed 100644 --- a/mapillary_tools/geotag/geotag_videos_from_exiftool_video.py +++ b/mapillary_tools/geotag/geotag_videos_from_exiftool_video.py @@ -8,6 +8,7 @@ from .. import exceptions, exiftool_read, geo, types from ..exiftool_read_video import ExifToolReadVideo +from ..telemetry import GPSPoint from . import gpmf_gps_filter, utils as video_utils from .geotag_from_generic import GeotagVideosFromGeneric @@ -45,11 +46,11 @@ def geotag_video(element: ET.Element) -> types.VideoMetadataOrError: points = geo.extend_deduplicate_points(points) assert points, "must have at least one point" - if all(isinstance(p, geo.PointWithFix) for p in points): + if all(isinstance(p, GPSPoint) for p in points): points = T.cast( T.List[geo.Point], gpmf_gps_filter.remove_noisy_points( - T.cast(T.List[geo.PointWithFix], points) + T.cast(T.List[GPSPoint], points) ), ) if not points: diff --git a/mapillary_tools/geotag/geotag_videos_from_video.py b/mapillary_tools/geotag/geotag_videos_from_video.py index fb489da86..b5ac06316 100644 --- a/mapillary_tools/geotag/geotag_videos_from_video.py +++ b/mapillary_tools/geotag/geotag_videos_from_video.py @@ -9,6 +9,7 @@ from .. import exceptions, geo, types from ..camm import camm_parser from ..mp4 import simple_mp4_parser as sparser +from ..telemetry import GPSPoint from . 
import blackvue_parser, gpmf_gps_filter, gpmf_parser, utils as video_utils from .geotag_from_generic import GeotagVideosFromGeneric @@ -155,11 +156,11 @@ def geotag_video( video_metadata.points = geo.extend_deduplicate_points(video_metadata.points) assert video_metadata.points, "must have at least one point" - if all(isinstance(p, geo.PointWithFix) for p in video_metadata.points): + if all(isinstance(p, GPSPoint) for p in video_metadata.points): video_metadata.points = T.cast( T.List[geo.Point], gpmf_gps_filter.remove_noisy_points( - T.cast(T.List[geo.PointWithFix], video_metadata.points) + T.cast(T.List[GPSPoint], video_metadata.points) ), ) if not video_metadata.points: diff --git a/mapillary_tools/geotag/gpmf_gps_filter.py b/mapillary_tools/geotag/gpmf_gps_filter.py index dad3769a9..c57698d1e 100644 --- a/mapillary_tools/geotag/gpmf_gps_filter.py +++ b/mapillary_tools/geotag/gpmf_gps_filter.py @@ -2,6 +2,7 @@ import typing as T from .. import constants, geo +from ..telemetry import GPSPoint from . import gps_filter """ @@ -13,8 +14,8 @@ def remove_outliers( - sequence: T.Sequence[geo.PointWithFix], -) -> T.Sequence[geo.PointWithFix]: + sequence: T.Sequence[GPSPoint], +) -> T.Sequence[GPSPoint]: distances = [ geo.gps_distance((left.lat, left.lon), (right.lat, right.lon)) for left, right in geo.pairwise(sequence) @@ -37,9 +38,7 @@ def remove_outliers( "Split to %d sequences with max distance %f", len(sequences), max_distance ) - ground_speeds = [ - p.gps_ground_speed for p in sequence if p.gps_ground_speed is not None - ] + ground_speeds = [p.ground_speed for p in sequence if p.ground_speed is not None] if len(ground_speeds) < 2: return sequence @@ -50,20 +49,20 @@ def remove_outliers( ) return T.cast( - T.List[geo.PointWithFix], + T.List[GPSPoint], gps_filter.find_majority(merged.values()), ) def remove_noisy_points( - sequence: T.Sequence[geo.PointWithFix], -) -> T.Sequence[geo.PointWithFix]: + sequence: T.Sequence[GPSPoint], +) -> T.Sequence[GPSPoint]: num_points = len(sequence) sequence = [ p for p in sequence # include points **without** GPS fix - if p.gps_fix is None or p.gps_fix.value in constants.GOPRO_GPS_FIXES + if p.fix is None or p.fix.value in constants.GOPRO_GPS_FIXES ] if len(sequence) < num_points: LOG.debug( @@ -77,7 +76,7 @@ def remove_noisy_points( p for p in sequence # include points **without** precision - if p.gps_precision is None or p.gps_precision <= constants.GOPRO_MAX_DOP100 + if p.precision is None or p.precision <= constants.GOPRO_MAX_DOP100 ] if len(sequence) < num_points: LOG.debug( diff --git a/mapillary_tools/geotag/gpmf_parser.py b/mapillary_tools/geotag/gpmf_parser.py index fb4d65cd5..f0916fd0f 100644 --- a/mapillary_tools/geotag/gpmf_parser.py +++ b/mapillary_tools/geotag/gpmf_parser.py @@ -3,12 +3,16 @@ import itertools import pathlib import typing as T +import datetime + import construct as C -from .. import geo, imu +from .. import imu from ..mp4.mp4_sample_parser import MovieBoxParser, Sample, TrackBoxParser +from ..telemetry import GPSFix, GPSPoint + """ Parsing GPS from GPMF data format stored in GoPros. 
See the GPMF spec: https://github.com/gopro/gpmf-parser @@ -129,12 +133,20 @@ class KLVDict(T.TypedDict): @dataclasses.dataclass class TelemetryData: - gps: T.List[geo.PointWithFix] + gps: T.List[GPSPoint] accl: T.List[imu.AccelerationData] gyro: T.List[imu.GyroscopeData] magn: T.List[imu.MagnetometerData] +def _gps5_timestamp_to_epoch_time(dtstr: str): + # yymmddhhmmss.sss + dt = datetime.datetime.strptime(dtstr, "%y%m%d%H%M%S.%f").replace( + tzinfo=datetime.timezone.utc + ) + return dt.timestamp() + + # A GPS5 stream example: # key = b'STRM' type = b'\x00' structure_size = 1 repeat = 400 # data = ListContainer: @@ -173,7 +185,7 @@ class TelemetryData: # ] def gps5_from_stream( stream: T.Sequence[KLVDict], -) -> T.Generator[geo.PointWithFix, None, None]: +) -> T.Generator[GPSPoint, None, None]: indexed: T.Dict[bytes, T.List[T.List[T.Any]]] = { klv["key"]: klv["data"] for klv in stream } @@ -191,10 +203,16 @@ def gps5_from_stream( gpsf = indexed.get(b"GPSF") if gpsf is not None: - gpsf_value = geo.GPSFix(gpsf[0][0]) + gpsf_value = GPSFix(gpsf[0][0]) else: gpsf_value = None + gpsu = indexed.get(b"GPSU") + if gpsu is not None: + epoch_time = _gps5_timestamp_to_epoch_time(gpsu[0][0].decode("utf-8")) + else: + epoch_time = None + gpsp = indexed.get(b"GPSP") if gpsp is not None: gpsp_value = gpsp[0][0] @@ -205,22 +223,36 @@ def gps5_from_stream( lat, lon, alt, ground_speed, _speed_3d = [ v / s for v, s in zip(point, scal_values) ] - yield geo.PointWithFix( + yield GPSPoint( # will figure out the actual timestamp later time=0, lat=lat, lon=lon, alt=alt, - gps_fix=gpsf_value, - gps_precision=gpsp_value, - gps_ground_speed=ground_speed, + epoch_time=epoch_time, + fix=gpsf_value, + precision=gpsp_value, + ground_speed=ground_speed, angle=None, ) +_EPOCH_TIME_IN_2000 = datetime.datetime( + 2000, 1, 1, tzinfo=datetime.timezone.utc +).timestamp() + + +def _gps9_timestamp_to_epoch_time( + days_since_2000: int, secs_since_midnight: float +) -> float: + epoch_time = _EPOCH_TIME_IN_2000 + days_since_2000 * 24 * 60 * 60 + epoch_time += secs_since_midnight + return epoch_time + + def gps9_from_stream( stream: T.Sequence[KLVDict], -) -> T.Generator[geo.PointWithFix, None, None]: +) -> T.Generator[GPSPoint, None, None]: indexed: T.Dict[bytes, T.List[T.List[T.Any]]] = { klv["key"]: klv["data"] for klv in stream } @@ -257,21 +289,24 @@ def gps9_from_stream( alt, speed_2d, _speed_3d, - _days_since_2000, - _secs_since_midnight, + days_since_2000, + secs_since_midnight, dop, gps_fix, ) = [v / s for v, s in zip(sample_data, scal_values)] - yield geo.PointWithFix( + epoch_time = _gps9_timestamp_to_epoch_time(days_since_2000, secs_since_midnight) + + yield GPSPoint( # will figure out the actual timestamp later time=0, lat=lat, lon=lon, alt=alt, - gps_fix=geo.GPSFix(gps_fix), - gps_precision=dop * 100, - gps_ground_speed=speed_2d, + epoch_time=epoch_time, + fix=GPSFix(gps_fix), + precision=dop * 100, + ground_speed=speed_2d, angle=None, ) @@ -292,8 +327,8 @@ def _find_first_device_id(stream: T.Sequence[KLVDict]) -> int: return device_id -def _find_first_gps_stream(stream: T.Sequence[KLVDict]) -> T.List[geo.PointWithFix]: - sample_points: T.List[geo.PointWithFix] = [] +def _find_first_gps_stream(stream: T.Sequence[KLVDict]) -> T.List[GPSPoint]: + sample_points: T.List[GPSPoint] = [] for klv in stream: if klv["key"] == b"STRM": @@ -342,9 +377,9 @@ def _apply_matrix( matrix: T.Sequence[float], values: T.Sequence[float] ) -> T.Generator[float, None, None]: size = len(values) - assert ( - len(matrix) == size * size - ), 
f"expecting a square matrix of size {size} x {size} but got {len(matrix)}" + assert len(matrix) == size * size, ( + f"expecting a square matrix of size {size} x {size} but got {len(matrix)}" + ) for y in range(size): row_start = y * size @@ -447,7 +482,7 @@ def _extract_points_from_samples( fp: T.BinaryIO, samples: T.Iterable[Sample] ) -> TelemetryData: # To keep GPS points from different devices separated - points_by_dvid: T.Dict[int, T.List[geo.PointWithFix]] = {} + points_by_dvid: T.Dict[int, T.List[GPSPoint]] = {} accls_by_dvid: T.Dict[int, T.List[imu.AccelerationData]] = {} gyros_by_dvid: T.Dict[int, T.List[imu.GyroscopeData]] = {} magns_by_dvid: T.Dict[int, T.List[imu.MagnetometerData]] = {} @@ -537,7 +572,7 @@ def _filter_gpmd_samples(track: TrackBoxParser) -> T.Generator[Sample, None, Non yield sample -def extract_points(fp: T.BinaryIO) -> T.List[geo.PointWithFix]: +def extract_points(fp: T.BinaryIO) -> T.List[GPSPoint]: """ Return a list of points (could be empty) if it is a valid GoPro video, otherwise None @@ -615,7 +650,7 @@ def extract_camera_model(fp: T.BinaryIO) -> str: return unicode_names[0].strip() -def parse_gpx(path: pathlib.Path) -> T.List[geo.PointWithFix]: +def parse_gpx(path: pathlib.Path) -> T.List[GPSPoint]: with path.open("rb") as fp: points = extract_points(fp) if points is None: diff --git a/mapillary_tools/mp4/construct_mp4_parser.py b/mapillary_tools/mp4/construct_mp4_parser.py index 8ca1454ba..6391e6846 100644 --- a/mapillary_tools/mp4/construct_mp4_parser.py +++ b/mapillary_tools/mp4/construct_mp4_parser.py @@ -610,9 +610,9 @@ def find_box_at_path( return box box_data = T.cast(T.Sequence[BoxDict], box["data"]) # ListContainer from construct is not sequence - assert isinstance( - box_data, T.Sequence - ), f"expect a list of boxes but got {type(box_data)} at path {path}" + assert isinstance(box_data, T.Sequence), ( + f"expect a list of boxes but got {type(box_data)} at path {path}" + ) found = find_box_at_path(box_data, path[1:]) if found is not None: return found diff --git a/mapillary_tools/mp4/mp4_sample_parser.py b/mapillary_tools/mp4/mp4_sample_parser.py index 38a624a74..6f5afdd46 100644 --- a/mapillary_tools/mp4/mp4_sample_parser.py +++ b/mapillary_tools/mp4/mp4_sample_parser.py @@ -59,9 +59,9 @@ def _extract_raw_samples( if not chunk_entries: return - assert ( - len(sizes) <= len(timedeltas) - ), f"got less ({len(timedeltas)}) sample time deltas (stts) than expected ({len(sizes)})" + assert len(sizes) <= len(timedeltas), ( + f"got less ({len(timedeltas)}) sample time deltas (stts) than expected ({len(sizes)})" + ) sample_idx = 0 chunk_idx = 0 diff --git a/mapillary_tools/mp4/simple_mp4_builder.py b/mapillary_tools/mp4/simple_mp4_builder.py index 632aecca0..b93a3ef29 100644 --- a/mapillary_tools/mp4/simple_mp4_builder.py +++ b/mapillary_tools/mp4/simple_mp4_builder.py @@ -404,8 +404,8 @@ def _rewrite_and_build_moov_typed_data( for box in _filter_trak_boxes(moov_children): sample_offset = _update_sbtl_sample_offsets(box, sample_offset) moov_typed_data = _build_moov_typed_data(moov_children) - assert ( - len(moov_typed_data) == moov_typed_data_size - ), f"{len(moov_typed_data)} != {moov_typed_data_size}" + assert len(moov_typed_data) == moov_typed_data_size, ( + f"{len(moov_typed_data)} != {moov_typed_data_size}" + ) return moov_typed_data diff --git a/mapillary_tools/process_geotag_properties.py b/mapillary_tools/process_geotag_properties.py index 3515d32c4..ff6096f7f 100644 --- a/mapillary_tools/process_geotag_properties.py +++ 
b/mapillary_tools/process_geotag_properties.py @@ -255,9 +255,9 @@ def process_geotag_properties( metadatas.extend(video_metadata) # filenames should be deduplicated in utils.find_images/utils.find_videos - assert len(metadatas) == len( - set(metadata.filename for metadata in metadatas) - ), "duplicate filenames found" + assert len(metadatas) == len(set(metadata.filename for metadata in metadatas)), ( + "duplicate filenames found" + ) return metadatas diff --git a/mapillary_tools/process_sequence_properties.py b/mapillary_tools/process_sequence_properties.py index 2ed7049ad..27ca1a4cc 100644 --- a/mapillary_tools/process_sequence_properties.py +++ b/mapillary_tools/process_sequence_properties.py @@ -209,9 +209,9 @@ def _interpolate_subsecs_for_sorting(sequence: PointSequence) -> None: gidx = gidx + len(group) for cur, nxt in geo.pairwise(sequence): - assert ( - cur.time <= nxt.time - ), f"sequence must be sorted but got {cur.time} > {nxt.time}" + assert cur.time <= nxt.time, ( + f"sequence must be sorted but got {cur.time} > {nxt.time}" + ) def _parse_filesize_in_bytes(filesize_str: str) -> int: @@ -335,9 +335,9 @@ def process_sequence_properties( results = error_metadatas + image_metadatas + video_metadatas - assert len(metadatas) == len( - results - ), f"expected {len(metadatas)} results but got {len(results)}" + assert len(metadatas) == len(results), ( + f"expected {len(metadatas)} results but got {len(results)}" + ) assert sequence_idx == len( set(metadata.MAPSequenceUUID for metadata in image_metadatas) ) diff --git a/mapillary_tools/sample_video.py b/mapillary_tools/sample_video.py index eb0217063..89b9de9ba 100644 --- a/mapillary_tools/sample_video.py +++ b/mapillary_tools/sample_video.py @@ -118,9 +118,9 @@ def sample_video( start_time=video_start_time_dt, ) else: - assert ( - 0 < video_sample_interval - ), "expect positive video_sample_interval but got {video_sample_interval}" + assert 0 < video_sample_interval, ( + "expect positive video_sample_interval but got {video_sample_interval}" + ) _sample_single_video_by_interval( video_path, sample_dir, @@ -339,9 +339,9 @@ def _sample_single_video_by_distance( f"Expect {len(sorted_sample_indices)} samples but extracted {len(frame_samples)} samples" ) for idx, (frame_idx_1based, sample_paths) in enumerate(frame_samples): - assert ( - len(sample_paths) == 1 - ), "Expect 1 sample path at {frame_idx_1based} but got {sample_paths}" + assert len(sample_paths) == 1, ( + "Expect 1 sample path at {frame_idx_1based} but got {sample_paths}" + ) if idx + 1 != frame_idx_1based: raise exceptions.MapillaryVideoError( f"Expect {sample_paths[0]} to be {idx + 1}th sample but got {frame_idx_1based}" @@ -352,9 +352,9 @@ def _sample_single_video_by_distance( continue video_sample, interp = sample_points_by_frame_idx[sample_idx] - assert ( - interp.time == video_sample.exact_composition_time - ), f"interpolated time {interp.time} should match the video sample time {video_sample.exact_composition_time}" + assert interp.time == video_sample.exact_composition_time, ( + f"interpolated time {interp.time} should match the video sample time {video_sample.exact_composition_time}" + ) timestamp = start_time + datetime.timedelta(seconds=interp.time) exif_edit = ExifEdit(sample_paths[0]) diff --git a/mapillary_tools/telemetry.py b/mapillary_tools/telemetry.py new file mode 100644 index 000000000..7e0312e15 --- /dev/null +++ b/mapillary_tools/telemetry.py @@ -0,0 +1,20 @@ +import dataclasses +import typing as T +from enum import Enum, unique + +from .geo import Point + 
+ +@unique +class GPSFix(Enum): + NO_FIX = 0 + FIX_2D = 2 + FIX_3D = 3 + + +@dataclasses.dataclass +class GPSPoint(Point): + epoch_time: T.Optional[float] + fix: T.Optional[GPSFix] + precision: T.Optional[float] + ground_speed: T.Optional[float] diff --git a/mapillary_tools/upload.py b/mapillary_tools/upload.py index f0af1a135..55657bcba 100644 --- a/mapillary_tools/upload.py +++ b/mapillary_tools/upload.py @@ -655,9 +655,9 @@ def upload( ) for idx, video_metadata in enumerate(specified_video_metadatas): video_metadata.update_md5sum() - assert isinstance( - video_metadata.md5sum, str - ), "md5sum should be updated" + assert isinstance(video_metadata.md5sum, str), ( + "md5sum should be updated" + ) generator = camm_builder.camm_sample_generator2(video_metadata) with video_metadata.filename.open("rb") as src_fp: camm_fp = simple_mp4_builder.transform_mp4(src_fp, generator) diff --git a/mapillary_tools/upload_api_v4.py b/mapillary_tools/upload_api_v4.py index 7ccadc011..10014340d 100644 --- a/mapillary_tools/upload_api_v4.py +++ b/mapillary_tools/upload_api_v4.py @@ -154,9 +154,9 @@ def upload( if not chunk: break - assert ( - offset == self.entity_size - ), f"Offset ends at {offset} but the entity size is {self.entity_size}" + assert offset == self.entity_size, ( + f"Offset ends at {offset} but the entity size is {self.entity_size}" + ) payload = resp.json() try: diff --git a/mapillary_tools/video_data_extraction/extract_video_data.py b/mapillary_tools/video_data_extraction/extract_video_data.py index 22e12c7e5..716d4657e 100644 --- a/mapillary_tools/video_data_extraction/extract_video_data.py +++ b/mapillary_tools/video_data_extraction/extract_video_data.py @@ -5,10 +5,9 @@ import tqdm -import mapillary_tools.geotag.utils as video_utils - from .. import exceptions, geo, utils -from ..geotag import gpmf_gps_filter +from ..geotag import gpmf_gps_filter, utils as video_utils +from ..telemetry import GPSPoint from ..types import ( ErrorMetadata, FileType, @@ -81,7 +80,7 @@ def process_file(self, file: Path) -> VideoMetadataOrError: make = parser.extract_make() except Exception as e: ex = e - LOG.warn( + LOG.warning( '%(filename)s: Exception for parser %(parser)s while processing source %(source)s: "%(e)s"', {**log_vars, "e": e}, ) @@ -163,11 +162,11 @@ def _sanitize_points(points: T.Sequence[geo.Point]) -> T.Sequence[geo.Point]: points = geo.extend_deduplicate_points(points) - if all(isinstance(p, geo.PointWithFix) for p in points): + if all(isinstance(p, GPSPoint) for p in points): points = T.cast( T.Sequence[geo.Point], gpmf_gps_filter.remove_noisy_points( - T.cast(T.Sequence[geo.PointWithFix], points) + T.cast(T.Sequence[GPSPoint], points) ), ) if not points: diff --git a/tests/cli/gpmf_parser.py b/tests/cli/gpmf_parser.py index f5641d2ad..d271a588b 100644 --- a/tests/cli/gpmf_parser.py +++ b/tests/cli/gpmf_parser.py @@ -11,18 +11,19 @@ import mapillary_tools.geotag.gpmf_parser as gpmf_parser import mapillary_tools.geotag.gps_filter as gps_filter +import mapillary_tools.telemetry as telemetry import mapillary_tools.utils as utils from mapillary_tools.mp4 import mp4_sample_parser def _convert_points_to_gpx_track_segment( - points: T.Sequence[geo.PointWithFix], + points: T.Sequence[telemetry.GPSPoint], ) -> gpxpy.gpx.GPXTrackSegment: gpx_segment = gpxpy.gpx.GPXTrackSegment() gps_fix_map = { - geo.GPSFix.NO_FIX: "none", - geo.GPSFix.FIX_2D: "2d", - geo.GPSFix.FIX_3D: "3d", + telemetry.GPSFix.NO_FIX: "none", + telemetry.GPSFix.FIX_2D: "2d", + telemetry.GPSFix.FIX_3D: "3d", } for idx, point in 
enumerate(points): if idx + 1 < len(points): @@ -42,19 +43,23 @@ def _convert_points_to_gpx_track_segment( { "distance_between": distance, "speed_between": speed, - "ground_speed": point.gps_ground_speed, + "ground_speed": point.ground_speed, } ) + if point.epoch_time is not None: + epoch_time = point.epoch_time + else: + epoch_time = point.time gpxp = gpxpy.gpx.GPXTrackPoint( point.lat, point.lon, elevation=point.alt, - time=datetime.datetime.utcfromtimestamp(point.time), - position_dilution=point.gps_precision, + time=datetime.datetime.utcfromtimestamp(epoch_time), + position_dilution=point.precision, comment=comment, ) - if point.gps_fix is not None: - gpxp.type_of_gpx_fix = gps_fix_map.get(point.gps_fix) + if point.fix is not None: + gpxp.type_of_gpx_fix = gps_fix_map.get(point.fix) gpx_segment.points.append(gpxp) return gpx_segment @@ -85,10 +90,10 @@ def _convert_geojson(path: pathlib.Path): geomtry = {"type": "Point", "coordinates": [p.lon, p.lat]} properties = { "alt": p.alt, - "fix": p.gps_fix.value if p.gps_fix is not None else None, + "fix": p.fix.value if p.fix is not None else None, "index": idx, "name": path.name, - "precision": p.gps_precision, + "precision": p.precision, "time": p.time, } features.append( diff --git a/tests/cli/gps_filter.py b/tests/cli/gps_filter.py index 44a0b3c0a..96d0baddd 100644 --- a/tests/cli/gps_filter.py +++ b/tests/cli/gps_filter.py @@ -6,7 +6,7 @@ import gpxpy -from mapillary_tools import constants, geo +from mapillary_tools import constants, geo, telemetry from mapillary_tools.geotag import gps_filter from .gpmf_parser import _convert_points_to_gpx_track_segment @@ -36,11 +36,11 @@ def _parse_args(): def _gpx_track_segment_to_points( segment: gpxpy.gpx.GPXTrackSegment, -) -> T.List[geo.PointWithFix]: +) -> T.List[telemetry.GPSPoint]: gps_fix_map = { - "none": geo.GPSFix.NO_FIX, - "2d": geo.GPSFix.FIX_2D, - "3d": geo.GPSFix.FIX_3D, + "none": telemetry.GPSFix.NO_FIX, + "2d": telemetry.GPSFix.FIX_2D, + "3d": telemetry.GPSFix.FIX_3D, } points = [] for p in segment.points: @@ -57,41 +57,42 @@ def _gpx_track_segment_to_points( else: ground_speed = None - point = geo.PointWithFix( + point = telemetry.GPSPoint( time=geo.as_unix_time(T.cast(datetime.datetime, p.time)), lat=p.latitude, lon=p.longitude, alt=p.elevation, angle=None, - gps_fix=( + epoch_time=None, + fix=( gps_fix_map[p.type_of_gpx_fix] if p.type_of_gpx_fix is not None else None ), - gps_precision=p.position_dilution, - gps_ground_speed=ground_speed, + precision=p.position_dilution, + ground_speed=ground_speed, ) points.append(point) return points def _filter_noise( - points: T.Sequence[geo.PointWithFix], + points: T.Sequence[telemetry.GPSPoint], gps_fix: T.Set[int], max_dop: float, -) -> T.List[geo.PointWithFix]: +) -> T.List[telemetry.GPSPoint]: return [ p for p in points - if (p.gps_fix is None or p.gps_fix.value in gps_fix) - and (p.gps_precision is None or p.gps_precision <= max_dop) + if (p.fix is None or p.fix.value in gps_fix) + and (p.precision is None or p.precision <= max_dop) ] def _filter_outliers( - points: T.List[geo.PointWithFix], + points: T.List[telemetry.GPSPoint], gps_precision: float, -) -> T.List[geo.PointWithFix]: +) -> T.List[telemetry.GPSPoint]: if gps_precision == 0: return points @@ -111,7 +112,7 @@ def _filter_outliers( ) ground_speeds = [ - point.gps_ground_speed for point in points if point.gps_ground_speed is not None + point.ground_speed for point in points if point.ground_speed is not None ] if len(ground_speeds) < 2: return points @@ -120,7 +121,7 @@ def 
_filter_outliers( merged = gps_filter.dbscan(subseqs, gps_filter.speed_le(max_speed)) return T.cast( - T.List[geo.PointWithFix], + T.List[telemetry.GPSPoint], gps_filter.find_majority(merged.values()), ) diff --git a/tests/integration/fixtures.py b/tests/integration/fixtures.py index 3534272d3..c84f4d570 100644 --- a/tests/integration/fixtures.py +++ b/tests/integration/fixtures.py @@ -238,33 +238,39 @@ def verify_descs(expected: T.List[T.Dict], actual: T.Union[Path, T.List[T.Dict]] e = expected_desc["MAPCompassHeading"] assert "MAPCompassHeading" in actual_desc, actual_desc a = actual_desc["MAPCompassHeading"] - assert ( - abs(e["TrueHeading"] - a["TrueHeading"]) < 0.001 - ), f'got {a["TrueHeading"]} but expect {e["TrueHeading"]} in {filename}' - assert ( - abs(e["MagneticHeading"] - a["MagneticHeading"]) < 0.001 - ), f'got {a["MagneticHeading"]} but expect {e["MagneticHeading"]} in {filename}' + assert abs(e["TrueHeading"] - a["TrueHeading"]) < 0.001, ( + f"got {a['TrueHeading']} but expect {e['TrueHeading']} in {filename}" + ) + assert abs(e["MagneticHeading"] - a["MagneticHeading"]) < 0.001, ( + f"got {a['MagneticHeading']} but expect {e['MagneticHeading']} in {filename}" + ) if "MAPCaptureTime" in expected_desc: - assert ( - expected_desc["MAPCaptureTime"] == actual_desc["MAPCaptureTime"] - ), f'expect {expected_desc["MAPCaptureTime"]} but got {actual_desc["MAPCaptureTime"]} in {filename}' + assert expected_desc["MAPCaptureTime"] == actual_desc["MAPCaptureTime"], ( + f"expect {expected_desc['MAPCaptureTime']} but got {actual_desc['MAPCaptureTime']} in {filename}" + ) if "MAPLongitude" in expected_desc: assert ( abs(expected_desc["MAPLongitude"] - actual_desc["MAPLongitude"]) < 0.00001 - ), f'expect {expected_desc["MAPLongitude"]} but got {actual_desc["MAPLongitude"]} in {filename}' + ), ( + f"expect {expected_desc['MAPLongitude']} but got {actual_desc['MAPLongitude']} in {filename}" + ) if "MAPLatitude" in expected_desc: assert ( abs(expected_desc["MAPLatitude"] - actual_desc["MAPLatitude"]) < 0.00001 - ), f'expect {expected_desc["MAPLatitude"]} but got {actual_desc["MAPLatitude"]} in {filename}' + ), ( + f"expect {expected_desc['MAPLatitude']} but got {actual_desc['MAPLatitude']} in {filename}" + ) if "MAPAltitude" in expected_desc: assert ( abs(expected_desc["MAPAltitude"] - actual_desc["MAPAltitude"]) < 0.001 - ), f'expect {expected_desc["MAPAltitude"]} but got {actual_desc["MAPAltitude"]} in {filename}' + ), ( + f"expect {expected_desc['MAPAltitude']} but got {actual_desc['MAPAltitude']} in {filename}" + ) if "MAPDeviceMake" in expected_desc: assert expected_desc["MAPDeviceMake"] == actual_desc["MAPDeviceMake"] diff --git a/tests/integration/test_history.py b/tests/integration/test_history.py index 876e89f42..565c0e5a5 100644 --- a/tests/integration/test_history.py +++ b/tests/integration/test_history.py @@ -69,9 +69,9 @@ def test_upload_images( shell=True, ) assert x.returncode == 0, x.stderr - assert ( - len(setup_upload.listdir()) == 0 - ), "should NOT upload because it is uploaded already" + assert len(setup_upload.listdir()) == 0, ( + "should NOT upload because it is uploaded already" + ) @pytest.mark.usefixtures("setup_config") @@ -97,6 +97,6 @@ def test_upload_blackvue( shell=True, ) assert x.returncode == 0, x.stderr - assert ( - len(setup_upload.listdir()) == 0 - ), "should NOT upload because it is uploaded already" + assert len(setup_upload.listdir()) == 0, ( + "should NOT upload because it is uploaded already" + ) diff --git 
a/tests/integration/test_process_and_upload.py b/tests/integration/test_process_and_upload.py index d97b77188..4325a6c6f 100644 --- a/tests/integration/test_process_and_upload.py +++ b/tests/integration/test_process_and_upload.py @@ -64,7 +64,7 @@ "MAPCaptureTime": "2019_11_18_15_44_47_862", "MAPCompassHeading": {"MagneticHeading": 313.689, "TrueHeading": 313.689}, "MAPDeviceMake": "GoPro", - "MAPDeviceModel": "GoPro " "Max", + "MAPDeviceModel": "GoPro Max", "MAPLatitude": 33.1266719, "MAPLongitude": -117.3273063, "MAPOrientation": 1, @@ -75,7 +75,7 @@ "MAPCaptureTime": "2019_11_18_15_44_49_862", "MAPCompassHeading": {"MagneticHeading": 326.179, "TrueHeading": 326.179}, "MAPDeviceMake": "GoPro", - "MAPDeviceModel": "GoPro " "Max", + "MAPDeviceModel": "GoPro Max", "MAPLatitude": 33.1266891, "MAPLongitude": -117.3273151, "MAPOrientation": 1, @@ -86,7 +86,7 @@ "MAPCaptureTime": "2019_11_18_15_44_51_862", "MAPCompassHeading": {"MagneticHeading": 353.178, "TrueHeading": 353.178}, "MAPDeviceMake": "GoPro", - "MAPDeviceModel": "GoPro " "Max", + "MAPDeviceModel": "GoPro Max", "MAPLatitude": 33.1267078, "MAPLongitude": -117.3273264, "MAPOrientation": 1, @@ -97,7 +97,7 @@ "MAPCaptureTime": "2019_11_18_15_44_53_862", "MAPCompassHeading": {"MagneticHeading": 334.427, "TrueHeading": 334.427}, "MAPDeviceMake": "GoPro", - "MAPDeviceModel": "GoPro " "Max", + "MAPDeviceModel": "GoPro Max", "MAPLatitude": 33.1267282, "MAPLongitude": -117.3273391, "MAPOrientation": 1, @@ -108,7 +108,7 @@ "MAPCaptureTime": "2019_11_18_15_44_55_862", "MAPCompassHeading": {"MagneticHeading": 325.089, "TrueHeading": 325.089}, "MAPDeviceMake": "GoPro", - "MAPDeviceModel": "GoPro " "Max", + "MAPDeviceModel": "GoPro Max", "MAPLatitude": 33.12675, "MAPLongitude": -117.3273483, "MAPOrientation": 1, @@ -119,7 +119,7 @@ "MAPCaptureTime": "2019_11_18_15_44_57_862", "MAPCompassHeading": {"MagneticHeading": 327.867, "TrueHeading": 327.867}, "MAPDeviceMake": "GoPro", - "MAPDeviceModel": "GoPro " "Max", + "MAPDeviceModel": "GoPro Max", "MAPLatitude": 33.1267663, "MAPLongitude": -117.3273595, "MAPOrientation": 1, diff --git a/tests/unit/test_io_utils.py b/tests/unit/test_io_utils.py index c5ac86a09..1581db413 100644 --- a/tests/unit/test_io_utils.py +++ b/tests/unit/test_io_utils.py @@ -76,9 +76,9 @@ def test_chained(): thrown_x is None and thrown_y is None ), (thrown_x, thrown_y, whence, offset) if not thrown_x: - assert ( - x == y - ), f"whence={whence} offset={offset} x={x} y={y} {s.tell()} {c.tell()}" + assert x == y, ( + f"whence={whence} offset={offset} x={x} y={y} {s.tell()} {c.tell()}" + ) n = random.randint(-1, 20) assert s.read(n) == c.read(n), f"n={n}" diff --git a/tests/unit/test_uploader.py b/tests/unit/test_uploader.py index 24f6da1ab..fd6cc304a 100644 --- a/tests/unit/test_uploader.py +++ b/tests/unit/test_uploader.py @@ -31,9 +31,9 @@ def _validate_zip_dir(zip_dir: py.path.local): for zip_path in zip_dir.listdir(): with zipfile.ZipFile(zip_path) as ziph: upload_md5sum = json.loads(ziph.comment).get("upload_md5sum") - assert ( - str(os.path.basename(zip_path)) == f"mly_tools_{upload_md5sum}.zip" - ), zip_path + assert str(os.path.basename(zip_path)) == f"mly_tools_{upload_md5sum}.zip", ( + zip_path + ) descs.extend(validate_and_extract_zip(str(zip_path))) return descs
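For illustration only, outside the patch itself: a minimal, standalone sketch of what the `geo.PointWithFix` → `telemetry.GPSPoint` rename and the new `epoch_time` field amount to. The `Point` base class, the example coordinates, and the DOP cutoff used below are simplified stand-ins, not the real `mapillary_tools` definitions or constants; only the field names (`epoch_time`, `fix`, `precision`, `ground_speed`) and the GPS9 days/seconds-to-epoch conversion mirror the diff above.

```python
# Standalone sketch of the renamed telemetry dataclass and the GPS9 epoch-time
# conversion introduced in this diff. All classes here are simplified stand-ins.
import dataclasses
import datetime
import typing as T
from enum import Enum, unique


@unique
class GPSFix(Enum):
    NO_FIX = 0
    FIX_2D = 2
    FIX_3D = 3


@dataclasses.dataclass
class Point:
    # Stand-in for mapillary_tools.geo.Point (field layout assumed).
    time: float
    lat: float
    lon: float
    alt: T.Optional[float]
    angle: T.Optional[float]


@dataclasses.dataclass
class GPSPoint(Point):
    # Formerly PointWithFix with gps_fix/gps_precision/gps_ground_speed;
    # epoch_time (absolute UTC time from the GPS stream) is the new field.
    epoch_time: T.Optional[float]
    fix: T.Optional[GPSFix]
    precision: T.Optional[float]
    ground_speed: T.Optional[float]


_EPOCH_TIME_IN_2000 = datetime.datetime(
    2000, 1, 1, tzinfo=datetime.timezone.utc
).timestamp()


def gps9_timestamp_to_epoch_time(
    days_since_2000: float, secs_since_midnight: float
) -> float:
    # GPS9 stores a (days since 2000-01-01, seconds since midnight UTC) pair;
    # convert it to a Unix epoch timestamp.
    return _EPOCH_TIME_IN_2000 + days_since_2000 * 24 * 60 * 60 + secs_since_midnight


if __name__ == "__main__":
    point = GPSPoint(
        time=0.0,  # relative sample time, filled in later by the parser
        lat=33.1266719,
        lon=-117.3273063,
        alt=56.0,
        angle=None,
        epoch_time=gps9_timestamp_to_epoch_time(8000, 12345.678),
        fix=GPSFix.FIX_3D,
        precision=150.0,
        ground_speed=3.2,
    )
    # Noise filtering keys off the renamed fields; 1000 is a placeholder DOP cutoff.
    keep = (point.fix is None or point.fix in (GPSFix.FIX_2D, GPSFix.FIX_3D)) and (
        point.precision is None or point.precision <= 1000
    )
    print(point.epoch_time, keep)
```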