From f1eac71abc63e60c208900f0227f333b414db03a Mon Sep 17 00:00:00 2001
From: bearpaw
Date: Fri, 15 Mar 2019 15:25:14 -0700
Subject: [PATCH] manually merge from https://github.com/dimatura/pypcd/pull/9

---
 .gitignore                |  7 +++++++
 pypcd/pypcd.py            | 36 +++++++++++++++++++++---------------
 pypcd/tests/test_pypcd.py | 18 +++++++++---------
 3 files changed, 37 insertions(+), 24 deletions(-)

diff --git a/.gitignore b/.gitignore
index ad6bebd..cb8cb32 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,3 +5,10 @@ build
 .eggs/
 dist
 _build
+bin
+lib
+include
+.Python
+.idea
+pip-selfcheck.json
+.pytest_cache/
diff --git a/pypcd/pypcd.py b/pypcd/pypcd.py
index ed191a4..4feffe0 100644
--- a/pypcd/pypcd.py
+++ b/pypcd/pypcd.py
@@ -12,7 +12,6 @@
 import re
 import struct
 import copy
-import cStringIO as sio
 import numpy as np
 import warnings
 import lzf
@@ -24,6 +23,11 @@
 except ImportError:
     HAS_SENSOR_MSGS = False
 
+try:
+    from io import StringIO as sio
+except:
+    import cStringIO as sio
+
 __all__ = ['PointCloud',
            'point_cloud_to_path',
            'point_cloud_to_buffer',
@@ -91,11 +95,11 @@ def parse_header(lines):
         elif key in ('fields', 'type'):
             metadata[key] = value.split()
         elif key in ('size', 'count'):
-            metadata[key] = map(int, value.split())
+            metadata[key] = list(map(int, value.split()))
         elif key in ('width', 'height', 'points'):
             metadata[key] = int(value)
         elif key == 'viewpoint':
-            metadata[key] = map(float, value.split())
+            metadata[key] = list(map(float, value.split()))
         elif key == 'data':
             metadata[key] = value.strip().lower()
     # TODO apparently count is not required?
@@ -205,9 +209,9 @@ def _build_dtype(metadata):
             fieldnames.append(f)
             typenames.append(np_type)
         else:
-            fieldnames.extend(['%s_%04d' % (f, i) for i in xrange(c)])
+            fieldnames.extend(['%s_%04d' % (f, i) for i in range(c)])
             typenames.extend([np_type]*c)
-    dtype = np.dtype(zip(fieldnames, typenames))
+    dtype = np.dtype(list(zip(fieldnames, typenames)))
     return dtype
 
 
@@ -278,6 +282,8 @@ def point_cloud_from_fileobj(f):
     header = []
     while True:
         ln = f.readline().strip()
+        if not isinstance(ln, str):
+            ln = ln.decode('utf-8')
         header.append(ln)
         if ln.startswith('DATA'):
             metadata = parse_header(header)
@@ -320,13 +326,13 @@ def point_cloud_to_fileobj(pc, fileobj, data_compression=None):
         assert(data_compression in ('ascii', 'binary', 'binary_compressed'))
         metadata['data'] = data_compression
 
-    header = write_header(metadata)
+    header = write_header(metadata).encode('utf-8')
     fileobj.write(header)
     if metadata['data'].lower() == 'ascii':
         fmtstr = build_ascii_fmtstr(pc)
         np.savetxt(fileobj, pc.pc_data, fmt=fmtstr)
     elif metadata['data'].lower() == 'binary':
-        fileobj.write(pc.pc_data.tostring('C'))
+        fileobj.write(pc.pc_data.tostring())
     elif metadata['data'].lower() == 'binary_compressed':
         # TODO
         # a '_' field is ignored by pcl and breakes compressed point clouds.
@@ -335,9 +341,9 @@ def point_cloud_to_fileobj(pc, fileobj, data_compression=None):
         # reorder to column-by-column
         uncompressed_lst = []
         for fieldname in pc.pc_data.dtype.names:
-            column = np.ascontiguousarray(pc.pc_data[fieldname]).tostring('C')
+            column = np.ascontiguousarray(pc.pc_data[fieldname]).tostring()
             uncompressed_lst.append(column)
-        uncompressed = ''.join(uncompressed_lst)
+        uncompressed = b''.join(uncompressed_lst)
         uncompressed_size = len(uncompressed)
         # print("uncompressed_size = %r"%(uncompressed_size))
         buf = lzf.compress(uncompressed)
@@ -357,7 +363,7 @@ def point_cloud_to_fileobj(pc, fileobj, data_compression=None):
 
 
 def point_cloud_to_path(pc, fname):
-    with open(fname, 'w') as f:
+    with open(fname, 'wb') as f:
         point_cloud_to_fileobj(pc, f)
 
 
@@ -370,21 +376,21 @@ def point_cloud_to_buffer(pc, data_compression=None):
 def save_point_cloud(pc, fname):
     """ Save pointcloud to fname in ascii format.
     """
-    with open(fname, 'w') as f:
+    with open(fname, 'wb') as f:
         point_cloud_to_fileobj(pc, f, 'ascii')
 
 
 def save_point_cloud_bin(pc, fname):
     """ Save pointcloud to fname in binary format.
     """
-    with open(fname, 'w') as f:
+    with open(fname, 'wb') as f:
         point_cloud_to_fileobj(pc, f, 'binary')
 
 
 def save_point_cloud_bin_compressed(pc, fname):
     """ Save pointcloud to fname in binary compressed format.
     """
-    with open(fname, 'w') as f:
+    with open(fname, 'wb') as f:
         point_cloud_to_fileobj(pc, f, 'binary_compressed')
 
 
@@ -481,7 +487,7 @@ def add_fields(pc, metadata, pc_data):
         else:
             fieldnames.extend(['%s_%04d' % (f, i) for i in xrange(c)])
             typenames.extend([np_type]*c)
-    dtype = zip(fieldnames, typenames)
+    dtype = list(zip(fieldnames, typenames))
     # new dtype. could be inferred?
     new_dtype = [(f, pc.pc_data.dtype[f])
                  for f in pc.pc_data.dtype.names] + dtype
@@ -693,7 +699,7 @@ def save_pcd(self, fname, compression=None, **kwargs):
             warnings.warn('data_compression keyword is deprecated for'
                           ' compression')
             compression = kwargs['data_compression']
-        with open(fname, 'w') as f:
+        with open(fname, 'wb') as f:
             point_cloud_to_fileobj(self, f, compression)
 
     def save_pcd_to_fileobj(self, fileobj, compression=None, **kwargs):
diff --git a/pypcd/tests/test_pypcd.py b/pypcd/tests/test_pypcd.py
index 547d747..d428579 100644
--- a/pypcd/tests/test_pypcd.py
+++ b/pypcd/tests/test_pypcd.py
@@ -84,7 +84,7 @@ def test_parse_header():
 
 
 def test_from_path(pcd_fname):
-    import pypcd
+    from pypcd import pypcd
     pc = pypcd.PointCloud.from_path(pcd_fname)
 
     fields = 'x y z normal_x normal_y normal_z curvature boundary k vp_x vp_y vp_z principal_curvature_x principal_curvature_y principal_curvature_z pc1 pc2'.split()
@@ -95,7 +95,7 @@ def test_from_path(pcd_fname):
 
 
 def test_add_fields(pcd_fname):
-    import pypcd
+    from pypcd import pypcd
     pc = pypcd.PointCloud.from_path(pcd_fname)
 
     old_md = pc.get_metadata()
@@ -114,7 +114,7 @@ def test_add_fields(pcd_fname):
 
 
 def test_path_roundtrip_ascii(pcd_fname):
-    import pypcd
+    from pypcd import pypcd
     pc = pypcd.PointCloud.from_path(pcd_fname)
     md = pc.get_metadata()
 
@@ -138,7 +138,7 @@ def test_path_roundtrip_ascii(pcd_fname):
 
 
 def test_path_roundtrip_binary(pcd_fname):
-    import pypcd
+    from pypcd import pypcd
     pc = pypcd.PointCloud.from_path(pcd_fname)
     md = pc.get_metadata()
 
@@ -152,7 +152,7 @@ def test_path_roundtrip_binary(pcd_fname):
 
     pc2 = pypcd.PointCloud.from_path(tmp_fname)
     md2 = pc2.get_metadata()
-    for k, v in md2.iteritems():
+    for k, v in md2.items():
         if k == 'data':
             assert v == 'binary'
         else:
@@ -166,7 +166,7 @@ def test_path_roundtrip_binary(pcd_fname):
 
 
 def test_path_roundtrip_binary_compressed(pcd_fname):
-    import pypcd
+    from pypcd import pypcd
     pc = pypcd.PointCloud.from_path(pcd_fname)
     md = pc.get_metadata()
 
@@ -180,7 +180,7 @@ def test_path_roundtrip_binary_compressed(pcd_fname):
 
     pc2 = pypcd.PointCloud.from_path(tmp_fname)
     md2 = pc2.get_metadata()
-    for k, v in md2.iteritems():
+    for k, v in md2.items():
         if k == 'data':
             assert v == 'binary_compressed'
         else:
@@ -193,7 +193,7 @@ def test_path_roundtrip_binary_compressed(pcd_fname):
 
 
 def test_cat_pointclouds(pcd_fname):
-    import pypcd
+    from pypcd import pypcd
     pc = pypcd.PointCloud.from_path(pcd_fname)
     pc2 = pc.copy()
     pc2.pc_data['x'] += 0.1
@@ -204,7 +204,7 @@ def test_cat_pointclouds(pcd_fname):
 
 
 def test_ascii_bin1(ascii_pcd_fname, bin_pcd_fname):
-    import pypcd
+    from pypcd import pypcd
     apc1 = pypcd.point_cloud_from_path(ascii_pcd_fname)
     bpc1 = pypcd.point_cloud_from_path(bin_pcd_fname)
     am = cloud_centroid(apc1)
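
Note on the porting pattern above: every hunk applies the same Python 2/3 compatibility moves -- wrapping map()/zip() in list(), replacing xrange() with range() and iteritems() with items(), and switching PCD output to binary-mode files ('wb') that receive bytes rather than str. The sketch below illustrates only that last str/bytes point; it is illustrative only, and write_pcd_like, header_text, and columns are hypothetical names, not pypcd API.

# Illustrative sketch, not part of the patch: why the header is encoded and the
# column chunks are joined with a bytes separator before writing.
from io import BytesIO

def write_pcd_like(fileobj, header_text, columns):
    # A file object opened in binary mode ('wb') accepts only bytes, so the
    # text header is encoded first, mirroring write_header(metadata).encode('utf-8').
    fileobj.write(header_text.encode('utf-8'))
    # columns is a list of bytes chunks (e.g. serialized numpy columns); joining
    # them needs a bytes separator, as in b''.join(uncompressed_lst).
    fileobj.write(b''.join(columns))

# Works the same for an in-memory buffer or open(fname, 'wb'):
buf = BytesIO()
write_pcd_like(buf, 'DATA binary\n', [b'\x00\x01', b'\x02\x03'])
assert buf.getvalue() == b'DATA binary\n\x00\x01\x02\x03'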