Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -5,3 +5,10 @@ build
.eggs/
dist
_build
bin
lib
include
.Python
.idea
pip-selfcheck.json
.pytest_cache/
36 changes: 21 additions & 15 deletions pypcd/pypcd.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
import re
import struct
import copy
import cStringIO as sio
import numpy as np
import warnings
import lzf
Expand All @@ -24,6 +23,11 @@
except ImportError:
HAS_SENSOR_MSGS = False

try:
from io import StringIO as sio
except:
import cStringIO as sio

__all__ = ['PointCloud',
'point_cloud_to_path',
'point_cloud_to_buffer',
Expand Down Expand Up @@ -91,11 +95,11 @@ def parse_header(lines):
elif key in ('fields', 'type'):
metadata[key] = value.split()
elif key in ('size', 'count'):
metadata[key] = map(int, value.split())
metadata[key] = list(map(int, value.split()))
elif key in ('width', 'height', 'points'):
metadata[key] = int(value)
elif key == 'viewpoint':
metadata[key] = map(float, value.split())
metadata[key] = list(map(float, value.split()))
elif key == 'data':
metadata[key] = value.strip().lower()
# TODO apparently count is not required?
Expand Down Expand Up @@ -205,9 +209,9 @@ def _build_dtype(metadata):
fieldnames.append(f)
typenames.append(np_type)
else:
fieldnames.extend(['%s_%04d' % (f, i) for i in xrange(c)])
fieldnames.extend(['%s_%04d' % (f, i) for i in range(c)])
typenames.extend([np_type]*c)
dtype = np.dtype(zip(fieldnames, typenames))
dtype = np.dtype(list(zip(fieldnames, typenames)))
return dtype


Expand Down Expand Up @@ -278,6 +282,8 @@ def point_cloud_from_fileobj(f):
header = []
while True:
ln = f.readline().strip()
if not isinstance(ln, str):
ln = ln.decode('utf-8')
header.append(ln)
if ln.startswith('DATA'):
metadata = parse_header(header)
Expand Down Expand Up @@ -320,13 +326,13 @@ def point_cloud_to_fileobj(pc, fileobj, data_compression=None):
assert(data_compression in ('ascii', 'binary', 'binary_compressed'))
metadata['data'] = data_compression

header = write_header(metadata)
header = write_header(metadata).encode('utf-8')
fileobj.write(header)
if metadata['data'].lower() == 'ascii':
fmtstr = build_ascii_fmtstr(pc)
np.savetxt(fileobj, pc.pc_data, fmt=fmtstr)
elif metadata['data'].lower() == 'binary':
fileobj.write(pc.pc_data.tostring('C'))
fileobj.write(pc.pc_data.tostring())
elif metadata['data'].lower() == 'binary_compressed':
# TODO
# a '_' field is ignored by pcl and breaks compressed point clouds.
Expand All @@ -335,9 +341,9 @@ def point_cloud_to_fileobj(pc, fileobj, data_compression=None):
# reorder to column-by-column
uncompressed_lst = []
for fieldname in pc.pc_data.dtype.names:
column = np.ascontiguousarray(pc.pc_data[fieldname]).tostring('C')
column = np.ascontiguousarray(pc.pc_data[fieldname]).tostring()
uncompressed_lst.append(column)
uncompressed = ''.join(uncompressed_lst)
uncompressed = b''.join(uncompressed_lst)
uncompressed_size = len(uncompressed)
# print("uncompressed_size = %r"%(uncompressed_size))
buf = lzf.compress(uncompressed)
Expand All @@ -357,7 +363,7 @@ def point_cloud_to_fileobj(pc, fileobj, data_compression=None):


def point_cloud_to_path(pc, fname):
with open(fname, 'w') as f:
with open(fname, 'wb') as f:
point_cloud_to_fileobj(pc, f)


Expand All @@ -370,21 +376,21 @@ def point_cloud_to_buffer(pc, data_compression=None):
def save_point_cloud(pc, fname):
""" Save pointcloud to fname in ascii format.
"""
with open(fname, 'w') as f:
with open(fname, 'wb') as f:
point_cloud_to_fileobj(pc, f, 'ascii')


def save_point_cloud_bin(pc, fname):
""" Save pointcloud to fname in binary format.
"""
with open(fname, 'w') as f:
with open(fname, 'wb') as f:
point_cloud_to_fileobj(pc, f, 'binary')


def save_point_cloud_bin_compressed(pc, fname):
""" Save pointcloud to fname in binary compressed format.
"""
with open(fname, 'w') as f:
with open(fname, 'wb') as f:
point_cloud_to_fileobj(pc, f, 'binary_compressed')


Expand Down Expand Up @@ -481,7 +487,7 @@ def add_fields(pc, metadata, pc_data):
else:
fieldnames.extend(['%s_%04d' % (f, i) for i in xrange(c)])
typenames.extend([np_type]*c)
dtype = zip(fieldnames, typenames)
dtype = list(zip(fieldnames, typenames))
# new dtype. could be inferred?
new_dtype = [(f, pc.pc_data.dtype[f])
for f in pc.pc_data.dtype.names] + dtype
Expand Down Expand Up @@ -693,7 +699,7 @@ def save_pcd(self, fname, compression=None, **kwargs):
warnings.warn('data_compression keyword is deprecated for'
' compression')
compression = kwargs['data_compression']
with open(fname, 'w') as f:
with open(fname, 'wb') as f:
point_cloud_to_fileobj(self, f, compression)

def save_pcd_to_fileobj(self, fileobj, compression=None, **kwargs):
Expand Down
18 changes: 9 additions & 9 deletions pypcd/tests/test_pypcd.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ def test_parse_header():


def test_from_path(pcd_fname):
import pypcd
from pypcd import pypcd
pc = pypcd.PointCloud.from_path(pcd_fname)

fields = 'x y z normal_x normal_y normal_z curvature boundary k vp_x vp_y vp_z principal_curvature_x principal_curvature_y principal_curvature_z pc1 pc2'.split()
Expand All @@ -95,7 +95,7 @@ def test_from_path(pcd_fname):


def test_add_fields(pcd_fname):
import pypcd
from pypcd import pypcd
pc = pypcd.PointCloud.from_path(pcd_fname)

old_md = pc.get_metadata()
Expand All @@ -114,7 +114,7 @@ def test_add_fields(pcd_fname):


def test_path_roundtrip_ascii(pcd_fname):
import pypcd
from pypcd import pypcd
pc = pypcd.PointCloud.from_path(pcd_fname)
md = pc.get_metadata()

Expand All @@ -138,7 +138,7 @@ def test_path_roundtrip_ascii(pcd_fname):


def test_path_roundtrip_binary(pcd_fname):
import pypcd
from pypcd import pypcd
pc = pypcd.PointCloud.from_path(pcd_fname)
md = pc.get_metadata()

Expand All @@ -152,7 +152,7 @@ def test_path_roundtrip_binary(pcd_fname):

pc2 = pypcd.PointCloud.from_path(tmp_fname)
md2 = pc2.get_metadata()
for k, v in md2.iteritems():
for k, v in md2.items():
if k == 'data':
assert v == 'binary'
else:
Expand All @@ -166,7 +166,7 @@ def test_path_roundtrip_binary(pcd_fname):


def test_path_roundtrip_binary_compressed(pcd_fname):
import pypcd
from pypcd import pypcd
pc = pypcd.PointCloud.from_path(pcd_fname)
md = pc.get_metadata()

Expand All @@ -180,7 +180,7 @@ def test_path_roundtrip_binary_compressed(pcd_fname):

pc2 = pypcd.PointCloud.from_path(tmp_fname)
md2 = pc2.get_metadata()
for k, v in md2.iteritems():
for k, v in md2.items():
if k == 'data':
assert v == 'binary_compressed'
else:
Expand All @@ -193,7 +193,7 @@ def test_path_roundtrip_binary_compressed(pcd_fname):


def test_cat_pointclouds(pcd_fname):
import pypcd
from pypcd import pypcd
pc = pypcd.PointCloud.from_path(pcd_fname)
pc2 = pc.copy()
pc2.pc_data['x'] += 0.1
Expand All @@ -204,7 +204,7 @@ def test_cat_pointclouds(pcd_fname):


def test_ascii_bin1(ascii_pcd_fname, bin_pcd_fname):
import pypcd
from pypcd import pypcd
apc1 = pypcd.point_cloud_from_path(ascii_pcd_fname)
bpc1 = pypcd.point_cloud_from_path(bin_pcd_fname)
am = cloud_centroid(apc1)
Expand Down