
Commit 92f5406

xadupre authored and wenbingl committed
Support for svmlib (#184)
* Support for svmlib
* Add svm to requirements
* Fix import issue with svmlib
* remove unnecessary import
* add libsvm
* update ci
* test installation of libsvm
* update for libsvm
* Update for SVM (sklearn + svmlib)
* Fix libsvm converters, svm sklearn converters.
* Final fixes for svm converters
* Fix undetected shape issues
* Fixes #189, wrong shape calculator for multi regression models
* Update .appveyor.yml
* Update .appveyor.yml
* Disable unit test until onnxruntime 0.1.5 is released (libsvm)
* Update .appveyor.yml
* Update .appveyor.yml
1 parent d20a738 commit 92f5406

29 files changed: +1097 −119 lines changed

.appveyor.yml

Lines changed: 10 additions & 0 deletions
@@ -25,6 +25,16 @@ install:
   - cmd: pip install %COREML_PATH% %ONNX_PATH% cntk==2.5.1
   - cmd: pip install -r requirements-dev.txt"
   - cmd: pip install %KERAS%
+  # install libsvm from github
+  - call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvars64.bat"
+  - git clone --recursive https://github.com/cjlin1/libsvm libsvm
+  - cd libsvm
+  - nmake -f Makefile.win lib
+  - copy windows\*.dll python
+  - cd ..
+  - set PYTHONPATH=libsvm\python;%PYTHONPATH%
+  - dir libsvm\python
+  - python -c "import svmutil"
 
 build: off

.gitignore

Lines changed: 1 addition & 3 deletions
@@ -35,8 +35,6 @@ TESTDUMP
 temp_*.onnx
 tests/sklearn/tests/*
 tests_backend/*.onnx
-tests/coreml/tests/*
-tests/end2end/tests/*
-tests/lightgbm/tests/*
+tests/*/tests/*
 tests/build/*
 tests/tests/

.travis.yml

Lines changed: 7 additions & 0 deletions
@@ -49,6 +49,13 @@ install:
   - if [[ "$TRAVIS_PYTHON_VERSION" != "2.7" ]]; then
      pip install onnxruntime;
    fi
+  # install libsvm
+  - git clone --recursive https://github.com/cjlin1/libsvm libsvm
+  - cd libsvm
+  - make lib
+  - cd ..
+  - export PYTHONPATH=$PYTHONPATH:libsvm/python
+  - python -c "import svmutil"
 
 script:
   - python -m pytest --cov=onnxmltools --cov-report=term --cov-report=html --cov-report=xml tests

onnxmltools/convert/__init__.py

Lines changed: 1 addition & 0 deletions
@@ -6,5 +6,6 @@
 
 from .main import convert_coreml
 from .main import convert_keras
+from .main import convert_libsvm
 from .main import convert_lightgbm
 from .main import convert_sklearn
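
With convert_libsvm now exported alongside the other converters, a libsvm model can be converted the same way as a scikit-learn or LightGBM one. Below is a minimal usage sketch, not part of this commit; it assumes convert_libsvm follows the same (model, name, initial_types=...) pattern as the existing converters, and the input name 'input' is purely illustrative.

# Hedged usage sketch; the convert_libsvm signature is assumed, not shown in this diff.
import svmutil
from onnxmltools.convert import convert_libsvm
from onnxmltools.convert.common.data_types import FloatTensorType

# Train a tiny C-SVC model with the libsvm python bindings.
prob = svmutil.svm_problem([0, 1, 1, 0], [[0, 1], [1, 0], [1, 1], [0, 0]])
param = svmutil.svm_parameter('-s 0 -t 2')
model = svmutil.svm_train(prob, param)

# Convert it to ONNX, declaring a float input of shape [1, 2].
onnx_model = convert_libsvm(model, 'libsvm_svc',
                            initial_types=[('input', FloatTensorType([1, 2]))])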

onnxmltools/convert/common/_container.py

Lines changed: 4 additions & 0 deletions
@@ -84,6 +84,10 @@ class SklearnModelContainer(CommonSklearnModelContainer):
     pass
 
 
+class LibSvmModelContainer(CommonSklearnModelContainer):
+    pass
+
+
 class LightGbmModelContainer(CommonSklearnModelContainer):
     pass

onnxmltools/convert/common/_topology.py

Lines changed: 6 additions & 0 deletions
@@ -46,6 +46,12 @@ def full_name(self):
         Return a globally unique variable ID
         '''
         return self.onnx_name
+
+    def __str__(self):
+        if self.raw_name != self.onnx_name:
+            return "Var(name='{0}', onnx='{1}', type={2})".format(self.raw_name, self.onnx_name, self.type)
+        else:
+            return "Var(name='{0}', type={1})".format(self.raw_name, self.type)
 
 
 class Operator(OperatorBase):

onnxmltools/convert/common/shape_calculator.py

Lines changed: 6 additions & 2 deletions
@@ -26,7 +26,6 @@ def calculate_linear_classifier_output_shapes(operator):
     '''
     check_input_and_output_numbers(operator, input_count_range=1, output_count_range=[1, 2])
     check_input_and_output_types(operator, good_input_types=[FloatTensorType, Int64TensorType])
-
     if len(operator.inputs[0].type.shape) != 2:
         raise RuntimeError('Input must be a [N, C]-tensor')
 
@@ -72,5 +71,10 @@ def calculate_linear_regressor_output_shapes(operator):
     check_input_and_output_numbers(operator, input_count_range=1, output_count_range=1)
 
     N = operator.inputs[0].type.shape[0]
-    operator.outputs[0].type = FloatTensorType([N, 1])
+    op = operator.raw_operator
+    if hasattr(op, 'n_outputs_'):
+        nout = op.n_outputs_
+    else:
+        nout = 1
+    operator.outputs[0].type = FloatTensorType([N, nout])
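
The regressor change is what fixes #189: instead of hard-coding an output shape of [N, 1], the calculator now reads n_outputs_ when the raw model exposes it, so multi-output regressors get [N, n_outputs_]. A short illustration, not from the commit, of the attribute being read, using a scikit-learn tree fitted on two targets:

# Illustration only: a multi-output regressor exposing n_outputs_.
import numpy as np
from sklearn.tree import DecisionTreeRegressor

X = np.random.rand(20, 3).astype(np.float32)
y = np.random.rand(20, 2)                  # two regression targets per sample
model = DecisionTreeRegressor().fit(X, y)

print(model.n_outputs_)                    # 2 -> output tensor typed as [N, 2]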

onnxmltools/convert/common/utils.py

Lines changed: 11 additions & 0 deletions
@@ -88,6 +88,17 @@ def lightgbm_installed():
         return False
 
 
+def libsvm_installed():
+    """
+    Checks that *libsvm* is available.
+    """
+    try:
+        import svmutil
+        return True
+    except ImportError:
+        return False
+
+
 def xgboost_installed():
     """
     Checks that *xgboost* is available.
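
A hedged example of how a helper like libsvm_installed is typically used to guard unit tests when svmutil is not importable; the test class and method names below are illustrative, not taken from this commit.

# Sketch only: skip libsvm converter tests when svmutil cannot be imported.
import unittest
from onnxmltools.convert.common.utils import libsvm_installed

@unittest.skipIf(not libsvm_installed(), 'libsvm (svmutil) is not available')
class TestLibSvmConverters(unittest.TestCase):
    def test_libsvm_is_available(self):
        self.assertTrue(libsvm_installed())
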
Lines changed: 7 additions & 0 deletions
@@ -0,0 +1,7 @@
+#-------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#--------------------------------------------------------------------------
+
+from .convert import convert
Lines changed: 93 additions & 0 deletions
@@ -0,0 +1,93 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+
+from ..common._container import LibSvmModelContainer
+from ..common._topology import *
+
+
+def _parse_libsvm_simple_model(scope, model, inputs):
+    '''
+    This function handles all non-pipeline models.
+
+    :param scope: Scope object
+    :param model: A libsvm model object
+    :param inputs: A list of variables
+    :return: A list of output variables which will be passed to the next stage
+    '''
+
+    if model.get_svm_type() in (0, 1):
+        label_variable = scope.declare_local_variable('label', FloatTensorType())
+        probability_map_variable = scope.declare_local_variable('probabilities', FloatTensorType())
+        this_operator = scope.declare_local_operator("LibSvmSVC", model)
+        this_operator.inputs = inputs
+        this_operator.outputs.append(label_variable)
+        this_operator.outputs.append(probability_map_variable)
+    elif model.get_svm_type() in (4, 3):
+        # We assume that the regressor can only produce a single float tensor.
+        variable = scope.declare_local_variable('variable', FloatTensorType())
+        this_operator = scope.declare_local_operator("LibSvmSVR", model)
+        this_operator.inputs = inputs
+        this_operator.outputs.append(variable)
+    else:
+        raise ValueError("Unknown SVM type '{0}'".format(model.get_svm_type()))
+    return this_operator.outputs
+
+
+def _parse_libsvm(scope, model, inputs):
+    '''
+    This is a delegate function. It does nothing but invoke the correct parsing function according to the input
+    model's type.
+    :param scope: Scope object
+    :param model: A libsvm model object
+    :param inputs: A list of variables
+    :return: The output variables produced by the input model
+    '''
+    return _parse_libsvm_simple_model(scope, model, inputs)
+
+
+def parse_libsvm(model, initial_types=None, target_opset=None,
+                 custom_conversion_functions=None,
+                 custom_shape_calculators=None):
+    # Put the libsvm model into an abstract container so that our framework
+    # can work seamlessly on models created
+    # with different machine learning tools.
+    raw_model_container = LibSvmModelContainer(model)
+
+    # Declare a computational graph. It will become a representation of
+    # the input libsvm model after parsing.
+    topology = Topology(raw_model_container,
+                        initial_types=initial_types,
+                        target_opset=target_opset,
+                        custom_conversion_functions=custom_conversion_functions,
+                        custom_shape_calculators=custom_shape_calculators)
+
+    # Declare an object to provide variables' and operators' naming mechanism.
+    # In contrast to CoreML, one global scope
+    # is enough for parsing libsvm models.
+    scope = topology.declare_scope('__root__')
+
+    # Declare input variables. They should be the inputs of the libsvm model
+    # you want to convert into ONNX.
+    inputs = []
+    for var_name, initial_type in initial_types:
+        inputs.append(scope.declare_local_variable(var_name, initial_type))
+
+    # The object raw_model_container is a part of the topology we're going to return.
+    # We use it to store the inputs of
+    # the libsvm computational graph.
+    for variable in inputs:
+        raw_model_container.add_input(variable)
+
+    # Parse the input libsvm model as a Topology object.
+    outputs = _parse_libsvm(scope, model, inputs)
+
+    # The object raw_model_container is a part of the topology we're going to return.
+    # We use it to store the outputs of
+    # the libsvm computational graph.
+    for variable in outputs:
+        raw_model_container.add_output(variable)
+
+    return topology
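
The dispatch above keys on libsvm's numeric svm_type codes: 0 (C-SVC) and 1 (nu-SVC) map to a LibSvmSVC operator with label and probability outputs, 3 (epsilon-SVR) and 4 (nu-SVR) map to a LibSvmSVR operator with a single output, and any other type (e.g. 2, one-class SVM) raises a ValueError. A small sketch, separate from the commit, showing where those codes come from with the standard svmutil API:

# Illustration only: the svm_type code checked by _parse_libsvm_simple_model
# comes straight from the trained libsvm model.
import svmutil

prob = svmutil.svm_problem([0, 1, 1, 0], [[0, 1], [1, 0], [1, 1], [0, 0]])

svc = svmutil.svm_train(prob, svmutil.svm_parameter('-s 0 -t 2'))  # -s 0 -> C-SVC
svr = svmutil.svm_train(prob, svmutil.svm_parameter('-s 3 -t 2'))  # -s 3 -> epsilon-SVR

print(svc.get_svm_type())   # 0 -> parsed as a LibSvmSVC operator
print(svr.get_svm_type())   # 3 -> parsed as a LibSvmSVR operator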
