64 changes: 63 additions & 1 deletion tmva/pymva/src/RModelParser_Keras.cxx
@@ -42,8 +42,10 @@ void AddKerasLayer(RModel &rmodel, PyObject *fLayer);
// Declaring Internal Functions for Keras layers which don't have activation as an additional attribute
std::unique_ptr<ROperator> MakeKerasActivation(PyObject *fLayer); // For instantiating ROperator for Keras Activation Layer
std::unique_ptr<ROperator> MakeKerasReLU(PyObject *fLayer); // For instantiating ROperator for Keras ReLU layer
std::unique_ptr<ROperator> MakeKerasLeakyReLU(PyObject *fLayer); // For instantiating ROperator for Keras Leaky ReLU layer
std::unique_ptr<ROperator> MakeKerasSelu(PyObject *fLayer); // For instantiating ROperator for Keras Selu layer
std::unique_ptr<ROperator> MakeKerasSigmoid(PyObject *fLayer); // For instantiating ROperator for Keras Sigmoid layer
std::unique_ptr<ROperator> MakeKerasSoftmax(PyObject *fLayer); // For instantiating ROperator for Keras Softmax layer
std::unique_ptr<ROperator> MakeKerasPermute(PyObject *fLayer); // For instantiating ROperator for Keras Permute Layer

// Declaring Internal function for Keras layers which have additional activation attribute
@@ -59,9 +61,11 @@ const KerasMethodMap mapKerasLayer = {

// For activation layers
{"ReLU", &MakeKerasReLU},
{"LeakyReLU", &MakeKerasLeakyReLU},

// For layers with activation attributes
{"relu", &MakeKerasReLU},
{"leakyRelu", &MakeKerasLeakyReLU},
{"selu", &MakeKerasSelu},
{"sigmoid", &MakeKerasSigmoid}
};
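For context, the map above is what AddKerasLayer consults when it dispatches a parsed layer, so the new "LeakyReLU" entries are picked up automatically. The fragment below is a rough sketch of that lookup, not the verbatim implementation; the "layerType" key is an assumption mirroring the "layerInput"/"layerOutput"/"layerDType" keys used by the Make* functions in this file.

// Sketch of the dispatch inside AddKerasLayer(RModel &rmodel, PyObject *fLayer)
std::string fLayerType = PyStringAsString(PyDict_GetItemString(fLayer, "layerType")); // assumed key
auto findLayer = mapKerasLayer.find(fLayerType);
if (findLayer == mapKerasLayer.end())
   throw std::runtime_error("TMVA::SOFIE - parsing Keras layer " + fLayerType + " is not yet supported");
rmodel.AddOperator((findLayer->second)(fLayer));   // e.g. "LeakyReLU" -> MakeKerasLeakyReLU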
@@ -75,8 +79,9 @@ const KerasMethodMapWithActivation mapKerasLayerWithActivation = {
/// \brief Adds equivalent ROperator with respect to Keras model layer
/// into the referenced RModel object
///
/// \param[inout] rmodel RModel object, by reference, returned with the added ROperator
/// \param[in] rmodel RModel object
/// \param[in] fLayer Python Keras layer as a Dictionary object
/// \param[out] RModel object with the added ROperator
///
/// Function adds equivalent ROperator into the referenced RModel object.
/// Keras models can have layers like Dense and Conv which have activation
@@ -259,6 +264,35 @@ std::unique_ptr<ROperator> MakeKerasReLU(PyObject* fLayer)
}


//////////////////////////////////////////////////////////////////////////////////
/// \brief Prepares a ROperator object for Keras Leaky ReLU activation
///
/// \param[in] fLayer Python Keras layer as a Dictionary object
/// \return Unique pointer to ROperator object
///
/// For instantiating a ROperator_Leaky_Relu object, the names of
/// input & output tensors and the data-type of the layer are extracted.
std::unique_ptr<ROperator> MakeKerasLeakyReLU(PyObject* fLayer)
{
PyObject* fInputs=PyDict_GetItemString(fLayer,"layerInput");
PyObject* fOutputs=PyDict_GetItemString(fLayer,"layerOutput");

std::string fLayerDType = PyStringAsString(PyDict_GetItemString(fLayer,"layerDType"));
std::string fLayerInputName = PyStringAsString(PyList_GetItem(fInputs,0));
std::string fLayerOutputName = PyStringAsString(PyList_GetItem(fOutputs,0));

std::unique_ptr<ROperator> op;
switch(ConvertStringToType(fLayerDType)){
case ETensorType::FLOAT:
op.reset(new ROperator_Leaky_Relu<float>(fLayerInputName, fLayerOutputName));
break;
default:
throw std::runtime_error("TMVA::SOFIE - Unsupported - Operator Leaky Relu does not yet support input type " + fLayerDType);
}
return op;
}


//////////////////////////////////////////////////////////////////////////////////
/// \brief Prepares a ROperator object for Keras Selu activation
///
@@ -315,6 +349,34 @@ std::unique_ptr<ROperator> MakeKerasSigmoid(PyObject* fLayer){
}


//////////////////////////////////////////////////////////////////////////////////
/// \brief Prepares a ROperator object for Keras Softmax activation
///
/// \param[in] fLayer Python Keras layer as a Dictionary object
/// \return Unique pointer to ROperator object
///
/// For instantiating a ROperator_Softmax object, the names of
/// input & output tensors and the data-type of the layer are extracted.
std::unique_ptr<ROperator> MakeKerasSoftmax(PyObject* fLayer){
PyObject* fInputs = PyDict_GetItemString(fLayer,"layerInput");
PyObject* fOutputs = PyDict_GetItemString(fLayer,"layerOutput");

std::string fLayerDType = PyStringAsString(PyDict_GetItemString(fLayer,"layerDType"));
std::string fLayerInputName = PyStringAsString(PyList_GetItem(fInputs,0));
std::string fLayerOutputName = PyStringAsString(PyList_GetItem(fOutputs,0));

std::unique_ptr<ROperator> op;
switch(ConvertStringToType(fLayerDType)){
case ETensorType::FLOAT:
op.reset(new ROperator_Softmax<float>(fLayerInputName, fLayerOutputName));
break;
default:
throw std::runtime_error("TMVA::SOFIE - Unsupported - Operator Softmax does not yet support input type " + fLayerDType);
}
return op;
}


//////////////////////////////////////////////////////////////////////////////////
/// \brief Prepares a ROperator object for Keras Permute layer
///
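To exercise the new Keras mappings end to end, a minimal driver along the following lines should work; the parser header name and the model file name are assumptions, while Parse, RModel::Generate and RModel::OutputGenerated are the existing entry points.

#include "TMVA/RModelParser_Keras.h"   // assumed header name for this parser

using namespace TMVA::Experimental;

int main() {
   // Parse a Keras .h5 model containing LeakyReLU and Softmax layers (hypothetical file)
   SOFIE::RModel model = SOFIE::PyKeras::Parse("model_leakyrelu_softmax.h5");
   model.Generate();                              // emit inference code, including the new operators
   model.OutputGenerated("model_inference.hxx");  // write the generated header to disk
   return 0;
}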
72 changes: 64 additions & 8 deletions tmva/pymva/src/RModelParser_PyTorch.cxx
@@ -44,12 +44,14 @@ namespace INTERNAL{
// For searching and calling specific preparatory function for PyTorch ONNX Graph's node
std::unique_ptr<ROperator> MakePyTorchNode(PyObject* fNode);

std::unique_ptr<ROperator> MakePyTorchGemm(PyObject* fNode); // For instantiating ROperator for PyTorch ONNX's Gemm operator
std::unique_ptr<ROperator> MakePyTorchConv(PyObject* fNode); // For instantiating ROperator for PyTorch ONNX's Conv operator
std::unique_ptr<ROperator> MakePyTorchRelu(PyObject* fNode); // For instantiating ROperator for PyTorch ONNX's Relu operator
std::unique_ptr<ROperator> MakePyTorchSelu(PyObject* fNode); // For instantiating ROperator for PyTorch ONNX's Selu operator
std::unique_ptr<ROperator> MakePyTorchSigmoid(PyObject* fNode); // For instantiating ROperator for PyTorch ONNX's Sigmoid operator
std::unique_ptr<ROperator> MakePyTorchTranspose(PyObject* fNode); // For instantiating ROperator for PyTorch ONNX's Transpose operator
std::unique_ptr<ROperator> MakePyTorchGemm(PyObject* fNode); // For instantiating ROperator for PyTorch ONNX's Gemm operator
std::unique_ptr<ROperator> MakePyTorchConv(PyObject* fNode); // For instantiating ROperator for PyTorch ONNX's Conv operator
std::unique_ptr<ROperator> MakePyTorchRelu(PyObject* fNode); // For instantiating ROperator for PyTorch ONNX's Relu operator
std::unique_ptr<ROperator> MakePyTorchLeakyRelu(PyObject* fNode); // For instantiating ROperator for PyTorch ONNX's LeakyRelu operator
std::unique_ptr<ROperator> MakePyTorchSelu(PyObject* fNode); // For instantiating ROperator for PyTorch ONNX's Selu operator
std::unique_ptr<ROperator> MakePyTorchSigmoid(PyObject* fNode); // For instantiating ROperator for PyTorch ONNX's Sigmoid operator
std::unique_ptr<ROperator> MakePyTorchSoftmax(PyObject* fNode); // For instantiating ROperator for PyTorch ONNX's Softmax operator
std::unique_ptr<ROperator> MakePyTorchTranspose(PyObject* fNode); // For instantiating ROperator for PyTorch ONNX's Transpose operator

// For mapping PyTorch ONNX Graph's Node with the preparatory functions for ROperators
using PyTorchMethodMap = std::unordered_map<std::string, std::unique_ptr<ROperator> (*)(PyObject* fNode)>;
@@ -59,8 +61,10 @@ const PyTorchMethodMap mapPyTorchNode =
{"onnx::Gemm", &MakePyTorchGemm},
{"onnx::Conv", &MakePyTorchConv},
{"onnx::Relu", &MakePyTorchRelu},
{"onnx::LeakyRelu",&MakePyTorchLeakyRelu},
{"onnx::Selu", &MakePyTorchSelu},
{"onnx::Sigmoid", &MakePyTorchSigmoid},
{"onnx::Softmax", &MakePyTorchSoftmax},
{"onnx::Transpose", &MakePyTorchTranspose}
};

@@ -175,6 +179,33 @@ std::unique_ptr<ROperator> MakePyTorchRelu(PyObject* fNode){
return op;
}

//////////////////////////////////////////////////////////////////////////////////
/// \brief Prepares a ROperator_Leaky_Relu object
///
/// \param[in] fNode Python PyTorch ONNX Graph node
/// \return Unique pointer to ROperator object
///
/// For instantiating a ROperator_Leaky_Relu object, the names of
/// input & output tensors and the data-type of the Graph node
/// are extracted.
std::unique_ptr<ROperator> MakePyTorchLeakyRelu(PyObject* fNode){
PyObject* fInputs = PyDict_GetItemString(fNode,"nodeInputs");
PyObject* fOutputs = PyDict_GetItemString(fNode,"nodeOutputs");
std::string fNodeDType = PyStringAsString(PyList_GetItem(PyDict_GetItemString(fNode,"nodeDType"),0));
std::string fNameX = PyStringAsString(PyList_GetItem(fInputs,0));
std::string fNameY = PyStringAsString(PyList_GetItem(fOutputs,0));
std::unique_ptr<ROperator> op;
switch(ConvertStringToType(fNodeDType)){
case ETensorType::FLOAT: {
op.reset(new ROperator_Leaky_Relu<float>(fNameX,fNameY));
break;
}
default:
throw std::runtime_error("TMVA::SOFIE - Unsupported - Operator Leaky Relu does not yet support input type " + fNodeDType);
}
return op;
}

//////////////////////////////////////////////////////////////////////////////////
/// \brief Prepares a ROperator_Selu object
///
@@ -227,6 +258,31 @@ std::unique_ptr<ROperator> MakePyTorchSigmoid(PyObject* fNode){
return op;
}

//////////////////////////////////////////////////////////////////////////////////
/// \brief Prepares a ROperator_Softmax object
///
/// \param[in] fNode Python PyTorch ONNX Graph node
/// \return Unique pointer to ROperator object
///
/// For instantiating a ROperator_Softmax object, the names of
/// input & output tensors and the data-type of the Graph node
/// are extracted.
std::unique_ptr<ROperator> MakePyTorchSoftmax(PyObject* fNode){
PyObject* fInputs = PyDict_GetItemString(fNode,"nodeInputs");
PyObject* fOutputs = PyDict_GetItemString(fNode,"nodeOutputs");
std::string fNodeDType = PyStringAsString(PyList_GetItem(PyDict_GetItemString(fNode,"nodeDType"),0));

std::unique_ptr<ROperator> op;
switch(ConvertStringToType(fNodeDType)){
case ETensorType::FLOAT: {
op.reset(new ROperator_Softmax<float>(PyStringAsString(PyList_GetItem(fInputs,0)), PyStringAsString(PyList_GetItem(fOutputs,0))));
break;
}
default:
throw std::runtime_error("TMVA::SOFIE - Unsupported - Operator Softmax does not yet support input type " + fNodeDType);
}
return op;
}

//////////////////////////////////////////////////////////////////////////////////
/// \brief Prepares a ROperator_Transpose object
@@ -419,7 +475,7 @@ RModel Parse(std::string filename, std::vector<std::vector<size_t>> inputShapes,

//Getting the ONNX graph from model using the dummy inputs and example outputs
PyRunString("_set_onnx_shape_inference(True)",fGlobalNS,fLocalNS);
PyRunString("graph=_model_to_graph(model,dummyInputs,example_outputs=output)",fGlobalNS,fLocalNS);
PyRunString("graph=_model_to_graph(model,dummyInputs)",fGlobalNS,fLocalNS);


//Extracting the model information in list modelData
@@ -539,7 +595,7 @@ RModel Parse(std::string filename, std::vector<std::vector<size_t>> inputShapes,
}

//////////////////////////////////////////////////////////////////////////////////
/// \param[in] filepath file location of PyTorch .pt model
/// \param[in] filename file location of PyTorch .pt model
/// \param[in] inputShapes vector of input shape vectors
/// \return Parsed RModel object
///
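Similarly, a sketch of driving this parser through the two-argument Parse overload documented above; the header name and model file are assumptions.

#include "TMVA/RModelParser_PyTorch.h"   // assumed header name for this parser

#include <vector>

using namespace TMVA::Experimental;

int main() {
   // Input shapes of the saved TorchScript model (hypothetical: one batch of four features)
   std::vector<std::vector<size_t>> inputShapes = {{1, 4}};
   SOFIE::RModel model = SOFIE::PyTorch::Parse("model_leakyrelu_softmax.pt", inputShapes);
   model.Generate();
   model.OutputGenerated("model_inference.hxx");
   return 0;
}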
2 changes: 2 additions & 0 deletions tmva/sofie/CMakeLists.txt
@@ -21,9 +21,11 @@ ROOT_STANDARD_LIBRARY_PACKAGE(ROOTTMVASofie
TMVA/ROperator_Conv.hxx
TMVA/ROperator_Gemm.hxx
TMVA/ROperator_Relu.hxx
TMVA/ROperator_Leaky_Relu.hxx
TMVA/ROperator_Reshape.hxx
TMVA/ROperator_Selu.hxx
TMVA/ROperator_Sigmoid.hxx
TMVA/ROperater_Softmax.hxx
TMVA/ROperator_Slice.hxx
TMVA/ROperator_Transpose.hxx
TMVA/ROperator_Pool.hxx
76 changes: 76 additions & 0 deletions tmva/sofie/inc/TMVA/ROperater_Softmax.hxx
@@ -0,0 +1,76 @@
#ifndef TMVA_SOFIE_ROPERATOR_Softmax
#define TMVA_SOFIE_ROPERATOR_Softmax

#include "TMVA/SOFIE_common.hxx"
#include "TMVA/ROperator.hxx"
#include "TMVA/RModel.hxx"

#include <sstream>

namespace TMVA{
namespace Experimental{
namespace SOFIE{

template <typename T>
class ROperator_Softmax final : public ROperator
{

private:

std::string fNX;
std::string fNY;
std::vector<size_t> fShape;

public:
ROperator_Softmax(){}
ROperator_Softmax(std::string nameX, std::string nameY):
fNX(UTILITY::Clean_name(nameX)), fNY(UTILITY::Clean_name(nameY)){}

std::vector<ETensorType> TypeInference(std::vector<ETensorType> input){
return input;
}

std::vector<std::vector<size_t>> ShapeInference(std::vector<std::vector<size_t>> input){
auto ret = input; //suggest copy to compiler
return ret;
}

void Initialize(RModel& model){
if (model.CheckIfTensorAlreadyExist(fNX) == false){ //input must be a graph input, or already initialized intermediate tensor
throw std::runtime_error("TMVA SOFIE Softmax Op Input Tensor is not found in model");
}
fShape = model.GetTensorShape(fNX);
model.AddIntermediateTensor(fNY, model.GetTensorType(fNX), fShape);
}


std::string Generate(std::string OpName){
OpName = "op_" + OpName;
if (fShape.empty()){
throw std::runtime_error("TMVA SOFIE Transpose Softmax called to Generate without being initialized first");
}
std::stringstream out;
int length = 1;
for(auto& i: fShape){
length *= i;
}
out << "\n//------ SOFTMAX\n";
out << SP << "double sum = 0.0;\n";
out << SP << "for (int id = 0; id < " << length << " ; id++){\n";
out << SP << SP << "tensor_" << fNY << "[id] = std::exp( - tensor_" << fNX << "[id]);\n";
out << SP << SP << "sum += tensor_" << fNY << "[id];\n";
out << SP << "}\n";
out << SP << "for (int id = 0; id < " << length << " ; id++){\n";
out << SP << SP << "tensor_" << fNY << "[id] /= sum;\n";
out << SP << "}\n";
return out.str();
}

};

}//SOFIE
}//Experimental
}//TMVA


#endif //TMVA_SOFIE_ROPERATOR_Softmax
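For illustration, the snippet below is roughly what Generate() emits for this operator, assuming fNX = "in", fNY = "out", fShape = {1, 4}, SP being a short indent string, and the generated file providing the tensor_in/tensor_out buffers plus <cmath>. Note that the sum runs over the whole flattened tensor, so this matches Keras/ONNX softmax only for a single sample; per-axis handling is not implemented here.

//------ SOFTMAX
double sum = 0.0;
for (int id = 0; id < 4 ; id++){
   tensor_out[id] = std::exp(tensor_in[id]);   // exponentiate each element
   sum += tensor_out[id];                      // accumulate the normalization constant
}
for (int id = 0; id < 4 ; id++){
   tensor_out[id] /= sum;                      // normalize so the outputs sum to 1
}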
71 changes: 71 additions & 0 deletions tmva/sofie/inc/TMVA/ROperator_Leaky_Relu.hxx
@@ -0,0 +1,71 @@
#ifndef TMVA_SOFIE_ROPERATOR_LEAKY_RELU
#define TMVA_SOFIE_ROPERATOR_LEAKY_RELU

#include "TMVA/SOFIE_common.hxx"
#include "TMVA/ROperator.hxx"
#include "TMVA/RModel.hxx"

#include <sstream>

namespace TMVA{
namespace Experimental{
namespace SOFIE{

template <typename T>
class ROperator_Leaky_Relu final : public ROperator
{

private:

std::string fNX;
std::string fNY;
std::vector<size_t> fShape;

public:
ROperator_Leaky_Relu(){}
ROperator_Leaky_Relu(std::string nameX, std::string nameY):
fNX(UTILITY::Clean_name(nameX)), fNY(UTILITY::Clean_name(nameY)){}

std::vector<ETensorType> TypeInference(std::vector<ETensorType> input){
return input;
}

std::vector<std::vector<size_t>> ShapeInference(std::vector<std::vector<size_t>> input){
auto ret = input; //suggest copy to compiler
return ret;
}

void Initialize(RModel& model){
if (model.CheckIfTensorAlreadyExist(fNX) == false){ //input must be a graph input, or already initialized intermediate tensor
throw std::runtime_error("TMVA SOFIE Leaky Relu Op Input Tensor is not found in model");
}
fShape = model.GetTensorShape(fNX);
model.AddIntermediateTensor(fNY, model.GetTensorType(fNX), fShape);
}


std::string Generate(std::string OpName){
OpName = "op_" + OpName;
if (fShape.empty()) {
throw std::runtime_error("TMVA SOFIE Transpose Leaky Relu called to Generate without being initialized first");
}
std::stringstream out;
int length = 1;
for(auto& i: fShape){
length *= i;
}
out << "\n//------ LEAKY RELU\n";
out << SP << "for (int id = 0; id < " << length << " ; id++){\n";
out << SP << SP << "tensor_" << fNY << "[id] = ((tensor_" << fNX << "[id] > 0 )? tensor_" << fNX << "[id] : 0.01 * tensor_"<< fNX<<");\n";
out << SP << "}\n";
return out.str();
}

};

}//SOFIE
}//Experimental
}//TMVA


#endif //TMVA_SOFIE_ROPERATOR_LEAKY_RELU
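Analogously, a sketch of the code this operator generates for fNX = "in", fNY = "out", fShape = {1, 4}. The negative slope is hard-coded to 0.01 (the ONNX default), so a layer's own alpha attribute (Keras' LeakyReLU defaults to 0.3) is not yet propagated.

//------ LEAKY RELU
for (int id = 0; id < 4 ; id++){
   // pass positives through, scale negatives by the fixed 0.01 slope
   tensor_out[id] = ((tensor_in[id] > 0 )? tensor_in[id] : 0.01 * tensor_in[id]);
}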