Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Binary op2 #20

Open
wants to merge 7 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion tmva/sofie/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ ROOT_STANDARD_LIBRARY_PACKAGE(ROOTTMVASofie
TMVA/OperatorList.hxx
TMVA/RModel.hxx
TMVA/ROperator.hxx
TMVA/ROperator_Add.hxx
TMVA/ROperator_BasicBinary.hxx
TMVA/ROperator_BatchNormalization.hxx
TMVA/ROperator_Conv.hxx
TMVA/ROperator_Gemm.hxx
Expand Down
2 changes: 1 addition & 1 deletion tmva/sofie/inc/TMVA/OperatorList.hxx
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
#include "TMVA/ROperator_LSTM.hxx"
#include "TMVA/ROperator_BatchNormalization.hxx"
#include "TMVA/ROperator_Pool.hxx"
#include "TMVA/ROperator_Add.hxx"
#include "TMVA/ROperator_BasicBinary.hxx"
#include "TMVA/ROperator_Reshape.hxx"
#include "TMVA/ROperator_Slice.hxx"
#include "TMVA/ROperator_GRU.hxx"
88 changes: 0 additions & 88 deletions tmva/sofie/inc/TMVA/ROperator_Add.hxx

This file was deleted.

122 changes: 122 additions & 0 deletions tmva/sofie/inc/TMVA/ROperator_BasicBinary.hxx
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
#ifndef TMVA_SOFIE_ROperator_BasicBinary
#define TMVA_SOFIE_ROperator_BasicBinary

#include "TMVA/SOFIE_common.hxx"
#include "TMVA/ROperator.hxx"
#include "TMVA/RModel.hxx"

#include <sstream>

namespace TMVA{
namespace Experimental{
namespace SOFIE{

// Element-wise binary operations supported by ROperator_BasicBinary.
enum EBasicBinaryOperator { Add, Sub, Mul, Div };

// Maps an EBasicBinaryOperator value onto the ONNX operator name and the C++
// infix operator emitted in the generated inference code.
// The members are static because call sites access them without an instance
// (BinaryOperatorTrait<T,Op>::Name()); the primary template returns empty
// strings for any operator without a dedicated specialization.
template <typename T, EBasicBinaryOperator Op1>
struct BinaryOperatorTrait {
   static const char *Name() { return ""; }
   static const char *Op() { return ""; }
};

template <typename T>
struct BinaryOperatorTrait<T, Add> {
   static const char *Name() { return "Add"; }
   static const char *Op() { return "+"; }
};

template <typename T>
struct BinaryOperatorTrait<T, Sub> {
   static const char *Name() { return "Sub"; }
   static const char *Op() { return "-"; }
};

template <typename T>
struct BinaryOperatorTrait<T, Mul> {
   static const char *Name() { return "Mul"; }
   static const char *Op() { return "*"; }
};

template <typename T>
struct BinaryOperatorTrait<T, Div> {
   static const char *Name() { return "Div"; }
   static const char *Op() { return "/"; }
};

// SOFIE operator implementing an element-wise binary operation (Add, Sub,
// Mul, Div) between two input tensors, producing one output tensor.
template<typename T, EBasicBinaryOperator Op>
class ROperator_BasicBinary final : public ROperator{
private:

   std::string fNX1;            // cleaned name of the first input tensor
   std::string fNX2;            // cleaned name of the second input tensor
   std::string fNY;             // cleaned name of the output tensor
   std::vector<size_t> fShape;  // output shape, filled by Initialize()

public:
   ROperator_BasicBinary(){}
   ROperator_BasicBinary(std::string nameX1, std::string nameX2, std::string nameY):
      fNX1(UTILITY::Clean_name(nameX1)), fNX2(UTILITY::Clean_name(nameX2)), fNY(UTILITY::Clean_name(nameY)){}

   // Type of the output given the input types: the output has the same
   // element type as the inputs.
   std::vector<ETensorType> TypeInference(std::vector<ETensorType> input){
      return input;
   }

   // Shape of the output tensors given the input tensors.
   // Assumes both inputs have the same shape (no broadcasting performed
   // here; broadcasting of differing shapes is handled in Initialize()).
   std::vector<std::vector<size_t>> ShapeInference(std::vector<std::vector<size_t>> input){
      // return a vector of size 1 holding the first input's shape
      auto ret = std::vector<std::vector<size_t>>(1, input[0]);
      return ret;
   }

   // Validates the inputs against the model, computes the output shape
   // (broadcasting if the input shapes differ) and registers the output
   // as an intermediate tensor.
   void Initialize(RModel& model){
      // inputs must be graph inputs, or already initialized intermediate tensors
      if (model.CheckIfTensorAlreadyExist(fNX1) == false){
         throw std::runtime_error(std::string("TMVA SOFIE Binary Op Input Tensor ") + fNX1 + " is not found in model");
      }
      if (model.CheckIfTensorAlreadyExist(fNX2) == false) {
         throw std::runtime_error(std::string("TMVA SOFIE Binary Op Input Tensor ") + fNX2 + " is not found in model");
      }
      auto shapeX1 = model.GetTensorShape(fNX1);
      auto shapeX2 = model.GetTensorShape(fNX2);
      if (shapeX1 == shapeX2) {
         fShape = shapeX1;
      } else {
         // shapes differ: compute the common multidirectional broadcast shape
         fShape = UTILITY::Multidirectional_broadcast(shapeX1, shapeX2);
      }
      model.AddIntermediateTensor(fNY, model.GetTensorType(fNX1), fShape);
   }


   // Emits the C++ code performing the element-wise operation.
   std::string Generate(std::string OpName){
      OpName = "op_" + OpName;

      if (fShape.empty()) {
         throw std::runtime_error("TMVA SOFIE Binary Op called to Generate without being initialized first");
      }
      std::stringstream out;
      size_t length = ConvertShapeToLength(fShape);
      // NOTE(review): the loop indexes both inputs with the same flat id, which
      // assumes both inputs already have `length` elements. If Initialize()
      // broadcast differing shapes, the smaller input would be read out of
      // bounds -- confirm inputs are materialized to the broadcast shape
      // before the generated code runs.
      out << "\n//------ " << BinaryOperatorTrait<T,Op>::Name() << "\n";
      out << SP << "for (size_t id = 0; id < " << length << " ; id++){\n";
      out << SP << SP << "tensor_" << fNY << "[id] = tensor_" << fNX1 << "[id] "
          << BinaryOperatorTrait<T,Op>::Op() << " tensor_" << fNX2 << "[id];\n";
      out << SP << "}\n";
      return out.str();
   }

};

}//SOFIE
}//Experimental
}//TMVA


#endif //TMVA_SOFIE_ROperator_BasicBinary
1 change: 1 addition & 0 deletions tmva/sofie/inc/TMVA/SOFIE_common.hxx
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,7 @@ ETensorType GetTemplatedType(T /*obj*/ ){
namespace UTILITY{
template<typename T>
T* Unidirectional_broadcast(const T* original_data, const std::vector<size_t> original_shape, const std::vector<size_t> target_shape);
std::vector<size_t> Multidirectional_broadcast(const std::vector<size_t> input1_shape, const std::vector<size_t> input2_shape);
std::string Clean_name(std::string input_tensor_name);


Expand Down
45 changes: 44 additions & 1 deletion tmva/sofie/src/SOFIE_common.cxx
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,49 @@ T* UTILITY::Unidirectional_broadcast(const T* original_data, const std::vector<s
return new_datavector;
}



std::vector<size_t> UTILITY::Multidirectional_broadcast(std::vector<size_t> input1_shape, std::vector<size_t> input2_shape)
{
   // Compute the common (multidirectional, NumPy-style) broadcast shape of two
   // tensor shapes, following the ONNX broadcasting rules:
   //  - the shorter shape is left-padded with 1s up to the common rank;
   //  - for each axis the sizes must be equal, or one of them must be 1,
   //    in which case the output size on that axis is the larger one.
   // Throws std::runtime_error if the shapes are not broadcastable.
   // (The previous implementation silently kept a value from the larger input
   // shape on incompatible axes, producing a wrong output shape.)

   // left-pad the shorter shape with 1s until both shapes have the same rank
   if (input1_shape.size() < input2_shape.size())
      input1_shape.insert(input1_shape.begin(), input2_shape.size() - input1_shape.size(), 1);
   else if (input2_shape.size() < input1_shape.size())
      input2_shape.insert(input2_shape.begin(), input1_shape.size() - input2_shape.size(), 1);

   std::vector<size_t> output_shape(input1_shape.size());
   for (size_t j = 0; j < input1_shape.size(); j++) {
      if (input1_shape[j] == input2_shape[j]) {
         output_shape[j] = input1_shape[j];
      } else if (input1_shape[j] == 1) {
         output_shape[j] = input2_shape[j];
      } else if (input2_shape[j] == 1) {
         output_shape[j] = input1_shape[j];
      } else {
         throw std::runtime_error("TMVA SOFIE Multidirectional_broadcast : input shapes are not compatible for broadcasting");
      }
   }
   return output_shape;

}

std::string UTILITY::Clean_name(std::string input_tensor_name){
std::string s (input_tensor_name);
s.erase(std::remove_if(s.begin(), s.end(), []( char const& c ) -> bool { return !std::isalnum(c); } ), s.end());
Expand All @@ -145,4 +188,4 @@ template float* UTILITY::Unidirectional_broadcast(const float* original_data, co

}//SOFIE
}//Experimental
}//TMVA
}//TMVA
Loading