#ifndef TMVA_SOFIE_ROPERATOR_LeakyRelu
#define TMVA_SOFIE_ROPERATOR_LeakyRelu
if (std::is_same<T, float>::value) {
   // float is the only supported template type
} else {
   throw std::runtime_error(
      "TMVA SOFIE Encountered unsupported type parsing a Leaky Relu operator");
}
std::vector<ETensorType> TypeInference(std::vector<ETensorType> input) override {
std::vector<std::vector<size_t>> ShapeInference(std::vector<std::vector<size_t>> input) override {
if (model.CheckIfTensorAlreadyExist(fNX) == false) {
   throw std::runtime_error(
      "TMVA SOFIE Leaky Relu Op Input Tensor is not found in model");
}
model.AddIntermediateTensor(fNY, model.GetTensorType(fNX), fShape);
std::string Generate(std::string OpName) override {
   OpName = "op_" + OpName;
   if (fShape.empty()) {
      throw std::runtime_error(
         "TMVA SOFIE Operator Leaky Relu called to Generate without being initialized first");
   }
std::stringstream out;
out << SP << "constexpr float " << OpName << "_alpha = "
    << std::setprecision(std::numeric_limits<float>::max_digits10) << falpha << ";\n";
out << "\n//------ LEAKY RELU\n";
out << SP << "for (int id = 0; id < " << length << " ; id++){\n";
out << SP << SP << "tensor_" << fNY << "[id] = ((tensor_" << fNX
    << "[id] >= 0 )? tensor_" << fNX << "[id] : " << OpName
    << "_alpha * tensor_" << fNX << "[id]);\n";
std::string Generate(std::string OpName) override
std::vector< ETensorType > TypeInference(std::vector< ETensorType > input) override
std::vector< std::vector< size_t > > ShapeInference(std::vector< std::vector< size_t > > input) override
void Initialize(RModel &model) override
ROperator_LeakyRelu(float alpha, std::string nameX, std::string nameY)
std::vector< size_t > fShape
std::vector< std::string_view > fInputTensorNames
const std::string SP
Space string used to correctly indent the generated C++ code.
std::vector< std::string_view > fOutputTensorNames
std::size_t ConvertShapeToLength(const std::vector< size_t > &shape)
create variable transformations