#ifndef TMVA_SOFIE_ROPERATOR_Custom
#define TMVA_SOFIE_ROPERATOR_Custom
namespace Experimental{
 
std::vector<std::vector<size_t>> ShapeInference(std::vector<std::vector<size_t>>) override {
   return {{}};
};
 
std::vector<ETensorType> TypeInference(std::vector<ETensorType>) override {
   return {};
};
 
throw std::runtime_error("TMVA SOFIE Custom " + fOpName + " Op Input Tensor " + it + " is not found in model");
 
throw std::runtime_error("TMVA SOFIE Custom " + fOpName + " Op was not initialized with the names/shapes of all the output tensors");
 
std::cout << "Custom operator using " << fHeaderName;
 
std::cout << " ---> ";
 
 
std::stringstream out;
 
out << "\n//------ " << fOpName << " \n";
 
for (long unsigned int i = 0; i < fInputNames.size(); ++i) {
 
for (long unsigned int i = 0; i < fOutputNames.size(); ++i) {
 
out << SP << fOpName << "::Compute(" + args + ");\n";
 
 
 
RModel methods used by the operator:
   void AddNeededCustomHeader(std::string filename)
   void AddIntermediateTensor(std::string tensor_name, ETensorType type, std::vector<Dim> dim_shape)
   bool CheckIfTensorAlreadyExist(std::string tensor_name)
   const ETensorType & GetTensorType(std::string name) const
   const std::vector<size_t> & GetTensorShape(std::string name) const
   void UpdateOutputTensorList(std::vector<std::string> curr_output_tensor, std::vector<std::string> modify_output_tensor)

ROperator_Custom members:
   ROperator_Custom(std::string OpName, std::vector<std::string> Inputs, std::vector<std::string> Outputs, std::vector<std::vector<std::size_t>> OutputShapes, std::string HeaderName)
   void Initialize(RModel &model) override
   std::vector<std::vector<size_t>> ShapeInference(std::vector<std::vector<size_t>>) override
   std::vector<ETensorType> TypeInference(std::vector<ETensorType>) override
   std::string Generate(std::string OpName) override
   std::vector<std::string> fInputNames
   std::vector<std::string> fOutputNames
   std::vector<std::vector<std::size_t>> fOutputShapes
   std::vector<std::size_t> fInputSizes

Members inherited from ROperator:
   std::vector<std::string_view> fInputTensorNames
   std::vector<std::string_view> fOutputTensorNames
   const std::string SP   // space used to correctly indent the generated C++ code

SOFIE utility functions:
   std::string Clean_name(std::string input_tensor_name)
   std::string ConvertTypeToString(ETensorType type)
   std::size_t ConvertShapeToLength(std::vector<size_t> shape)
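Given the constructor signature listed above, attaching a custom operator to a model might look like the following minimal sketch. The tensor names, shapes, header name and the RModel::AddOperator registration call are illustrative assumptions, not taken from this listing.

#include "TMVA/RModel.hxx"
#include "TMVA/ROperator_Custom.hxx"

#include <cstddef>
#include <memory>
#include <string>
#include <utility>
#include <vector>

// Hypothetical helper: register a "MyRelu" custom operator with an existing RModel.
void AddMyRelu(TMVA::Experimental::SOFIE::RModel &model)
{
   using namespace TMVA::Experimental::SOFIE;

   std::vector<std::string> inputs = {"input"};                    // assumed tensor already known to the model
   std::vector<std::string> outputs = {"relu_out"};                // registered via AddIntermediateTensor in Initialize()
   std::vector<std::vector<std::size_t>> outputShapes = {{1, 10}}; // one shape per output tensor

   // Spelled without a template argument, following the constructor listed above;
   // adjust if ROperator_Custom takes a template parameter in your ROOT version.
   auto op = std::make_unique<ROperator_Custom>("MyRelu", inputs, outputs, outputShapes, "MyRelu.hxx");

   model.AddOperator(std::move(op)); // assumed RModel API for adding an operator
}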