SOFIE_common.cxx
#include "TMVA/SOFIE_common.hxx" // declarations of Dim, ETensorType and UTILITY used below
#include <cctype>
#include <sstream>
#include <cstring>   // std::memcpy
#include <algorithm> // std::remove_if
#include <stdexcept> // std::runtime_error

namespace TMVA{
namespace Experimental{
namespace SOFIE{

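// Convert a concrete shape given as size_t values into the equivalent vector of Dim
// entries (only the numeric dim field is filled).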
std::vector<Dim> ConvertShapeToDim(std::vector<size_t> shape){
   std::vector<Dim> fshape(shape.size());
   for (size_t i = 0; i < shape.size(); i++){
      fshape[i].dim = shape[i];
   }
   return fshape;
}

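// Number of elements of a tensor with the given shape (product of all dimensions).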
std::size_t ConvertShapeToLength(std::vector<size_t> shape){
   std::size_t fLength = 1;
   for (auto& dim: shape) fLength *= dim;
   return fLength;
}

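// Translate an ETensorType enumerator into the corresponding C++ type name,
// or "other" for types not handled here.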
std::string ConvertTypeToString(ETensorType type){
   switch(type){
      case ETensorType::FLOAT : {
         return "float";
      }
      default:{
         return "other";
      }
   }
}

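// Parse a type name such as "float32" or "Float" into the corresponding ETensorType.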
ETensorType ConvertStringToType(std::string type){
   if(type == "float32" || type == "Float"){
      return ETensorType::FLOAT;
   }
   else{
      return ETensorType::UNDEFINED; // fallback for unrecognised type names
   }
}

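// Format a shape as a human-readable string, e.g. { 2 , 3 , 4 }.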
std::string ConvertShapeToString(std::vector<size_t> shape) {
   std::stringstream out;
   out << "{ ";
   for (size_t i = 0; i < shape.size(); i++) {
      out << shape[i];
      if (i < shape.size()-1) out << " , ";
   }
   out << " }";
   return out.str();
}

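// Internal helper: replicate a contiguous block of input_size elements no_of_copies times
// into target, doubling the already-copied region so that only a logarithmic number of
// memcpy calls is needed.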
namespace{
template<typename T>
static inline void copy_vector_data(int_t no_of_copies, int_t input_size, T* input, T* target){ //only visible within this translation unit
   std::memcpy(target, input, input_size * sizeof(T));
   int_t already_copied = 1;

   while (already_copied * 2 <= no_of_copies){
      std::memcpy(target + already_copied * input_size, target, already_copied * input_size * sizeof(T));
      already_copied *= 2;
   }

   if (already_copied < no_of_copies){
      std::memcpy(target + already_copied * input_size, target, (no_of_copies - already_copied) * input_size * sizeof(T));
   }
}
}

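// Broadcast original_data from original_shape to target_shape following unidirectional
// broadcasting: missing leading dimensions are treated as 1 and dimensions of size 1 are
// replicated. Returns a newly allocated buffer of ConvertShapeToLength(target_shape)
// elements that the caller must delete[].
//
// Illustrative usage sketch (not part of this file): broadcasting a bias of shape {3}
// against an activation of shape {2, 3}:
//
//    float bias[3] = {1.f, 2.f, 3.f};
//    float* out = UTILITY::Unidirectional_broadcast<float>(bias, {3}, {2, 3});
//    // out now holds {1, 2, 3, 1, 2, 3}
//    delete[] out;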
template <typename T>
T* UTILITY::Unidirectional_broadcast(const T* original_data, const std::vector<size_t> original_shape, const std::vector<size_t> target_shape)
{

   int original_length = 1;
   int target_length = 1;
   for (size_t i = 0; i < original_shape.size(); i++){
      original_length *= original_shape[i];
   }
   for (size_t i = 0; i < target_shape.size(); i++){
      target_length *= target_shape[i];
   }
   if (original_shape.size() > target_shape.size()) {
      std::string targetShape = "target : " + ConvertShapeToString(target_shape);
      std::string originalShape = "original : " + ConvertShapeToString(original_shape);
      throw std::runtime_error(
         "TMVA::SOFIE Error in Broadcasting Tensor : original array has more dimensions than target shape, " + originalShape + ", " + targetShape);
   }
   // if the shapes have different ranks, prepend 1's until they have the same size;
   // since the broadcast is unidirectional we can only prepend
   std::vector<size_t> current_shape(original_shape);
   auto it = current_shape.begin();
   while (current_shape.size() < target_shape.size()) {
      it = current_shape.insert(it, 1);
   }
   // the code below would also work when the shapes are not aligned, e.g. (3,4,5,6) and (3),
   // by inserting 1's in all missing positions; since broadcasting is uni-directional we do not use it
   // std::vector<size_t> current_shape(target_shape.size(),1);
   // for (size_t i = 0; i < original_shape.size(); i++) {
   //    for (size_t j = 0; j < target_shape.size(); j++) {
   //       if (target_shape[j] == original_shape[i])
   //          current_shape[j] = original_shape[i];
   //    }
   // }

   T* new_datavector = new T[target_length];
   std::memcpy(new_datavector, original_data, original_length * sizeof(T));

   // walk the dimensions from the innermost outwards and replicate the data whenever the
   // current dimension has to be expanded from 1 to the target size
   for (int dim = (int) target_shape.size() - 1; dim >= 0; dim--){
      if (current_shape[dim] != target_shape[dim]){
         if (current_shape[dim] != 1) {
            std::string targetShape = "target : " + ConvertShapeToString(target_shape);
            std::string originalShape = "original : " + ConvertShapeToString(current_shape);
            throw std::runtime_error ("TMVA::SOFIE Error in Broadcasting Tensor : at least one dimension of the original array to be broadcast is not 1, " + originalShape + ", " + targetShape);
         }
         int_t group_size = 1;
         int_t no_of_groups = 1;
         int_t no_of_copies = target_shape[dim];

         for (size_t i = dim + 1; i < target_shape.size(); i++){
            group_size *= current_shape[i];
         }
         for (int i = 0; i < dim; i++){
            no_of_groups *= current_shape[i];
         }

         for (int curr_group = no_of_groups - 1; curr_group >= 0; curr_group--){
            copy_vector_data<T>(no_of_copies, group_size, new_datavector + curr_group * group_size, new_datavector + curr_group * group_size * no_of_copies);
         }

         current_shape[dim] = target_shape[dim];
      }
   }
   return new_datavector;
}

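// Remove every non-alphanumeric character from a tensor name so that it can safely be
// used as part of a C++ identifier.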
std::string UTILITY::Clean_name(std::string input_tensor_name){
   std::string s (input_tensor_name);
   // cast to unsigned char to avoid undefined behaviour of std::isalnum for negative char values
   s.erase(std::remove_if(s.begin(), s.end(), []( char const& c ) -> bool { return !std::isalnum(static_cast<unsigned char>(c)); } ), s.end());
   return s;
}

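// Explicit template instantiation of the broadcast helper for float tensors.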
template float* UTILITY::Unidirectional_broadcast(const float* original_data, const std::vector<size_t> original_shape, const std::vector<size_t> target_shape);

}//SOFIE
}//Experimental
}//TMVA