SOFIE_common.cxx
#include "TMVA/SOFIE_common.hxx"

#include <algorithm>
#include <cctype>
#include <cstring>
#include <sstream>
#include <stdexcept>
#include <charconv>

namespace TMVA {
namespace Experimental {
namespace SOFIE {

/// @brief Convert shape from integer format to dynamic one (based on Dim)
/// @param shape
/// @return shape based on Dim
std::vector<Dim> ConvertShapeToDim(const std::vector<size_t> & shape){
   std::vector<Dim> ret_shape(shape.size());
   for (size_t i = 0; i < shape.size(); i++){
      ret_shape[i].dim = shape[i];
   }
   return ret_shape;
}

/// @brief Convert shape based on Dim to integer format
/// @param shape
/// @return shape based on integers. Returns an empty shape in case the shape is dynamic (has a parameter)
std::vector<size_t> ConvertShapeToInt(const std::vector<Dim> & shape){
   std::vector<size_t> ret_shape(shape.size());
   for (size_t i = 0; i < shape.size(); i++){
      if (shape[i].isParam) {
         // try converting to an integer in case the parameter string is a number >= 0
         int val = -1;
         try {
            val = std::stoi(shape[i].param);
            if (val >= 0) ret_shape[i] = static_cast<size_t>(val);
            else {
               ret_shape.clear();
               break;
            }
         }
         catch (const std::invalid_argument& ) {
            ret_shape.clear();
            break;
         }
      } else {
         ret_shape[i] = shape[i].dim;
      }
   }
   return ret_shape;
}
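// Illustrative usage (values chosen here as an example, not from the original file):
//   ConvertShapeToInt({Dim{2}, Dim{3}})                 -> {2, 3}
//   ConvertShapeToInt({Dim{std::string("N")}, Dim{3}})  -> {}   (parametric dimension, dynamic shape)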


std::size_t ConvertShapeToLength(const std::vector<size_t> & shape){
   // An empty shape represents a scalar value, so we return a length of 1
   std::size_t fLength = 1;
   for (auto& dim: shape) fLength *= dim;
   return fLength;
}
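// For example, ConvertShapeToLength({2, 3, 4}) returns 24, and an empty (scalar) shape returns 1.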

std::string ConvertTypeToString(ETensorType type){
   switch(type){
      case ETensorType::FLOAT : {
         return "float";
      }
      case ETensorType::INT8 : {
         return "int8_t";
      }
      case ETensorType::INT16 : {
         return "int16_t";
      }
      case ETensorType::INT32 : {
         return "int32_t";
      }
      case ETensorType::INT64 : {
         return "int64_t";
      }
      case ETensorType::UINT8 : {
         return "uint8_t";
      }
      case ETensorType::UINT16 : {
         return "uint16_t";
      }
      case ETensorType::UINT32 : {
         return "uint32_t";
      }
      case ETensorType::UINT64 : {
         return "uint64_t";
      }
      case ETensorType::DOUBLE : {
         return "double";
      }
      case ETensorType::BOOL : {
         return "bool";
      }
      default:{
         return "other_" + std::to_string( (int) type);
      }
   }
}

ETensorType ConvertStringToType(std::string type){
   if(type == "float32" || type == "float" || type == "Float"){
      return ETensorType::FLOAT;
   }
   else if(type == "int64" || type == "int64_t"){
      return ETensorType::INT64;
   }
   else if (type == "double" || type == "float64"){
      return ETensorType::DOUBLE;
   }
   else if (type == "bool" ){
      return ETensorType::BOOL;
   }
   else{
      return ETensorType::UNDEFINED;
   }
}

std::string ConvertShapeToString(const std::vector<size_t> & shape) {
   std::stringstream out;
   out << "{ ";
   for (size_t i = 0; i < shape.size(); i++) {
      out << shape[i];
      if (i < shape.size()-1) out << " , ";
   }
   out << " }";
   return out.str();
}

std::string ConvertDimShapeToString(const std::vector<Dim> & shape) {
   std::stringstream out;
   out << "{ ";
   for (size_t i = 0; i < shape.size(); i++) {
      out << shape[i].GetVal();
      if (i < shape.size()-1) out << " , ";
   }
   out << " }";
   return out.str();
}

std::string ConvertDimShapeToLength(const std::vector<Dim> & shape) {
   // convert a generic (possibly parametric) shape into a string expression for its length
   // by multiplying all the specified dimensions of the shape
   std::string length;
   // an empty shape represents a scalar: return 1
   if (shape.empty()) return "1";
   size_t int_length = 0;
   for (size_t i = 0; i < shape.size(); i++) {
      if (shape[i].isParam) {
         if (!length.empty()) length += " * ";
         length += shape[i].param;
      } else {
         if (int_length == 0)
            int_length = shape[i].dim;
         else
            int_length *= shape[i].dim;
      }
   }
   // append the product of the integer dimensions to the parametric part
   // if it is larger than 1
   if (int_length > 0) {
      if (!length.empty() && int_length > 1) {
         length += " * ";
         length += std::to_string(int_length);
      } else if (length.empty()) { // case of a fully known shape
         length = std::to_string(int_length);
      }
   }
   return length;
}
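// Illustrative usage (parameter name "N" is hypothetical): for the shape
// {Dim{std::string("N")}, Dim{3}, Dim{4}} the returned length expression is "N * 12",
// while for a fully known shape {Dim{2}, Dim{3}} it is simply "6".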
std::string ConvertShapeToString(const std::vector<Dim> & shape) {
   return ConvertDimShapeToString(shape);
}
std::string ConvertDynamicShapeToLength(const std::vector<Dim> & shape) {
   return ConvertDimShapeToLength(shape);
}


namespace {
// Helper, only visible within this translation unit: replicate an input buffer
// `no_of_copies` times into `target`, doubling the already-copied region at each step.
template<typename T>
static inline void copy_vector_data(std::size_t no_of_copies, std::size_t input_size, T* input, T* target){
   std::memcpy(target, input, input_size * sizeof(T));
   std::size_t already_copied = 1;

   while (already_copied * 2 <= no_of_copies){
      std::memcpy(target + already_copied * input_size, target, already_copied * input_size * sizeof(T));
      already_copied *= 2;
   }

   if (already_copied < no_of_copies) {
      std::memcpy(target + already_copied * input_size, target, (no_of_copies - already_copied) * input_size * sizeof(T));
   }
}
}

bool IsInteger(const std::string & s) {
   int value;
   auto [ptr, ec] = std::from_chars(s.data(), s.data() + s.size(), value);
   return ec == std::errc() && ptr == s.data() + s.size();
}
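// For example, IsInteger("123") is true, while IsInteger("3x") and IsInteger("N")
// are false, since the full string must parse as an integer.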

bool UTILITY::AreSameShape(const std::vector<size_t>& shapeA, const std::vector<size_t>& shapeB) {
   if (shapeA.size() != shapeB.size()) {
      return false;
   }
   for (size_t dim = 0; dim < shapeA.size(); dim++) {
      if (shapeA[dim] != shapeB[dim]) {
         return false;
      }
   }
   return true;
}
bool UTILITY::AreSameShape(const std::vector<size_t>& shapeA, const std::vector<Dim>& shapeB) {
   if (shapeA.size() != shapeB.size()) {
      return false;
   }
   for (size_t dim = 0; dim < shapeA.size(); dim++) {
      if (shapeB[dim].isParam) return false;
      if (shapeA[dim] != shapeB[dim].dim) {
         return false;
      }
   }
   return true;
}
bool UTILITY::AreSameShape(const std::vector<Dim>& shapeA, const std::vector<Dim>& shapeB) {
   if (shapeA.size() != shapeB.size()) {
      return false;
   }
   for (size_t dim = 0; dim < shapeA.size(); dim++) {
      if (shapeA[dim].GetVal() != shapeB[dim].GetVal()) {
         return false;
      }
   }
   return true;
}

std::vector<size_t> UTILITY::MultidirectionalBroadcastShape(std::vector<std::vector<size_t>> shape)
{
   if (shape.size() < 2) {
      throw
         std::runtime_error("TMVA::SOFIE - MultidirectionalBroadcastShape requires at least 2 input shapes.");
   }
   // Number of input shapes to broadcast
   size_t n = shape.size();
   // Size of the output shape
   size_t targetSize = shape[0].size();
   for (size_t i = 1; i < n; i++) {
      targetSize = std::max(targetSize, shape[i].size());
   }
   // Check if they have the same size
   bool sameSize = true;
   for (size_t i = 0; i < n; i++) {
      if (shape[i].size() != targetSize) {
         sameSize = false;
         break;
      }
   }
   if (sameSize) {
      // Check if they have the same shape
      bool sameShape = true;
      for (size_t i = 1; i < n; i++) {
         for (size_t dim = 0; dim < shape[0].size(); dim++) {
            if (shape[i][dim] != shape[0][dim]) {
               sameShape = false;
               break;
            }
         }
         if (!sameShape) {
            break;
         }
      }
      if (sameShape) {
         return shape[0];
      } else {
         // Set the target shape
         std::vector<size_t> targetShape(targetSize, 1);
         for (size_t i = 0; i < n; i++) {
            for (size_t dim = 0; dim < targetSize; dim++) {
               targetShape[dim] = std::max(targetShape[dim], shape[i][dim]);
            }
         }
         // Check if the input shapes are broadcastable to targetShape
         bool broadcastable = true;
         for (size_t i = 0; i < n; i++) {
            for (size_t dim = 0; dim < targetSize; dim++) {
               if (shape[i][dim] != 1 && targetShape[dim] != 1 && shape[i][dim] != targetShape[dim]) {
                  broadcastable = false;
                  break;
               }
            }
            if (!broadcastable) {
               break;
            }
         }
         // The shapes differ but are broadcastable to targetShape
         if (broadcastable) {
            return targetShape;
         } else {
            std::stringstream ss;
            ss << "TMVA::SOFIE - Error multidirectional broadcasting shapes ";
            for (size_t i = 0; i < n; i++) {
               ss << ConvertShapeToString(shape[i]);
               if (n > 2 && i < n - 2) {
                  ss << ", ";
               } else if (n >= 2 && i == n - 2) {
                  ss << " and ";
               }
            }
            ss << " to the same shape.";
            throw
               std::runtime_error(ss.str());
         }
      } // end sameShape
   } // end sameSize
   // Prepend the i-th shape with ones
   for (size_t i = 0; i < n; i++) {
      if (shape[i].size() < targetSize) {
         std::vector<size_t> newShape(targetSize, 1);
         size_t offset = targetSize - shape[i].size();
         std::copy(shape[i].begin(), shape[i].end(), newShape.begin() + offset);
         shape[i] = newShape;
      }
   }
   // Set the target shape
   std::vector<size_t> targetShape(targetSize, 1);
   for (size_t i = 0; i < n; i++) {
      for (size_t dim = 0; dim < targetSize; dim++) {
         targetShape[dim] = std::max(targetShape[dim], shape[i][dim]);
      }
   }
   // Check if the shapes are broadcastable to targetShape
   bool broadcastable = true;
   for (size_t i = 0; i < n; i++) {
      for (size_t dim = 0; dim < targetSize; dim++) {
         if (shape[i][dim] != targetShape[dim] && shape[i][dim] != 1 && targetShape[dim] != 1) {
            broadcastable = false;
            break;
         }
      }
      if (!broadcastable) {
         break;
      }
   }
   if (broadcastable) {
      return targetShape;
   } else {
      std::stringstream ss;
      ss << "TMVA::SOFIE - Error multidirectional broadcasting shapes ";
      for (size_t i = 0; i < n; i++) {
         ss << ConvertShapeToString(shape[i]);
         if (n > 2 && i < n - 2) {
            ss << ", ";
         } else if (n >= 2 && i == n - 2) {
            ss << " and ";
         }
      }
      ss << " to the same shape.";
      throw
         std::runtime_error(ss.str());
   }
}
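// Illustrative usage (input values are hypothetical): broadcasting the shapes
//   { {2, 3, 1}, {3, 4} }
// first prepends the second shape with a 1 to obtain {1, 3, 4}, and then returns
// the common target shape {2, 3, 4}.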

// Check multi-directional broadcasting of two shapes (inputs are passed by non-const reference
// since we might prepend them with ones).
// Returns a pair of an integer flag and the new broadcast shape:
//  - flag = 0: the shapes are identical
//  - flag = 1: the returned shape is equal to A, and B is broadcast
//  - flag = 2: the returned shape is equal to B, and A is broadcast
//  - flag = 3: the returned shape is common to the two, and both A and B are broadcast to the output
std::pair<int, std::vector<size_t>> UTILITY::MultidirectionalBroadcastShape(std::vector<size_t> & shapeA, std::vector<size_t> & shapeB)
{
   size_t sizeA = shapeA.size();
   size_t sizeB = shapeB.size();
   // Check if A and B have the same shape
   if (UTILITY::AreSameShape(shapeA, shapeB)) {
      return std::make_pair(0, shapeA);
   }
   // Find the common shape of A and B
   size_t size = std::max(sizeA, sizeB);
   if (sizeA < size) {
      // prepend 1's in A to make it of the same rank as B
      std::vector<size_t> newShapeA(size, 1);
      size_t offset = size - sizeA;
      std::copy(shapeA.begin(), shapeA.end(), newShapeA.begin() + offset);
      shapeA = std::move(newShapeA);
   }
   if (sizeB < size) {
      std::vector<size_t> newShapeB(size, 1);
      size_t offset = size - sizeB;
      std::copy(shapeB.begin(), shapeB.end(), newShapeB.begin() + offset);
      shapeB = std::move(newShapeB);
   }
   bool broadcastable = true;
   for (size_t i = 0; i < size; i++) {
      if (shapeA[i] != shapeB[i] && shapeA[i] != 1 && shapeB[i] != 1) {
         broadcastable = false;
         break;
      }
   }
   int broadcastFlag = 0;
   if (broadcastable) {
      // The output shape is the element-wise maximum of the two shapes
      std::vector<size_t> targetShape(size, 1);
      for (size_t i = 0; i < size; i++) {
         targetShape[i] = std::max(shapeA[i], shapeB[i]);
         if (shapeB[i] < targetShape[i]) broadcastFlag |= 1;
         if (shapeA[i] < targetShape[i]) broadcastFlag |= 2;
      }
      return std::make_pair(broadcastFlag, targetShape);
   } else {
      throw
         std::runtime_error("TMVA::SOFIE - Error multidirectional broadcasting tensors of shape "
            + ConvertShapeToString(shapeA) + " and " + ConvertShapeToString(shapeB)
            + " to a common shape.");
   }
}
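// Illustrative usage (input values are hypothetical): with shapeA = {2, 3, 1} and
// shapeB = {3, 4}, shapeB is first prepended with a 1, the returned shape is {2, 3, 4},
// and the flag is 3 since both inputs need broadcasting (B along the first axis, A along the last one).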
// Unidirectional broadcast: only B is allowed to change
std::vector<size_t> UTILITY::UnidirectionalBroadcastShape(std::vector<size_t> & shapeA, std::vector<size_t> & shapeB)
{
   auto ret = UTILITY::MultidirectionalBroadcastShape(shapeA, shapeB);
   if (ret.first > 1) {
      throw
         std::runtime_error("TMVA::SOFIE - Error unidirectional broadcasting tensors of shape "
            + ConvertShapeToString(shapeA) + " and " + ConvertShapeToString(shapeB)
            + " to a common shape.");
   }
   return ret.second;
}
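// Illustrative usage (input values are hypothetical): UnidirectionalBroadcastShape
// with shapeA = {2, 3, 4} and shapeB = {3, 1} returns {2, 3, 4} (only B is broadcast),
// whereas shapeA = {2, 1} with shapeB = {2, 3} would require broadcasting A and is an error.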

// Broadcasting of Dim (possibly parametric) shapes.
// The returned flag also indicates which vector needs to be broadcast:
//  - flag & 1 : broadcast B -> A
//  - flag & 2 : broadcast A -> B
//  - flag & 4 : a run-time check is needed on shapes with parametric values
std::pair<int, std::vector<Dim>> UTILITY::MultidirectionalBroadcastShape(std::vector<Dim> & shapeA, std::vector<Dim> & shapeB) {
   size_t sizeA = shapeA.size();
   size_t sizeB = shapeB.size();
   // Check if A and B have the same shape
   if (UTILITY::AreSameShape(shapeA, shapeB)) {
      return std::make_pair(0, shapeA);
   }
   // Find the common shape of A and B
   size_t size = std::max(sizeA, sizeB);
   if (sizeA < size) {
      // prepend 1's in A to make it of the same rank as B
      std::vector<Dim> newShapeA(size, Dim{1});
      size_t offset = size - sizeA;
      std::copy(shapeA.begin(), shapeA.end(), newShapeA.begin() + offset);
      shapeA = std::move(newShapeA);
   }
   if (sizeB < size) {
      std::vector<Dim> newShapeB(size, Dim{1});
      size_t offset = size - sizeB;
      std::copy(shapeB.begin(), shapeB.end(), newShapeB.begin() + offset);
      shapeB = std::move(newShapeB);
   }

   int broadcastFlag = 0;
   // The output shape is targetShape
   std::vector<Dim> targetShape(size);
   for (size_t i = 0; i < size; i++) {
      // assume we broadcast to the parametric value
      if (shapeA[i] == shapeB[i]) {
         targetShape[i] = shapeA[i];
      } else if (shapeA[i].isParam && shapeB[i].GetVal() == "1") {
         // broadcast B to A (A is parametric and B is 1)
         targetShape[i] = shapeA[i];
         broadcastFlag |= 1;
      } else if (shapeA[i].GetVal() == "1" && shapeB[i].isParam) {
         // broadcast A to B
         targetShape[i] = shapeB[i];
         broadcastFlag |= 2;
      } else if (!shapeA[i].isParam && !shapeB[i].isParam) {
         if (shapeB[i].dim == 1) {
            targetShape[i] = shapeA[i];
            broadcastFlag |= 1;
         } else if (shapeA[i].dim == 1) {
            targetShape[i] = shapeB[i];
            broadcastFlag |= 2;
         } else {
            // non-broadcastable case: A and B cannot have two different defined dimensions both different from one
            broadcastFlag = -1;
         }
      } else if (shapeA[i].isParam && shapeB[i].isParam) {
         // fully dynamic case - we will decide at run time
         std::stringstream s;
         s << "std::max(" << shapeA[i].GetVal() << "," << shapeB[i].GetVal() << ")";
         // use -1 for dim to indicate it is an expression
         targetShape[i] = Dim { s.str() , static_cast<size_t>(-1)};
         broadcastFlag |= 4;
      } else if (shapeA[i].isParam && !shapeB[i].isParam) {
         // A -> B : need to check at run time if consistent
         targetShape[i] = shapeB[i];
         broadcastFlag |= 6;
      } else if (!shapeA[i].isParam && shapeB[i].isParam) {
         // B -> A : need to check at run time if consistent
         targetShape[i] = shapeA[i];
         broadcastFlag |= 5;
      } else {
         // all cases should be covered
         throw std::runtime_error("TMVA::SOFIE - Fatal error in MultiDirectionalBroadCastDimShape");
      }
   }
   if (broadcastFlag == -1) {
      throw std::runtime_error("TMVA::SOFIE - Error multidirectional broadcasting tensors of shape " +
                               ConvertShapeToString(shapeA) + " and " + ConvertShapeToString(shapeB) +
                               " to a common shape.");
   }

   return std::make_pair(broadcastFlag, targetShape);
}
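// Illustrative usage (parameter name "N" is hypothetical): with
//   shapeA = { Dim{std::string("N")}, Dim{3} }  and  shapeB = { Dim{1}, Dim{3} }
// the returned shape is {N, 3} and the flag is 1, i.e. only B needs to be broadcast.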

std::string UTILITY::Clean_name(std::string input_tensor_name){
   std::string s (input_tensor_name);
   std::replace( s.begin(), s.end(), '-', '_');
   // remove all non-alphanumeric characters except for "_"
   s.erase(std::remove_if(s.begin(), s.end(), []( char const& c ) -> bool { return !std::isalnum(c) && c != '_'; } ), s.end());
   return s;
}
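// For example, Clean_name("conv1/weight:0-bias") returns "conv1weight0_bias":
// the '-' becomes '_' and the remaining non-alphanumeric characters are dropped.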

std::vector<size_t> UTILITY::ComputeStrideFromShape(const std::vector<size_t> & shape) {
   // assume row-major layout
   const auto size = shape.size();
   std::vector<size_t> strides(size, 1);
   for (std::size_t i = 1; i < size; i++) {
      strides[size - 1 - i] = strides[size - i] * shape[size - i];
   }
   return strides;
}
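// For example, the row-major strides of the shape {2, 3, 4} are {12, 4, 1}.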

std::vector<Dim> UTILITY::ComputeStrideFromShape(const std::vector<Dim> & shape) {
   // assume row-major layout
   const auto size = shape.size();
   std::vector<Dim> strides(size);
   if (size > 0) {
      strides[size-1] = Dim{1};
      for (std::size_t i = 1; i < size; i++) {
         if (!shape[size-i].isParam && !strides[size-i].isParam)
            strides[size - 1 - i] = Dim{strides[size-i].dim * shape[size-i].dim};
         else {
            if (strides[size-i].GetVal() == "1")
               strides[size - 1 - i] = shape[size-i];
            else if (shape[size-i].GetVal() == "1")
               strides[size - 1 - i] = strides[size-i];
            else
               strides[size - 1 - i] = Dim{std::string(strides[size-i].GetVal() + "*" + shape[size-i].GetVal())};
         }
      }
   }
   return strides;
}
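// Illustrative usage (parameter name "N" is hypothetical): for the shape
// {Dim{2}, Dim{std::string("N")}, Dim{4}} the computed strides are {"4*N", "4", "1"},
// i.e. the leading stride becomes a symbolic expression.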

} // namespace SOFIE
} // namespace Experimental
} // namespace TMVA