namespace Experimental {

/// Fast boosted decision tree inference.
template <typename Backend = BranchlessJittedForest<float>>
class RBDT {
public:
   using Value_t = typename Backend::Value_t;
   using Backend_t = Backend;

private:
   int fNumOutputs;
   bool fNormalizeOutputs;
   std::vector<Backend_t> fBackends;

public:
   /// Construct backends from model in ROOT file.
   RBDT(const std::string &key, const std::string &filename)
   {
      // Open the file containing the model and read the number of output nodes
      std::unique_ptr<TFile> file{TFile::Open(filename.c_str(), "READ")};
      if (!file || file->IsZombie())
         throw std::runtime_error("Failed to open input file " + filename);
      auto numOutputs = Internal::GetObjectSafe<std::vector<int>>(file.get(), filename, key + "/num_outputs");
      fNumOutputs = numOutputs->at(0);

      // Read the objective; softmax outputs are normalized per event
      auto objective = Internal::GetObjectSafe<std::string>(file.get(), filename, key + "/objective");
      fNormalizeOutputs = objective->compare("softmax") == 0;

      // Initialize one backend per output node
      fBackends = std::vector<Backend_t>(fNumOutputs);
      for (int i = 0; i < fNumOutputs; i++)
         fBackends[i].Load(key, filename, i);
   }

   /// Compute model prediction on a single event.
   template <typename Vector>
   Vector Compute(const Vector &x)
   {
      Vector y;
      y.resize(fNumOutputs);
      for (int i = 0; i < fNumOutputs; i++)
         fBackends[i].Inference(&x[0], 1, true, &y[i]);
      return y;
   }

   /// Compute model prediction on a single event.
   std::vector<Value_t> Compute(const std::vector<Value_t> &x) { return this->Compute<std::vector<Value_t>>(x); }

   /// Compute model prediction on input RTensor.
   RTensor<Value_t> Compute(const RTensor<Value_t> &x)
   {
      const auto rows = x.GetShape()[0];
      RTensor<Value_t> y({rows, static_cast<std::size_t>(fNumOutputs)}, MemoryLayout::ColumnMajor);
      // The backend takes a flag indicating whether the input memory is row major
      const bool layout = x.GetMemoryLayout() == MemoryLayout::ColumnMajor ? false : true;
      for (int i = 0; i < fNumOutputs; i++)
         fBackends[i].Inference(x.GetData(), rows, layout, &y(0, i));
      if (fNormalizeOutputs) {
         // Normalize the outputs of each event to unit sum (softmax objective)
         for (int i = 0; i < static_cast<int>(rows); i++) {
            Value_t sum = 0;
            for (int j = 0; j < fNumOutputs; j++)
               sum += y(i, j);
            for (int j = 0; j < fNumOutputs; j++)
               y(i, j) /= sum;
         }
      }
      return y;
   }
};

} // namespace Experimental
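A minimal single-event usage sketch, assuming a forest previously stored under the hypothetical key "myModel" in the hypothetical file "model.root" (for example via TMVA::Experimental::SaveXGBoost in PyROOT) and the default BranchlessJittedForest<float> backend shown above; the feature values are placeholders:

#include "TMVA/RBDT.hxx"

#include <vector>

int main()
{
   // Load one backend per output node of the stored model
   TMVA::Experimental::RBDT<> bdt("myModel", "model.root");

   // One event with four input features; the result holds one value per output node
   std::vector<float> event = {0.5f, 1.0f, -0.2f, 3.0f};
   std::vector<float> prediction = bdt.Compute(event);

   return 0;
}

Passing a named std::vector<Value_t> selects the non-template overload, which simply forwards to the generic Compute<Vector>.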
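For batched inference there is the RTensor overload; RTensor is a container with contiguous memory and shape information. A sketch under the same assumptions (hypothetical key, file name, and feature values): the input has shape (events, features), and the output has shape (events, outputs) stored column major as constructed above, so column k holds the predictions of output node k for all events.

#include "TMVA/RBDT.hxx"
#include "TMVA/RTensor.hxx"

#include <cstddef>

int main()
{
   TMVA::Experimental::RBDT<> bdt("myModel", "model.root");

   // Two events with four features each; RTensor is row major by default
   TMVA::Experimental::RTensor<float> x({2, 4});
   for (std::size_t i = 0; i < 2; i++)
      for (std::size_t j = 0; j < 4; j++)
         x(i, j) = static_cast<float>(j);

   // y(i, k) is the prediction of output node k for event i
   auto y = bdt.Compute(x);
   const float firstPrediction = y(0, 0);
   (void)firstPrediction; // use the prediction, e.g. fill a histogram

   return 0;
}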