# Configuration for streaming events from a ROOT TTree into TensorFlow.
import ROOT
import tensorflow as tf
# Name of the TTree to read events from.
tree_name = "sig_tree"
# Number of events per training batch handed to the model.
batch_size = 128
# Number of events loaded from the tree per chunk by the batch generator.
chunk_size = 5_000
# Column used as the training label; presumably a 0/1 class flag — TODO confirm.
target = "Type"
rdataframe,
batch_size,
chunk_size,
validation_split=0.3,
target=target,
)
# Number of full passes over the training data.
num_of_epochs = 2
num_features =
len(input_columns)
[
]
)
# Configure training (Adam optimizer, externally defined loss, accuracy metric)
# and fit the model on the repeated train/validation dataset pipelines,
# bounding each epoch by the known number of batches.
model.compile(optimizer="adam", loss=loss_fn, metrics=["accuracy"])
model.fit(
    ds_train_repeated,
    steps_per_epoch=train_batches_per_epoch,
    validation_data=ds_valid_repeated,
    validation_steps=validation_batches_per_epoch,
    epochs=num_of_epochs,
)
ROOT::Detail::TRangeCast< T, true > TRangeDynCast
TRangeDynCast is an adapter class that allows typed iteration through a TCollection.
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t Float_t Float_t Int_t Int_t UInt_t UInt_t Rectangle_t Int_t Int_t Window_t TString Int_t GCValues_t GetPrimarySelectionOwner GetDisplay GetScreen GetColormap GetNativeEvent const char const char dpyName wid window const char font_name cursor keysym reg const char only_if_exist regb h Point_t winding char text const char depth char const char Int_t count const char ColorStruct_t color const char Pixmap_t Pixmap_t PictureAttributes_t attr const char char ret_data h unsigned char height h Atom_t Int_t ULong_t ULong_t unsigned char prop_list Atom_t Atom_t Atom_t Time_t UChar_t len
ROOT's RDataFrame offers a modern, high-level interface for analysis of data stored in a TTree.
Epoch 1/2
␛[1m 1/54␛[0m ␛[37m━━━━━━━━━━━━━━━━━━━━␛[0m ␛[1m2:27␛[0m 3s/step - accuracy: 0.1484 - loss: 0.8795␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈
␛[1m11/54␛[0m ␛[32m━━━━␛[0m␛[37m━━━━━━━━━━━━━━━━␛[0m ␛[1m0s␛[0m 5ms/step - accuracy: 0.7662 - loss: 0.2696 ␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈
␛[1m22/54␛[0m ␛[32m━━━━━━━━␛[0m␛[37m━━━━━━━━━━━━␛[0m ␛[1m0s␛[0m 5ms/step - accuracy: 0.8571 - loss: 0.1666␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈
␛[1m32/54␛[0m ␛[32m━━━━━━━━━━━␛[0m␛[37m━━━━━━━━━␛[0m ␛[1m0s␛[0m 5ms/step - accuracy: 0.8920 - loss: 0.1265␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈
␛[1m42/54␛[0m ␛[32m━━━━━━━━━━━━━━━␛[0m␛[37m━━━━━␛[0m ␛[1m0s␛[0m 5ms/step - accuracy: 0.9123 - loss: 0.1030␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈
␛[1m53/54␛[0m ␛[32m━━━━━━━━━━━━━━━━━━━␛[0m␛[37m━␛[0m ␛[1m0s␛[0m 5ms/step - accuracy: 0.9268 - loss: 0.0861␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈
␛[1m54/54␛[0m ␛[32m━━━━━━━━━━━━━━━━━━━━␛[0m␛[37m␛[0m ␛[1m3s␛[0m 11ms/step - accuracy: 0.9289 - loss: 0.0837 - val_accuracy: 1.0000 - val_loss: 3.3658e-07
Epoch 2/2
␛[1m 1/54␛[0m ␛[37m━━━━━━━━━━━━━━━━━━━━␛[0m ␛[1m0s␛[0m 4ms/step - accuracy: 1.0000 - loss: 3.3670e-07␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈
␛[1m12/54␛[0m ␛[32m━━━━␛[0m␛[37m━━━━━━━━━━━━━━━━␛[0m ␛[1m0s␛[0m 5ms/step - accuracy: 1.0000 - loss: 3.3132e-07␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈
␛[1m22/54␛[0m ␛[32m━━━━━━━━␛[0m␛[37m━━━━━━━━━━━━␛[0m ␛[1m0s␛[0m 5ms/step - accuracy: 1.0000 - loss: 3.2871e-07␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈
␛[1m32/54␛[0m ␛[32m━━━━━━━━━━━␛[0m␛[37m━━━━━━━━━␛[0m ␛[1m0s␛[0m 5ms/step - accuracy: 1.0000 - loss: 3.2606e-07␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈
␛[1m40/54␛[0m ␛[32m━━━━━━━━━━━━━━␛[0m␛[37m━━━━━━␛[0m ␛[1m0s␛[0m 5ms/step - accuracy: 1.0000 - loss: 3.2453e-07␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈
␛[1m50/54␛[0m ␛[32m━━━━━━━━━━━━━━━━━━␛[0m␛[37m━━␛[0m ␛[1m0s␛[0m 5ms/step - accuracy: 1.0000 - loss: 3.2327e-07␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈␈
␛[1m54/54␛[0m ␛[32m━━━━━━━━━━━━━━━━━━━━␛[0m␛[37m␛[0m ␛[1m0s␛[0m 8ms/step - accuracy: 1.0000 - loss: 3.2294e-07 - val_accuracy: 0.9565 - val_loss: 3.0647e-07