Tutorial showing how to parse a GNN from GraphNet and make a SOFIE model. The tutorial also generates some data which can serve as input for the tutorial TMVA_SOFIE_GNN_Application.C.
import os
import time
import graph_nets as gn
import numpy as np
import psutil
import ROOT
import sonnet as snt
from graph_nets import utils_tf
# ---- Tutorial configuration ----

# Graph-size caps — presumably bound the randomly generated graphs;
# not used in the visible extract, confirm against the full tutorial.
num_max_nodes = 100
num_max_edges = 300

# Feature widths: per-node, per-edge, and per-graph (global) feature counts.
node_size = 4
edge_size = 4
global_size = 1

# MLP architecture: each network is an MLP of NUM_LAYERS layers,
# LATENT_SIZE units wide (see the snt.nets.MLP constructions below).
LATENT_SIZE = 100
NUM_LAYERS = 4

# Number of message-passing iterations passed to the EncodeProcessDecode model.
processing_steps = 5

# Number of events generated/evaluated in the main loop.
numevts = 100

# Set True for a per-event debug printout of the first event.
verbose = False
print(s,"memory:",memoryUse,"(MB)")
return {
}
def get_fix_graph_data_dict(num_nodes, num_edges, NODE_FEATURE_SIZE=2, EDGE_FEATURE_SIZE=2, GLOBAL_FEATURE_SIZE=1):
    """Build a graph-data dict for a graph with fixed node/edge counts.

    NOTE(review): the function body is truncated in this extract — only an
    empty ``return {}`` survives. The original presumably populates the dict
    with node/edge/global feature arrays (of the given feature sizes) plus
    sender/receiver index arrays, in the format expected by
    ``graph_nets.utils_tf`` — confirm against the full tutorial source.
    """
    return {
    }
snt.nets.MLP([LATENT_SIZE]*NUM_LAYERS, activate_final=
True),
])
def __init__(self, name="MLPGraphIndependent"):
super(MLPGraphIndependent, self).__init__(name=name)
edge_model_fn =
lambda:
snt.nets.MLP([LATENT_SIZE]*NUM_LAYERS, activate_final=
True),
node_model_fn =
lambda:
snt.nets.MLP([LATENT_SIZE]*NUM_LAYERS, activate_final=
True),
global_model_fn =
lambda:
snt.nets.MLP([LATENT_SIZE]*NUM_LAYERS, activate_final=
True))
return self._network(inputs)
def __init__(self, name="MLPGraphNetwork"):
super(MLPGraphNetwork, self).__init__(name=name)
edge_model_fn=make_mlp_model,
node_model_fn=make_mlp_model,
global_model_fn=make_mlp_model)
return self._network(inputs)
def __init__(self,
name="EncodeProcessDecode"):
super(EncodeProcessDecode, self).__init__(name=name)
def __call__(self, input_op, num_processing_steps):
latent = self._encoder(input_op)
latent0 = latent
output_ops = []
for _
in range(num_processing_steps):
latent = self._core(core_input)
decoded_op = self._decoder(latent)
return output_ops
output_gn =
ep_model(input_graph_data, processing_steps)
tree.Branch(
"node_data",
"std::vector<float>" , node_data)
tree.Branch(
"edge_data",
"std::vector<float>" , edge_data)
tree.Branch(
"global_data",
"std::vector<float>" , global_data)
tree.Branch(
"receivers",
"std::vector<int>" , receivers)
print("\n\nSaving data in a ROOT File:")
h1 =
ROOT.TH1D(
"h1",
"GraphNet nodes output",40,1,0)
h2 =
ROOT.TH1D(
"h2",
"GraphNet edges output",40,1,0)
h3 =
ROOT.TH1D(
"h3",
"GraphNet global output",40,1,0)
dataset = []
for i
in range(0,numevts):
s_nodes = graphData['nodes'].size
s_edges = graphData['edges'].size
num_edges = graphData['edges'].shape[0]
if (i < 1 and verbose) :
print("Edges - shape:",num_edges, edge_size,"data: ", edge_data)
print("Globals : ",global_data)
print("Receivers : ",receivers)
print("Senders : ",senders)
output_gnn =
ep_model(dataset[0], processing_steps)
firstEvent = True
for tf_graph_data in dataset:
output_gnn =
ep_model(tf_graph_data, processing_steps)
outgnn[2] =
np.mean(output_globals)
if (firstEvent and verbose) :
print("Output of first event")
firstEvent = False
print("time to evaluate events",end-start)
ROOT::Detail::TRangeCast< T, true > TRangeDynCast
TRangeDynCast is an adapter class that allows typed iteration through a TCollection.