I’m stuck trying to figure out the exact problem I’m facing as I test a simple classification script.
I could run the TMVA examples in ROOT; however, for some reason I am not able to reproduce the same success on the problem I am working on. Another thing to note is that I am running this in Python, since I hope to use some of the ML libraries, which are mostly written in Python.
Whenever I run my script, my desktop crashes while preparing the Gaussian transformation, and the machine becomes so unresponsive that I have to force-restart it.
from ROOT import TMVA, TFile, TTree, TCut, gROOT
from os.path import isfile
import numpy as np
from os import environ
environ['KERAS_BACKEND'] = 'tensorflow'
from keras.models import Sequential
from keras.layers.core import Dense, Activation
from keras.regularizers import l2
from keras import initializers
from keras.optimizers import SGD
# Setup TMVA: initialize the tools singleton and the Python method bridge
# (required before booking any PyKeras method).
TMVA.Tools.Instance()
TMVA.PyMethodBase.PyInitialize()

# Output file receiving the TMVA evaluation trees/histograms.
fout = TFile.Open('output.root', 'RECREATE')
factory = TMVA.Factory('TMVAClassification', fout,
                       '!V:!Silent:Color:DrawProgressBar:Transformations=D,G:AnalysisType=Classification')

# Load training data: one tree per class, from separate files.
trainfile = TFile.Open('signal.root')
trainfile2 = TFile.Open('background.root')
signal = trainfile.Get('tree')
background = trainfile2.Get('tree')

dataloader = TMVA.DataLoader('weights')

# Branch names to exclude from the input-variable list.
# BUG FIX: the original tested `branch.GetName() not in prok`, but `prok`
# was never defined, so the script raised NameError here. Define the
# exclusion set explicitly (empty = use every branch); add names such as
# event IDs or weights if they should not be training variables.
excluded_branches = set()
for branch in signal.GetListOfBranches():
    if branch.GetName() not in excluded_branches:
        print(branch.GetName())
        dataloader.AddVariable(branch.GetName())

dataloader.AddTree(signal, 'signal')
dataloader.AddTree(background, 'background')
# Generate model: a small feed-forward neural-network classifier.
num_input = 5            # number of input variables; must match the count of
                         # variables added to the DataLoader — TODO confirm
num_output = 2           # two classes: signal vs. background
num_node_hid_layer = 3   # nodes per hidden layer
num_hid_layer = 1        # total number of hidden layers
l2_val = 1e-2            # L2 weight-decay strength

model = Sequential()

# Input + first hidden layer.
# BUG FIX: the original applied 'softmax' here. Softmax belongs only on the
# output layer of a classifier; on a hidden layer it squashes the (here
# 3-unit) activations onto a probability simplex and cripples learning.
model.add(Dense(num_node_hid_layer,
                kernel_initializer='random_normal',
                kernel_regularizer=l2(l2_val),
                input_dim=num_input))
model.add(Activation('relu'))

# Additional hidden layers (loop runs zero times when num_hid_layer == 1).
# BUG FIX: the original added Dense(num_output) here, silently shrinking
# every extra hidden layer to 2 nodes; use the hidden-layer width instead.
for _ in range(num_hid_layer - 1):
    model.add(Dense(num_node_hid_layer,
                    kernel_initializer='random_normal',
                    kernel_regularizer=l2(l2_val)))
    model.add(Activation('relu'))

# Output layer: softmax to produce per-class probabilities.
model.add(Dense(num_output, kernel_initializer='random_normal'))
model.add(Activation('softmax'))

# Compile with cross-entropy loss and gradient-clipped SGD, then save to
# disk — TMVA's PyKeras method loads the architecture from this .h5 file.
model.compile(loss='categorical_crossentropy',
              optimizer=SGD(lr=0.01, clipnorm=1.),
              metrics=['accuracy'])
model.save('model.h5')
model.summary()

# Visualise model (requires pydot/graphviz to be installed).
from keras.utils import plot_model
plot_model(model, to_file='model.png')
# # Book Method
# factory.BookMethod(dataloader, TMVA.Types.kDNN, 'DNN','!H:V:VarTransform=N:ErrorStrategy=CROSSENTROPY:WeightInitialization=XAVIERUNIFORM:Layout=TANH|100,TANH|80,TANH|50,TANH|20,LINEAR:TrainingStrategy=LearningRate=1e-1,Momentum=0.7,Repetitions=1,ConvergenceSteps=300,BatchSize=20,DropConfig=0.0+0.5+0.5+0.0,WeightDecay=0.001,Regularization=L2,TestRepetitions=15,Multithreading=True')
# Book the Keras model saved above as a TMVA PyKeras method.
# NOTE(review): VarTransform=D,G requests Decorrelation + Gaussianisation
# per-method, while the Factory was already created with
# Transformations=D,G — the same transforms are prepared twice. The
# reported freeze happens "preparing the Gaussian transformation";
# presumably dropping one of the two (e.g. VarTransform here) would
# reduce that cost — verify against the TMVA Users Guide.
factory.BookMethod(dataloader, TMVA.Types.kPyKeras, "Keras_h5",
'!H:!V:VarTransform=D,G:FilenameModel=model.h5:NumEpochs=150:BatchSize=10')
# # Run TMVA
# Train every booked method, then evaluate on the test split and write
# the comparison output into output.root.
factory.TrainAllMethods()
factory.TestAllMethods()
factory.EvaluateAllMethods()
Is there something wrong with my code, or is there something I have missed?
Any help is appreciated.
Danny