# ClassificationScanConfig.py
# Forked from UTA-HEP-Computing/LArTPCDNN
# Hyperparameter-scan configuration for the LArTPC DNN classifier.
# NOTE(review): this file appears to be exec'd/imported by a training driver
# that may inject names (HyperParamSet, TestMode, LowMemMode) into its scope
# before evaluation -- confirm against the driver script.
import random
import getopt
from DLTools.Permutator import *
import sys,argparse
from numpy import arange
import os
# Input for Mixing Generator
FileSearch="/data/LArIAT/h5_files/*.h5"   # glob pattern for the input HDF5 event files
#FileSearch="/Users/afarbin/LCD/Data/*/*.h5"
from multiprocessing import cpu_count
from DLTools.Utils import gpu_count
max_threads=12
n_threads=int(min(round(2*cpu_count()/gpu_count()),max_threads))
print "Found",cpu_count(),"CPUs and",gpu_count(),"GPUs. Using",n_threads,"threads. max_threads =",max_threads
Particles= ['electron', 'antielectron',
'pion',
'photon',
'pionPlus', 'pionMinus',
'proton', 'antiproton',
'muon', 'antimuon',
'kaonMinus', 'kaonPlus']
# Generation Model
#
# Base configuration consumed by the training driver.  String-valued entries
# carry nested quotes (e.g. "'relu'") because each entry is later lifted into
# module scope via exec("<key>=<value>"); evaluating the value text must
# reproduce the Python string itself.
Config={
    "MaxEvents":int(.5e6),          # total number of events to read
    "NTestSamples":25000,           # events held out for the final test set
    "Particles":Particles,          # class labels (list defined above)
    "NClasses":len(Particles),      # classifier output dimensionality
    "Epochs":1000,
    "BatchSize":128,
    "DownSampleSize":8,             # presumably a waveform downsampling factor -- confirm
    "ScanWindowSize":256,           # presumably the scanned time-window width -- confirm
    "Normalize":True,
    "EnergyCut":0.61,
    # Configures the parallel data generator that read the input.
    # These have been optimized by hand. Your system may have
    # more optimal configuration.
    "n_threads":n_threads, # Number of workers when using mixing generator.
    "n_threads_cache":4, # Number of workers reading cached data.
    "multiplier":1, # Read N batches worth of data in each worker
    # How weights are initialized
    "WeightInitialization":"'normal'",
    # Model
    "View1":True,
    "View2":True,
    "Width":32,                     # overridden by the hyperparameter scan below
    "Depth":2,                      # overridden by the hyperparameter scan below
    # No specific reason to pick these. Needs study.
    # Note that the optimizer name should be the class name (https://keras.io/optimizers/)
    "loss":"'categorical_crossentropy'",
    "activation":"'relu'",
    "BatchNormLayers":True,
    "DropoutLayers":True,
    # Specify the optimizer class name as True (see: https://keras.io/optimizers/)
    # and parameters (using constructor keywords as parameter name).
    # Note if parameter is not specified, default values are used.
    "optimizer":"'RMSprop'",
    "lr":0.01,
    "decay":0.01,
    # Parameter monitored by Callbacks
    "monitor":"'val_loss'",
    # Active Callbacks
    # Specify the CallBack class name as True (see: https://keras.io/callbacks/)
    # and parameters (using constructor keywords as parameter name,
    # with classname added).
    "ModelCheckpoint":True,
    # NOTE(review): "Chekpoint" is misspelled; if the framework looks for
    # "Model_Checkpoint_save_best_only" this entry is silently ignored -- confirm
    # against the callback-configuration code before renaming the key.
    "Model_Chekpoint_save_best_only":False,
    # Configure Running time callback
    # Set RunningTime to a value to stop training after N seconds.
    "RunningTime": 2*3600,
    # Load last trained version of this model configuration. (based on Name var below)
    "LoadPreviousModel":True,
}
# Parameters to scan and their scan points.
Params={ "Width":[32,64,128,256,512],
"Depth":range(1,5) }
# Get all possible configurations.
PS=Permutator(Params)
Combos=PS.Permutations()
print "HyperParameter Scan: ", len(Combos), "possible combiniations."
# HyperParameter sets are numbered. You can iterate through them using
# the -s option followed by an integer .
i=0
if "HyperParamSet" in dir():
i=int(HyperParamSet)
for k in Combos[i]: Config[k]=Combos[i][k]
# Build a name for the this configuration using the parameters we are
# scanning.
Name="LArTPCDNN"
for MetaData in Params.keys():
val=str(Config[MetaData]).replace('"',"")
Name+="_"+val.replace("'","")
if "HyperParamSet" in dir():
print "______________________________________"
print "ScanConfiguration"
print "______________________________________"
print "Picked combination: ",i
print "Combo["+str(i)+"]="+str(Combos[i])
print "Model Filename: ",Name
print "______________________________________"
else:
for ii,c in enumerate(Combos):
print "Combo["+str(ii)+"]="+str(c)
# Now put config in the current scope. Must find a prettier way.
# Each entry becomes a module-level variable, e.g. Width=32.  str() of a
# value such as "'RMSprop'" yields the source text 'RMSprop', whose eval is
# the bare string -- this is why the string entries carry nested quotes.
# NOTE(review): exec on config values is unsafe if Config could ever hold
# untrusted input -- confirm it is always author-controlled.
if "Config" in dir():
    for a in Config:
        exec(a+"="+str(Config[a]))
# Use "--Test" to run on less events and epochs.
OutputBase="TrainedModels"
if TestMode:
MaxEvents=int(20e3)
NTestSamples=int(20e2)
Epochs=10
OutputBase+=".Test"
print "Test Mode: Set MaxEvents to",MaxEvents,"and Epochs to", Epochs
if LowMemMode:
n_threads=1
multiplier=1
# Calculate how many events will be used for training/validation.
NSamples=MaxEvents-NTestSamples
# Function to help manage optional configurations. Checks and returns
# if an object is in current scope. Return default value if not.
def TestDefaultParam(Config):
    """Return a lookup helper bound to the name list *Config*.

    The returned function evaluates *param* in this module's scope when the
    name appears in *Config*, and returns *default* otherwise.
    """
    def TestParamPrime(param, default=False):
        return eval(param) if param in Config else default
    return TestParamPrime

# Rebind to a helper closed over the names defined in this module so far.
TestDefaultParam = TestDefaultParam(dir())