Variables

| Type | Variable |
|------|----------|
| | stream |
| | stdout |
| | level |
| | config = configparser.ConfigParser() |
| | SHUFFLE = ast.literal_eval(config['random']['shuffle']) |
| | IMAGES_PATH = config['images']['path'] |
| | VIEWS = int(config['images']['views']) |
| | PLANES = int(config['images']['planes']) |
| | CELLS = int(config['images']['cells']) |
| | STANDARDIZE = ast.literal_eval(config['images']['standardize']) |
| | INTERACTION_LABELS = ast.literal_eval(config['images']['interaction_labels']) |
| | FILTERED = ast.literal_eval(config['images']['filtered']) |
| | INTERACTION_TYPES = ast.literal_eval(config['dataset']['interaction_types']) |
| list | NEUTRINO_LABELS = [] |
| | N_LABELS = len(Counter(INTERACTION_LABELS.values())) |
| | DATASET_PATH = config['dataset']['path'] |
| | PARTITION_PREFIX = config['dataset']['partition_prefix'] |
| | LABELS_PREFIX = config['dataset']['labels_prefix'] |
| | LOG_PATH = config['log']['path'] |
| | LOG_PREFIX = config['log']['prefix'] |
| | CHECKPOINT_PATH = config['model']['checkpoint_path'] |
| | CHECKPOINT_PREFIX = config['model']['checkpoint_prefix'] |
| | CHECKPOINT_SAVE_MANY = ast.literal_eval(config['model']['checkpoint_save_many']) |
| | CHECKPOINT_SAVE_BEST_ONLY = ast.literal_eval(config['model']['checkpoint_save_best_only']) |
| | CHECKPOINT_PERIOD = int(config['model']['checkpoint_period']) |
| | PRINT_SUMMARY = ast.literal_eval(config['model']['print_summary']) |
| | RESUME = ast.literal_eval(config['train']['resume']) |
| | LEARNING_RATE = float(config['train']['lr']) |
| | MOMENTUM = float(config['train']['momentum']) |
| | DECAY = float(config['train']['decay']) |
| | TRAIN_BATCH_SIZE = int(config['train']['batch_size']) |
| | EPOCHS = int(config['train']['epochs']) |
| | EARLY_STOPPING_PATIENCE = int(config['train']['early_stopping_patience']) |
| | WEIGHTED_LOSS_FUNCTION = ast.literal_eval(config['train']['weighted_loss_function']) |
| | CLASS_WEIGHTS_PREFIX = config['train']['class_weights_prefix'] |
| | VALIDATION_FRACTION = float(config['validation']['fraction']) |
| | VALIDATION_BATCH_SIZE = int(config['validation']['batch_size']) |
| dictionary | TRAIN_PARAMS |
| dictionary | VALIDATION_PARAMS |
| dictionary | partition = {'train' : [], 'validation' : [], 'test' : []} |
| dictionary | labels = {} |
| | partition_file = open(DATASET_PATH + PARTITION_PREFIX + '.p', 'r') |
| | labels_file = open(DATASET_PATH + LABELS_PREFIX + '.p', 'r') |
| | class_weights_file = open(DATASET_PATH + CLASS_WEIGHTS_PREFIX + '.p', 'r') |
| | class_weights = pickle.load(class_weights_file) |
| | training_generator = DataGenerator(**TRAIN_PARAMS).generate(labels, partition['train'], True) |
| | validation_generator = DataGenerator(**VALIDATION_PARAMS).generate(labels, partition['validation'], True) |
| list | files = [f for f in os.listdir(CHECKPOINT_PATH) if os.path.isfile(os.path.join(CHECKPOINT_PATH, f))] |
| | reverse |
| | r = re.compile(CHECKPOINT_PREFIX[1:] + '-.*-.*.h5') |
| | model = load_model(CHECKPOINT_PATH + '/' + fil) |
| list | input_shape = [PLANES, CELLS, VIEWS] |
| | opt = optimizers.SGD(lr=LEARNING_RATE, momentum=MOMENTUM, decay=DECAY, nesterov=True) |
| | loss |
| | optimizer |
| | metrics |
| string | filepath = CHECKPOINT_PATH+CHECKPOINT_PREFIX+'.h5' |
| string | monitor_acc = 'val_acc' |
| string | monitor_loss = 'val_loss' |
| | checkpoint = ModelCheckpoint(filepath, monitor=monitor_acc, verbose=1, save_best_only=CHECKPOINT_SAVE_BEST_ONLY, mode='max', period=CHECKPOINT_PERIOD) |
| | lr_reducer = ReduceLROnPlateau(monitor=monitor_loss, factor=0.1, cooldown=0, patience=3, min_lr=0.5e-6, verbose=1) |
| | early_stopping = EarlyStopping(monitor=monitor_acc, patience=EARLY_STOPPING_PATIENCE, mode='auto') |
| | csv_logger = CSVLogger(LOG_PATH + LOG_PREFIX + '.log', append=RESUME) |
| | my_callback = my_callbacks.MyCallback() |
| list | callbacks_list = [lr_reducer, checkpoint, early_stopping, csv_logger] |
| int | initial_epoch = int(re.search(r'\d+', logfile.read().split('\n')[-2]).group())+1 |
| | generator |
| | steps_per_epoch |
| | validation_data |
| | validation_steps |
| | epochs |
| | class_weight |
| | callbacks |
| | verbose |
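Most of the variables above come straight out of a configparser object. A minimal sketch (not the module's actual code) of how these options are presumably read; the file name config.ini and the sample literal values are assumptions for illustration only:

```python
import ast
import configparser

config = configparser.ConfigParser()
config.read('config.ini')  # hypothetical file name; the real script reads its own config file

# Plain strings and numbers come straight from the parser ...
IMAGES_PATH = config['images']['path']
PLANES = int(config['images']['planes'])

# ... while booleans and dicts are stored as Python literals in the config file
# and recovered with ast.literal_eval, e.g. shuffle = True or
# interaction_labels = {'numu_cc_qe': 0, 'nue_cc_qe': 1}  (example values only)
SHUFFLE = ast.literal_eval(config['random']['shuffle'])
INTERACTION_LABELS = ast.literal_eval(config['images']['interaction_labels'])
```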
Variable Documentation

training.callbacks
Definition at line 378 of file training.py.

list training.callbacks_list = [lr_reducer, checkpoint, early_stopping, csv_logger]
Definition at line 327 of file training.py.

training.CELLS = int(config['images']['cells'])
Definition at line 48 of file training.py.

training.checkpoint = ModelCheckpoint(filepath, monitor=monitor_acc, verbose=1, save_best_only=CHECKPOINT_SAVE_BEST_ONLY, mode='max', period=CHECKPOINT_PERIOD)
Definition at line 300 of file training.py.

training.CHECKPOINT_PATH = config['model']['checkpoint_path']
Definition at line 82 of file training.py.

training.CHECKPOINT_PERIOD = int(config['model']['checkpoint_period'])
Definition at line 86 of file training.py.

training.CHECKPOINT_PREFIX = config['model']['checkpoint_prefix']
Definition at line 83 of file training.py.

training.CHECKPOINT_SAVE_BEST_ONLY = ast.literal_eval(config['model']['checkpoint_save_best_only'])
Definition at line 85 of file training.py.

training.CHECKPOINT_SAVE_MANY = ast.literal_eval(config['model']['checkpoint_save_many'])
Definition at line 84 of file training.py.

training.class_weight
Definition at line 377 of file training.py.

training.class_weights = pickle.load(class_weights_file)
Definition at line 161 of file training.py.

training.class_weights_file = open(DATASET_PATH + CLASS_WEIGHTS_PREFIX + '.p', 'r')
Definition at line 160 of file training.py.
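class_weights is loaded from a pickle file. A hedged sketch of that step: the documented code opens the file in text mode ('r'), which only works for old protocol-0 pickles on Python 2; on Python 3, pickle.load expects a binary file object, so 'rb' is used here. The example weights in the comment are made up.

```python
import pickle

# Load the per-class weights used to balance the loss, e.g. {0: 1.0, 1: 2.3, ...}
with open(DATASET_PATH + CLASS_WEIGHTS_PREFIX + '.p', 'rb') as class_weights_file:
    class_weights = pickle.load(class_weights_file)
```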
training.CLASS_WEIGHTS_PREFIX = config['train']['class_weights_prefix']
Definition at line 99 of file training.py.

training.config = configparser.ConfigParser()
Definition at line 35 of file training.py.

training.csv_logger = CSVLogger(LOG_PATH + LOG_PREFIX + '.log', append=RESUME)
Definition at line 317 of file training.py.

training.DATASET_PATH = config['dataset']['path']
Definition at line 71 of file training.py.

training.DECAY = float(config['train']['decay'])
Definition at line 94 of file training.py.

training.early_stopping = EarlyStopping(monitor=monitor_acc, patience=EARLY_STOPPING_PATIENCE, mode='auto')
Definition at line 313 of file training.py.

training.EARLY_STOPPING_PATIENCE = int(config['train']['early_stopping_patience'])
Definition at line 97 of file training.py.

training.EPOCHS = int(config['train']['epochs'])
Definition at line 96 of file training.py.

training.epochs
Definition at line 376 of file training.py.

string training.filepath = CHECKPOINT_PATH+CHECKPOINT_PREFIX+'.h5'
Definition at line 278 of file training.py.

list training.files = [f for f in os.listdir(CHECKPOINT_PATH) if os.path.isfile(os.path.join(CHECKPOINT_PATH, f))]
Definition at line 201 of file training.py.

training.FILTERED = ast.literal_eval(config['images']['filtered'])
Definition at line 51 of file training.py.

training.generator
Definition at line 372 of file training.py.

training.IMAGES_PATH = config['images']['path']
Definition at line 45 of file training.py.

int training.initial_epoch = int(re.search(r'\d+', logfile.read().split('\n')[-2]).group())+1
Definition at line 349 of file training.py.
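initial_epoch is recovered from the training log when a run is resumed. A sketch of the logic implied by its initializer: the CSVLogger file is assumed to have one row per epoch with the epoch number first, so the second-to-last line of the file holds the last completed epoch. The name logfile is not documented above; opening the CSVLogger output is an assumption.

```python
import re

# Read the last data row of the CSV log (the file ends with a newline) and
# continue from the epoch after the last one recorded there.
with open(LOG_PATH + LOG_PREFIX + '.log', 'r') as logfile:
    last_line = logfile.read().split('\n')[-2]
    initial_epoch = int(re.search(r'\d+', last_line).group()) + 1
```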
list training.input_shape = [PLANES, CELLS, VIEWS]
Definition at line 230 of file training.py.

training.INTERACTION_LABELS = ast.literal_eval(config['images']['interaction_labels'])
Definition at line 50 of file training.py.

training.INTERACTION_TYPES = ast.literal_eval(config['dataset']['interaction_types'])
Definition at line 53 of file training.py.

training.labels = {}
Definition at line 144 of file training.py.

training.labels_file = open(DATASET_PATH + LABELS_PREFIX + '.p', 'r')
Definition at line 154 of file training.py.

training.LABELS_PREFIX = config['dataset']['labels_prefix']
Definition at line 73 of file training.py.

training.LEARNING_RATE = float(config['train']['lr'])
Definition at line 92 of file training.py.

training.level
Definition at line 33 of file training.py.

training.LOG_PATH = config['log']['path']
Definition at line 77 of file training.py.

training.LOG_PREFIX = config['log']['prefix']
Definition at line 78 of file training.py.

training.loss
Definition at line 252 of file training.py.

training.lr_reducer = ReduceLROnPlateau(monitor=monitor_loss, factor=0.1, cooldown=0, patience=3, min_lr=0.5e-6, verbose=1)
Definition at line 307 of file training.py.

training.metrics
Definition at line 252 of file training.py.

training.model = load_model(CHECKPOINT_PATH + '/' + fil)
Definition at line 208 of file training.py.

training.MOMENTUM = float(config['train']['momentum'])
Definition at line 93 of file training.py.

string training.monitor_acc = 'val_acc'
Definition at line 287 of file training.py.

string training.monitor_loss = 'val_loss'
Definition at line 288 of file training.py.

training.my_callback = my_callbacks.MyCallback()
Definition at line 321 of file training.py.

training.N_LABELS = len(Counter(INTERACTION_LABELS.values()))
Definition at line 60 of file training.py.

training.NEUTRINO_LABELS = []
Definition at line 59 of file training.py.

training.opt = optimizers.SGD(lr=LEARNING_RATE, momentum=MOMENTUM, decay=DECAY, nesterov=True)
Definition at line 246 of file training.py.

training.optimizer
Definition at line 252 of file training.py.
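loss, optimizer and metrics are the keyword arguments of the model.compile call at line 252, with opt as the optimizer. A minimal sketch of that call; 'categorical_crossentropy' and ['accuracy'] are assumptions, since the actual loss and metric values are not shown in this listing.

```python
from keras import optimizers

# SGD with Nesterov momentum, as documented above.
opt = optimizers.SGD(lr=LEARNING_RATE, momentum=MOMENTUM, decay=DECAY, nesterov=True)

model.compile(loss='categorical_crossentropy',  # assumed; the real loss is not listed here
              optimizer=opt,
              metrics=['accuracy'])             # assumed
```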
training.partition = {'train' : [], 'validation' : [], 'test' : []}
Definition at line 143 of file training.py.

training.partition_file = open(DATASET_PATH + PARTITION_PREFIX + '.p', 'r')
Definition at line 150 of file training.py.

training.PARTITION_PREFIX = config['dataset']['partition_prefix']
Definition at line 72 of file training.py.

training.PLANES = int(config['images']['planes'])
Definition at line 47 of file training.py.

training.PRINT_SUMMARY = ast.literal_eval(config['model']['print_summary'])
Definition at line 87 of file training.py.

training.r = re.compile(CHECKPOINT_PREFIX[1:] + '-.*-.*.h5')
Definition at line 204 of file training.py.

training.RESUME = ast.literal_eval(config['train']['resume'])
Definition at line 91 of file training.py.

training.reverse
Definition at line 202 of file training.py.
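files, reverse, r and model together implement the checkpoint-resume step. A hedged sketch of how these pieces presumably fit together: list the checkpoint directory, sort it, keep the files matching the checkpoint name pattern, and load the first match. The sorting key, the match on the first hit, and the loop variable fil are assumptions.

```python
import os
import re
from keras.models import load_model

# Regular files in the checkpoint directory, most recent name first (assumed ordering).
files = [f for f in os.listdir(CHECKPOINT_PATH)
         if os.path.isfile(os.path.join(CHECKPOINT_PATH, f))]
files.sort(reverse=True)

# CHECKPOINT_PREFIX presumably starts with a path separator, hence the [1:] slice.
r = re.compile(CHECKPOINT_PREFIX[1:] + '-.*-.*.h5')

for fil in files:
    if r.match(fil):
        model = load_model(CHECKPOINT_PATH + '/' + fil)  # resume from this checkpoint
        break
```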
training.SHUFFLE = ast.literal_eval(config['random']['shuffle'])
Definition at line 41 of file training.py.

training.STANDARDIZE = ast.literal_eval(config['images']['standardize'])
Definition at line 49 of file training.py.

training.stdout
Definition at line 33 of file training.py.

training.steps_per_epoch
Definition at line 373 of file training.py.

training.stream
Definition at line 33 of file training.py.

training.TRAIN_BATCH_SIZE = int(config['train']['batch_size'])
Definition at line 95 of file training.py.

dictionary training.TRAIN_PARAMS
Definition at line 108 of file training.py.
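TRAIN_PARAMS (and likewise VALIDATION_PARAMS) is not expanded in this listing. Judging from the image settings above and the DataGenerator(**TRAIN_PARAMS) call below, it is presumably a dict of generator options along these lines; every key name here is a hypothetical placeholder, not the module's documented contents.

```python
# Hypothetical sketch only: the real keys are whatever DataGenerator expects.
TRAIN_PARAMS = {'planes': PLANES,
                'cells': CELLS,
                'views': VIEWS,
                'batch_size': TRAIN_BATCH_SIZE,
                'images_path': IMAGES_PATH,
                'standardize': STANDARDIZE,
                'shuffle': SHUFFLE}
```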
training.training_generator = DataGenerator(**TRAIN_PARAMS).generate(labels, partition['train'], True)
Definition at line 181 of file training.py.

training.VALIDATION_BATCH_SIZE = int(config['validation']['batch_size'])
Definition at line 104 of file training.py.

training.validation_data
Definition at line 374 of file training.py.

training.VALIDATION_FRACTION = float(config['validation']['fraction'])
Definition at line 103 of file training.py.

training.validation_generator = DataGenerator(**VALIDATION_PARAMS).generate(labels, partition['validation'], True)
Definition at line 182 of file training.py.

dictionary training.VALIDATION_PARAMS
Definition at line 123 of file training.py.

training.validation_steps
Definition at line 375 of file training.py.

training.verbose
Definition at line 380 of file training.py.
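generator, steps_per_epoch, validation_data, validation_steps, epochs, class_weight, callbacks and verbose are the keyword arguments of the model.fit_generator call at lines 372-380. A sketch of that call; the step counts, the class_weight value and verbose=1 are assumptions based on the variables documented above, not the module's exact arguments.

```python
model.fit_generator(
    generator=training_generator,
    steps_per_epoch=len(partition['train']) // TRAIN_BATCH_SIZE,          # assumed
    validation_data=validation_generator,
    validation_steps=len(partition['validation']) // VALIDATION_BATCH_SIZE,  # assumed
    epochs=EPOCHS,
    class_weight=class_weights,   # assumed to be used when WEIGHTED_LOSS_FUNCTION is set
    callbacks=callbacks_list,
    verbose=1)                    # assumed
```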
training.VIEWS = int(config['images']['views'])
Definition at line 46 of file training.py.

training.WEIGHTED_LOSS_FUNCTION = ast.literal_eval(config['train']['weighted_loss_function'])
Definition at line 98 of file training.py.