This repository has been archived by the owner on Oct 5, 2020. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 5
/
run-A_ds.py
90 lines (64 loc) · 2.79 KB
/
run-A_ds.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Low resolution spectrogram convnet.
Random seed #1.
"""
import os
# THEANO_FLAGS must be set via the environment BEFORE Theano is imported;
# presumably `badc.common` below pulls in Theano/Keras, so this assignment
# has to stay ahead of those imports — confirm against badc.common.
# The two `deterministic` cuDNN backward-algorithm flags trade speed for
# bitwise-reproducible gradients on GPU.
os.environ['THEANO_FLAGS'] = ('floatX=float32,'
'device=gpu0,'
'dnn.conv.algo_bwd_filter=deterministic,'
'dnn.conv.algo_bwd_data=deterministic')
from badc.common import *
from badc.generics import describe, LearningRateDecay
from badc.dataset import iterbatches, generate_predictions
import badc.monitor
if __name__ == '__main__':
    # Run identifier: names the checkpoint file and the prediction output.
    RUN = 'A_ds'
    np.random.seed(120170113)

    # Merge the two training corpora and shuffle with the fixed seed above so
    # the validation split is reproducible across runs.
    train = pd.concat([freefield, warblr], ignore_index=True)
    train = train.iloc[np.random.permutation(len(train))]
    validation = train[:1000]
    # NOTE(review): the hold-out line below is commented out, so the first
    # 1000 rows are used for both training and validation — looks deliberate
    # for this run, but confirm.
    # train = train[1000:]

    # Convnet over low-resolution spectrograms; input is (2, 50, 480)
    # (channels-first, axis=1 batch-norm throughout).
    model = keras.models.Sequential()
    model.add(Conv(80, 47, 6, init='he_uniform', W_regularizer=L2(0.001),
                   input_shape=(2, 50, 480)))
    model.add(LeakyReLU())
    model.add(Pool((4, 3)))
    model.add(BatchNormalization(axis=1))
    # Four structurally identical 1x2 conv stages that only widen the filter
    # count; written as a loop instead of four copy-pasted stacks.
    for n_filters in (160, 240, 320, 400):
        model.add(Conv(n_filters, 1, 2, W_regularizer=L2(0.001), init='he_uniform'))
        model.add(LeakyReLU())
        model.add(Pool((1, 2)))
        model.add(BatchNormalization(axis=1))
    model.add(Dropout(0.25))
    # 1x1 conv + sigmoid gives a per-position probability map; global max
    # pooling reduces it to a single clip-level prediction.
    model.add(Conv(1, 1, 1, init='he_uniform'))
    model.add(Activation('sigmoid'))
    model.add(GlobalMaxPooling2D())
    model.compile(loss='binary_crossentropy',
                  optimizer=keras.optimizers.SGD(lr=0.01, momentum=0.9, nesterov=True),
                  metrics=['accuracy'])
    describe(model)

    # Checkpoint path, hoisted: it was previously rebuilt inline three times.
    weights_path = 'results/run-' + str(RUN) + '.h5'
    if os.path.exists(weights_path):
        # Resume/evaluate: reuse previously trained weights.
        model.load_weights(weights_path)
    else:
        # Train from scratch, monitoring on one fixed 1000-sample batch.
        validation_batch = next(iterbatches(1000, validation, augment=False, hires=False))
        monitor = badc.monitor.Monitor(model, validation_batch, RUN)
        learning_rate_decay = LearningRateDecay(100, 0.5)
        model.fit_generator(generator=iterbatches(32, train, augment=True, hires=False),
                            samples_per_epoch=len(train),
                            nb_epoch=90,
                            callbacks=[monitor, learning_rate_decay],
                            verbose=0,
                            max_q_size=10)
        model.save_weights(weights_path)
    generate_predictions(model, RUN, hires=False)