models.py
from consts import *
from keras.layers import Dense, Flatten, Input, concatenate, Reshape, Lambda
from keras.models import Model
from keras.layers.advanced_activations import LeakyReLU


def AmplitudeModel():
    # Amplitude branch: FFT_BINS bins x 2 channels x 11 frames of amplitude features.
    ain = Input(shape=(FFT_BINS, 2, 11))
    aout = Flatten()(ain)
    # Normalize amplitudes with the precomputed dataset statistics from consts.
    aout = Lambda(lambda x: (x - AMPLITUDE_MEAN) / AMPLITUDE_STD)(aout)
    aout = Dense(500, activation=LeakyReLU())(aout)
    aout = Dense(500, activation=LeakyReLU())(aout)
    return ain, aout


def PhaseModel():
    # Phase branch: 2 * 2 phase components per bin over 11 frames.
    pin = Input(shape=(FFT_BINS, 2 * 2, 11))
    pout = Flatten()(pin)
    pout = Dense(500, activation=LeakyReLU())(pout)
    pout = Dense(500, activation=LeakyReLU())(pout)
    return pin, pout


def APModel(_name):
    # Join the amplitude and phase branches and map back to (FFT_BINS, 2) outputs.
    ain, aout = AmplitudeModel()
    pin, pout = PhaseModel()
    output = concatenate([aout, pout])
    output = Dense(2 * FFT_BINS, activation=LeakyReLU())(output)
    output = Reshape((FFT_BINS, 2))(output)
    model = Model(inputs=[ain, pin], outputs=output, name=_name)
    model.compile(loss='mean_squared_error', optimizer='adam')
    return model
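

# A minimal usage sketch, not part of the original file: it assumes consts defines
# FFT_BINS, AMPLITUDE_MEAN, and AMPLITUDE_STD, and feeds hypothetical random arrays
# shaped to match the Input layers above just to confirm shapes flow through the graph.
if __name__ == '__main__':
    import numpy as np

    model = APModel('ap_model')
    model.summary()

    # Hypothetical batch of 4 examples with matching amplitude and phase shapes.
    amp = np.random.randn(4, FFT_BINS, 2, 11)
    phase = np.random.randn(4, FFT_BINS, 2 * 2, 11)
    pred = model.predict([amp, phase])
    print(pred.shape)  # expected: (4, FFT_BINS, 2)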