# test.py
import tensorflow as tf
from tensorflow.python.framework import ops

import tdnn as TDNN

# Network configuration: a single-example minibatch of 23 spliced frames
# (13 preceding + 1 center + 9 following), each with 120-dim features.
minibatch_size = 1
pnorm_input_dim = 128
pnorm_output_dim = 64
input_sequence_length = 13 + 1 + 9  # prev + center + post frames
input_data_dims = 120

# One list of frame offsets (temporal context) per TDNN layer.
tdnn_names = ["conv1", "conv2", "conv3", "conv4", "conv5"]
tdnn_context = [[-2, -1, 0, 1, 2], [-1, 2], [-3, 3], [-7, 2], [0]]

layer_dict = {}

# Random input features standing in for acoustic frames.
end_layer = layer_dict["input_layer"] = tf.Variable(
    tf.random_uniform([minibatch_size, input_sequence_length, input_data_dims],
                      -1.0,
                      1.0))

# Stack the TDNN layers, feeding each layer's output into the next.
for idx, (layer_name, context) in enumerate(zip(tdnn_names, tdnn_context)):
    with ops.name_scope(name=layer_name):
        pnorm_name = "pnorm" + str(idx + 1)
        renorm_name = "renorm" + str(idx + 1)
        end_layer = layer_dict[layer_name] = TDNN.tdnn(
            inputs=end_layer,
            context=context,
            input_dim=pnorm_output_dim,
            output_dim=pnorm_input_dim,
            layer_name=layer_name,
            pnorm_name=pnorm_name,
            renorm_name=renorm_name)

for key, val in layer_dict.items():
    print(key, val)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    logits = sess.run(end_layer)
    print(logits)
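
# ---------------------------------------------------------------------------
# The tdnn module imported above is not part of this file, so the function
# below is only a rough sketch of what a Kaldi-style TDNN layer of this kind
# might do: splice frames at the given context offsets, apply an affine
# projection, then a group p-norm nonlinearity followed by renormalization.
# The function name, signature, and every implementation detail here are
# assumptions for illustration, not the actual TDNN.tdnn code.
def tdnn_layer_sketch(inputs, context, output_dim, group_size=2, p=2.0,
                      name="tdnn_sketch"):
    """inputs: [batch, time, dim] tensor with a known static time dimension;
    context: list of frame offsets relative to the center frame."""
    with tf.variable_scope(name):
        num_frames = inputs.get_shape().as_list()[1]
        # Splice: for every center frame that has all requested neighbors,
        # gather the frames at each offset and concatenate along features.
        lo, hi = -min(context), num_frames - max(context)
        spliced = tf.concat([inputs[:, lo + c:hi + c, :] for c in context],
                            axis=2)
        # Affine projection to output_dim * group_size activations per frame
        # (128 = pnorm_input_dim when output_dim=64 and group_size=2).
        affine = tf.layers.dense(spliced, output_dim * group_size)
        # Group p-norm: collapse each group of `group_size` activations into
        # a single value ||x||_p.
        dyn = tf.shape(affine)
        grouped = tf.reshape(affine, [dyn[0], dyn[1], output_dim, group_size])
        pnorm = tf.pow(tf.reduce_sum(tf.pow(tf.abs(grouped), p), axis=3),
                       1.0 / p)
        # Renormalize each frame to roughly unit RMS.
        rms = tf.sqrt(tf.reduce_mean(tf.square(pnorm), axis=2,
                                     keepdims=True) + 1e-8)
        return pnorm / rms
# A layer like this could stand in for TDNN.tdnn in the loop above (with
# output_dim=pnorm_output_dim); note it shrinks the time axis by
# max(context) - min(context) frames per layer.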