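# Shape symbols below are inferred from the placeholder definitions and are not
# defined in this snippet (an assumption, not part of the original code):
# N = batch size, M = number of sentences per story, J = max sentence length,
# d = embedding / hidden size.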
with tf.name_scope("placeholders"):
x = tf.placeholder('int32', shape=[N, M, J], name='x')
x_mask = tf.placeholder('bool', shape=[N, M, J], name='x_mask')
q = tf.placeholder('int32', shape=[N, J], name='q')
q_mask = tf.placeholder('bool', shape=[N, J], name='q_mask')
y = tf.placeholder('int32', shape=[N], name='y')
is_train = tf.placeholder('bool', shape=[], name='is_train')
placeholders['x'] = x
placeholders['x_mask'] = x_mask
placeholders['q'] = q
placeholders['q_mask'] = q_mask
placeholders['y'] = y
placeholders['is_train'] = is_train
with tf.variable_scope("embedding"):
A = VariableEmbedder(params, wd=wd, initializer=initializer, name='A')
Aq = A(q, name='Aq') # [N, S, J, d]
Ax = A(x, name='Ax') # [N, S, J, d]
with tf.name_scope("encoding"):
encoder = PositionEncoder(J, d)
u = encoder(Aq, q_mask) # [N, d]
m = encoder(Ax, x_mask) # [N, M, d]
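    # PositionEncoder presumably applies the position-encoded bag-of-words from
    # End-to-End Memory Networks (Sukhbaatar et al., 2015): a sentence vector is
    # sum_j l_j * embedding_j, with positional weights
    # l_kj = (1 - j/J) - (k/d) * (1 - 2j/J), and padded positions zeroed out via
    # q_mask / x_mask. This is an assumption based on the class name and the
    # output shapes; the encoder's implementation is not shown here.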
with tf.variable_scope("networks"):
m_mask = tf.reduce_max(tf.cast(x_mask, 'int64'), 2, name='m_mask') # [N, M]
gate_mask = tf.expand_dims(m_mask, -1)
m_length = tf.reduce_sum(m_mask, 1, name='m_length') # [N]
prev_u = tf.tile(tf.expand_dims(u, 1), [1, M, 1]) # [N, M, d]
reg_layer = VectorReductionLayer(N, M, d) if use_vector_gate else ReductionLayer(N, M, d)
gate_size = d if use_vector_gate else 1
h = None # [N, M, d]