import numpy as np
import tensorflow as tf

from babi.base_model import BaseTower, BaseRunner
from my.tensorflow.nn import linear


class Embedder(object):
    def __call__(self, content):
        # Abstract interface: subclasses must implement the embedding lookup.
        raise NotImplementedError()


class VariableEmbedder(Embedder):
    def __init__(self, params, wd=0.0, initializer=None, name="variable_embedder"):
        V, d = params.vocab_size, params.hidden_size
        with tf.variable_scope(name):
            self.emb_mat = tf.get_variable("emb_mat", dtype='float', shape=[V, d], initializer=initializer)
            # TODO : not sure wd is appropriate for embedding matrix
            if wd:
                # L2 weight decay on the embedding matrix, accumulated in the 'losses' collection.
                weight_decay = tf.multiply(tf.nn.l2_loss(self.emb_mat), wd, name='weight_loss')
                tf.add_to_collection('losses', weight_decay)
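
    # Note (assumption about code outside this file): terms added to the 'losses'
    # collection are typically summed into the total training loss elsewhere, e.g.
    #   total_loss = tf.add_n(tf.get_collection('losses'), name='total_loss')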

    def __call__(self, word, name="embedded_content"):
        # Look up the embedding vectors for the given word indices.
        out = tf.nn.embedding_lookup(self.emb_mat, word, name=name)
        return out
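
# Illustrative usage sketch (an assumption, not part of the original module): it
# presumes a `params` config exposing `vocab_size` and `hidden_size`, and integer
# word ids of shape [batch_size, max_sent_size].
#
#   embedder = VariableEmbedder(params, wd=0.0, name="word_emb")
#   word_ids = tf.placeholder('int32', shape=[None, max_sent_size], name="word_ids")
#   embedded = embedder(word_ids)  # -> [batch_size, max_sent_size, hidden_size]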


class PositionEncoder(object):
    def __init__(self, max_sent_size, hidden_size):