def encode_decode_1(self, x, reuse=False):
    with tf.variable_scope("encode_decode_1") as scope:
        if reuse:
            scope.reuse_variables()

        conv1 = lrelu(instance_norm(conv2d(x, output_dim=64, k_w=5, k_h=5, d_w=1, d_h=1, name='e_c1'), scope='e_in1'))
        conv2 = lrelu(instance_norm(conv2d(conv1, output_dim=128, name='e_c2'), scope='e_in2'))
        conv3 = lrelu(instance_norm(conv2d(conv2, output_dim=256, name='e_c3'), scope='e_in3'))

        # for x_{1}
        de_conv1 = lrelu(instance_norm(de_conv(conv3, output_shape=[self.batch_size, 64, 64, 128],
                                               name='e_d1', k_h=3, k_w=3), scope='e_in4'))
        de_conv2 = lrelu(instance_norm(de_conv(de_conv1, output_shape=[self.batch_size, 128, 128, 64],
                                               name='e_d2', k_w=3, k_h=3), scope='e_in5'))
        x_tilde1 = conv2d(de_conv2, output_dim=3, d_h=1, d_w=1, name='e_c4')

        return x_tilde1
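# The snippets on this page call small op wrappers (lrelu, conv2d, de_conv, instance_norm)
# that are defined elsewhere in the repository. Below is a minimal TF 1.x-style sketch of
# those helpers with the keyword signatures the snippets use (output_dim, k_w/k_h, d_w/d_h,
# scope). It is an assumed stand-in for illustration, not the repository's exact code.
import tensorflow as tf

def lrelu(x, leak=0.2):
    # leaky ReLU; the 0.2 slope is an assumed default
    return tf.maximum(x, leak * x)

def conv2d(x, output_dim, k_h=5, k_w=5, d_h=2, d_w=2, padding='SAME', name='conv2d'):
    # strided 2-D convolution with a learned bias
    with tf.variable_scope(name):
        w = tf.get_variable('w', [k_h, k_w, x.get_shape()[-1], output_dim],
                            initializer=tf.truncated_normal_initializer(stddev=0.02))
        b = tf.get_variable('b', [output_dim], initializer=tf.constant_initializer(0.0))
        return tf.nn.conv2d(x, w, strides=[1, d_h, d_w, 1], padding=padding) + b

def de_conv(x, output_shape, k_h=4, k_w=4, d_h=2, d_w=2, name='deconv2d'):
    # transposed convolution to an explicit NHWC output_shape
    with tf.variable_scope(name):
        w = tf.get_variable('w', [k_h, k_w, output_shape[-1], x.get_shape()[-1]],
                            initializer=tf.truncated_normal_initializer(stddev=0.02))
        b = tf.get_variable('b', [output_shape[-1]], initializer=tf.constant_initializer(0.0))
        return tf.nn.conv2d_transpose(x, w, output_shape=output_shape,
                                      strides=[1, d_h, d_w, 1]) + b

def instance_norm(x, scope='instance_norm'):
    # per-sample, per-channel normalization with a learned scale and offset
    with tf.variable_scope(scope):
        depth = x.get_shape()[-1]
        scale = tf.get_variable('scale', [depth],
                                initializer=tf.random_normal_initializer(1.0, 0.02))
        offset = tf.get_variable('offset', [depth], initializer=tf.constant_initializer(0.0))
        mean, variance = tf.nn.moments(x, axes=[1, 2], keep_dims=True)
        return scale * (x - mean) * tf.rsqrt(variance + 1e-5) + offset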
def encode_decode(self, x_var, x_exemplar, img_mask, exemplar_mask, reuse=False):
    with tf.variable_scope("encode_decode") as scope:
        if reuse:
            scope.reuse_variables()

        x_var = tf.concat([x_var, img_mask, x_exemplar, exemplar_mask], axis=3)

        conv1 = tf.nn.relu(
            instance_norm(conv2d(x_var, output_dim=64, k_w=7, k_h=7, d_w=1, d_h=1, name='e_c1'), scope='e_in1'))
        conv2 = tf.nn.relu(
            instance_norm(conv2d(conv1, output_dim=128, k_w=4, k_h=4, d_w=2, d_h=2, name='e_c2'), scope='e_in2'))
        conv3 = tf.nn.relu(
            instance_norm(conv2d(conv2, output_dim=256, k_w=4, k_h=4, d_w=2, d_h=2, name='e_c3'), scope='e_in3'))

        r1 = Residual(conv3, residual_name='re_1')
        r2 = Residual(r1, residual_name='re_2')
        r3 = Residual(r2, residual_name='re_3')
        r4 = Residual(r3, residual_name='re_4')
        r5 = Residual(r4, residual_name='re_5')
        r6 = Residual(r5, residual_name='re_6')

        g_deconv1 = tf.nn.relu(instance_norm(de_conv(r6, output_shape=[self.batch_size,
                               self.output_size // 2, self.output_size // 2, 128], name='gen_deconv1'), scope="gen_in"))
        # for 1
        g_deconv_1_1 = tf.nn.relu(instance_norm(de_conv(g_deconv1,
                                  output_shape=[self.batch_size, self.output_size, self.output_size, 32], name='g_deconv_1_1'), scope='gen_in_1_1'))
        g_deconv_1_1_x = tf.concat([g_deconv_1_1, x_var], axis=3)
        x_tilde1 = conv2d(g_deconv_1_1_x, output_dim=self.channel, k_w=7, k_h=7, d_h=1, d_w=1, name='gen_conv_1_2')

        return x_tilde1  # assumed single output
def residule_block(x, dim, ks=3, s=1, name='res'):
    p = int((ks - 1) / 2)
    y = tf.pad(x, [[0, 0], [p, p], [p, p], [0, 0]], "CONSTANT")
    y = instance_norm(conv2d(y, dim, ks, s, padding='VALID', name=name+'_c1'), name+'_in1')
    y = tf.pad(tf.nn.relu(y), [[0, 0], [p, p], [p, p], [0, 0]], "CONSTANT")
    y = instance_norm(conv2d(y, dim, ks, s, padding='VALID', name=name+'_c2'), name+'_in2')
    return y + x
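# The encoder/decoder snippets also chain a `Residual(x, residual_name=...)` helper that is
# not shown on this page. A minimal sketch, assuming it follows the same two-conv pattern as
# residule_block above and reusing the keyword-style conv2d/instance_norm stand-ins sketched
# earlier (signature and defaults are assumptions):
def Residual(x, output_dims=256, kernel=3, strides=1, residual_name='resi'):
    with tf.variable_scope(residual_name):
        y = tf.nn.relu(instance_norm(conv2d(x, output_dim=output_dims, k_h=kernel, k_w=kernel,
                                            d_h=strides, d_w=strides, name='conv1'), scope='in1'))
        y = instance_norm(conv2d(y, output_dim=output_dims, k_h=kernel, k_w=kernel,
                                 d_h=strides, d_w=strides, name='conv2'), scope='in2')
        # identity skip connection; assumes x already has output_dims channels
        return x + y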
def encode_decode(self, x, sn=64, reuse=False):
    print(sn)
    with tf.variable_scope("encode_decode") as scope:
        if reuse:
            scope.reuse_variables()

        conv1 = tf.nn.relu(
            instance_norm(conv2d(x, output_dim=sn, k_w=7, k_h=7, d_w=1, d_h=1, name='e_c1'), scope='e_in1'))
        conv2 = tf.nn.relu(
            instance_norm(conv2d(conv1, output_dim=sn*2, k_w=4, k_h=4, d_w=2, d_h=2, name='e_c2'), scope='e_in2'))
        conv3 = tf.nn.relu(
            instance_norm(conv2d(conv2, output_dim=sn*4, k_w=4, k_h=4, d_w=2, d_h=2, name='e_c3'), scope='e_in3'))

        r1 = Residual(conv3, residual_name='re_1')
        r2 = Residual(r1, residual_name='re_2')
        r3 = Residual(r2, residual_name='re_3')
        r4 = Residual(r3, residual_name='re_4')
        r5 = Residual(r4, residual_name='re_5')
        r6 = Residual(r5, residual_name='re_6')

        g_deconv1 = tf.nn.relu(instance_norm(de_conv(r6, output_shape=[self.batch_size,
                               self.output_size // 2, self.output_size // 2, sn*2], name='gen_deconv1'), scope="gen_in"))
        # for 1
        g_deconv_1_1 = tf.nn.relu(instance_norm(de_conv(g_deconv1,
                                  output_shape=[self.batch_size, self.output_size, self.output_size, sn], name='g_deconv_1_1'), scope='gen_in_1_1'))
        # Refined residual image learning
        g_deconv_1_1_x = tf.concat([g_deconv_1_1, x], axis=3)
        x_tilde1 = conv2d(g_deconv_1_1_x, output_dim=self.channel, k_w=7, k_h=7, d_h=1, d_w=1, name='gen_conv_1_2')

        # for 2
        g_deconv_2_1 = tf.nn.relu(instance_norm(de_conv(g_deconv1,
                                  output_shape=[self.batch_size, self.output_size, self.output_size, sn], name='g_deconv_2_1'), scope='gen_in_2_1'))
        g_deconv_2_1_x = tf.concat([g_deconv_2_1, x], axis=3)
        x_tilde2 = conv2d(g_deconv_2_1_x, output_dim=self.channel, k_w=7, k_h=7, d_h=1, d_w=1, name='gen_conv_2_2')

        return x_tilde1, x_tilde2  # assumed return of both branch outputs
def encode2(self, x, reuse=False):
    with tf.variable_scope("encode") as scope:
        if reuse:
            scope.reuse_variables()

        conv1 = tf.nn.relu(
            instance_norm(conv2d(x, output_dim=32, k_w=7, k_h=7, d_w=1, d_h=1, name='e_c1'), scope='e_in1'))
        conv2 = tf.nn.relu(
            instance_norm(conv2d(conv1, output_dim=64, k_w=4, k_h=4, d_w=2, d_h=2, name='e_c2'), scope='e_in2'))
        conv3 = tf.nn.relu(
            instance_norm(conv2d(conv2, output_dim=128, k_w=4, k_h=4, d_w=2, d_h=2, name='e_c3'), scope='e_in3'))
        conv4 = tf.nn.relu(
            instance_norm(conv2d(conv3, output_dim=128, k_w=4, k_h=4, d_w=2, d_h=2, name='e_c4'), scope='e_in4'))

        bottleneck = tf.reshape(conv4, [self.batch_size, -1])
        content = fully_connect(bottleneck, output_size=128, scope='e_ful1')
        #rotation = fully_connect(bottleneck, output_size=1, scope='e_ful2')

        return content#, rotation
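# encode2 additionally relies on a `fully_connect` helper. A minimal TF 1.x-style sketch
# with an assumed signature and initialization:
def fully_connect(x, output_size, scope='fully_connect'):
    with tf.variable_scope(scope):
        w = tf.get_variable('w', [x.get_shape()[-1], output_size],
                            initializer=tf.random_normal_initializer(stddev=0.02))
        b = tf.get_variable('b', [output_size], initializer=tf.constant_initializer(0.0))
        return tf.matmul(x, w) + b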
def generator(images, options, reuse=False, name='gen'):
    # reuse or not
    with tf.variable_scope(name):
        if reuse:
            tf.get_variable_scope().reuse_variables()
        else:
            assert tf.get_variable_scope().reuse is False

        # down sampling
        x = relu(instance_norm(conv2d(images, options.nf, ks=7, s=1, name='gen_ds_conv1'), 'in1_1'))
        x = relu(instance_norm(conv2d(x, 2*options.nf, ks=4, s=2, name='gen_ds_conv2'), 'in1_2'))
        x = relu(instance_norm(conv2d(x, 4*options.nf, ks=4, s=2, name='gen_ds_conv3'), 'in1_3'))

        # bottleneck
        x = relu(instance_norm(conv2d(x, 4*options.nf, ks=3, s=1, name='gen_bn_conv1'), 'in2_1'))
        x = relu(instance_norm(conv2d(x, 4*options.nf, ks=3, s=1, name='gen_bn_conv2'), 'in2_2'))
        x = relu(instance_norm(conv2d(x, 4*options.nf, ks=3, s=1, name='gen_bn_conv3'), 'in2_3'))
        x = relu(instance_norm(conv2d(x, 4*options.nf, ks=3, s=1, name='gen_bn_conv4'), 'in2_4'))
        x = relu(instance_norm(conv2d(x, 4*options.nf, ks=3, s=1, name='gen_bn_conv5'), 'in2_5'))
        x = relu(instance_norm(conv2d(x, 4*options.nf, ks=3, s=1, name='gen_bn_conv6'), 'in2_6'))

        # up sampling
        x = relu(instance_norm(deconv2d(x, 2*options.nf, ks=4, s=2, name='gen_us_deconv1'), 'in3_1'))
        x = relu(instance_norm(deconv2d(x, options.nf, ks=4, s=2, name='gen_us_deconv2'), 'in3_2'))
        x = tanh(deconv2d(x, 3, ks=7, s=1, name='gen_us_dwconv3'))

        return x
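# residule_block and generator above use a second helper convention, conv2d(x, dim, ks, s, ...)
# and deconv2d(x, dim, ks, s, ...) plus bare relu/tanh, which differs from the keyword-style
# ops sketched near the top of this page. A minimal sketch of that convention (assumed,
# CycleGAN-style; not the repository's exact implementation):
import tensorflow as tf

def relu(x):
    return tf.nn.relu(x)

def tanh(x):
    return tf.nn.tanh(x)

def conv2d(x, dim, ks=4, s=2, padding='SAME', name='conv2d'):
    # strided convolution with kernel size ks and stride s
    return tf.layers.conv2d(x, dim, ks, strides=(s, s), padding=padding, name=name,
                            kernel_initializer=tf.truncated_normal_initializer(stddev=0.02))

def deconv2d(x, dim, ks=4, s=2, name='deconv2d'):
    # transposed convolution; output spatial size is inferred, so no explicit output_shape
    return tf.layers.conv2d_transpose(x, dim, ks, strides=(s, s), padding='SAME', name=name,
                                      kernel_initializer=tf.truncated_normal_initializer(stddev=0.02))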