# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment of an encoder-building loop — the enclosing def/__init__
# is not visible in this chunk and indentation has been stripped, so this span is
# documented in place rather than rewritten. Presumably part of an
# encoder-decoder builder on top of torch.nn — confirm against the full file.
# Consume the per-part configs so the remaining kwargs act as shared defaults.
block_args = kwargs.pop('blocks')
downsample_args = kwargs.pop('downsample')
# Separate containers for processing ('b') and downsampling ('d'/'p') layers.
self.encoder_b, self.encoder_d = nn.ModuleList(), nn.ModuleList()
for i in range(num_stages):
# One layer per letter of the layout string, for every stage.
for letter in encoder_layout:
if letter in ['b']:
# Merge priority: shared kwargs < block defaults < stage-specific values.
args = {**kwargs, **block_args, **unpack_args(block_args, i, num_stages)}
layer = ConvBlock(inputs=inputs, **args)
# Apply the layer eagerly so the next layer is built against the new shape.
inputs = layer(inputs)
self.encoder_b.append(layer)
elif letter in ['d', 'p']:
args = {**kwargs, **downsample_args, **unpack_args(downsample_args, i, num_stages)}
layer = ConvBlock(inputs=inputs, **args)
inputs = layer(inputs)
self.encoder_d.append(layer)
elif letter in ['s']:
# 's' is a build-time no-op — presumably a skip marker; TODO confirm.
pass
else:
raise ValueError('Unknown letter in order {}, use one of "b", "d", "p", "s"'
.format(letter))
# NOTE(review): this span splices two unrelated fragments — `skip_layer = True`
# closes some branch, and the orphan `elif` chain below belongs to a TF1-based
# per-symbol layout dispatcher whose `if`/`for` header is not visible here.
# Indentation was lost in extraction; left byte-identical with comments only.
skip_layer = True
elif layer == 'm':
# 'm' layer is configured only by depth — exact op unknown from here; TODO confirm.
args = dict(depth=kwargs.get('depth'), data_format=data_format)
elif layer in ['b', 'B', 'N', 'X']:
# These upsampling-style symbols share factor/shape configuration.
args = dict(factor=kwargs.get('factor'), shape=kwargs.get('shape'), data_format=data_format)
if kwargs.get('upsampling_layout'):
args['layout'] = kwargs.get('upsampling_layout')
else:
raise ValueError('Unknown layer symbol - %s' % layer)
if not skip_layer:
# Apply per-layer overrides, then unpack per-occurrence values before calling.
args = {**args, **layer_args}
args = unpack_args(args, *layout_dict[C_GROUPS[layer]])
with tf.variable_scope('layer-%d' % i):
tensor = layer_fn(tensor, **args)
# Stable alias so the output can be looked up by name in the graph.
tensor = tf.identity(tensor, name='_output')
if context is not None:
# Manually close a context entered earlier, outside this fragment.
context.__exit__(None, None, None)
return tensor
# NOTE(review): byte-for-byte duplicate of the preceding spliced fragment —
# almost certainly a paste/extraction artifact; deduplicate upstream. The
# orphan `elif` chain's `if`/`for` header is not visible in this chunk.
skip_layer = True
elif layer == 'm':
# 'm' layer is configured only by depth — exact op unknown from here; TODO confirm.
args = dict(depth=kwargs.get('depth'), data_format=data_format)
elif layer in ['b', 'B', 'N', 'X']:
# These upsampling-style symbols share factor/shape configuration.
args = dict(factor=kwargs.get('factor'), shape=kwargs.get('shape'), data_format=data_format)
if kwargs.get('upsampling_layout'):
args['layout'] = kwargs.get('upsampling_layout')
else:
raise ValueError('Unknown layer symbol - %s' % layer)
if not skip_layer:
# Apply per-layer overrides, then unpack per-occurrence values before calling.
args = {**args, **layer_args}
args = unpack_args(args, *layout_dict[C_GROUPS[layer]])
with tf.variable_scope('layer-%d' % i):
tensor = layer_fn(tensor, **args)
# Stable alias so the output can be looked up by name in the graph.
tensor = tf.identity(tensor, name='_output')
if context is not None:
# Manually close a context entered earlier, outside this fragment.
context.__exit__(None, None, None)
return tensor
def fill_layer_params(self, layer_name, layer_class, inputs, counters):
    """Collect constructor arguments for `layer_class` from this instance.

    For every positional parameter of ``layer_class.__init__`` (except
    ``self``), take the value from an instance attribute when one exists,
    otherwise from ``self.kwargs`` when present there; parameters found in
    neither place are omitted. ``inputs`` is injected when the constructor
    accepts it, per-layer overrides from ``self.kwargs[layer_name]`` win
    over the collected values, and the merged dict is passed through
    ``unpack_args`` with the current counters before being returned.
    """
    # Positional parameter names of the target constructor, minus 'self'.
    wanted = inspect.getfullargspec(layer_class.__init__)[0]
    wanted.remove('self')

    # Instance attributes take precedence over entries in self.kwargs.
    args = {}
    for name in wanted:
        if hasattr(self, name):
            args[name] = getattr(self, name)
        elif name in self.kwargs:
            args[name] = self.kwargs.get(name, None)

    if 'inputs' in wanted:
        args['inputs'] = inputs

    # Per-layer overrides selected by the counters, then a final unpack.
    overrides = unpack_args(self.kwargs, *counters).get(layer_name, {})
    args.update(overrides)
    return unpack_args(args, *counters)
def fill_layer_params(self, layer_name, layer_class, inputs, counters):
    """Resolve the keyword arguments to construct `layer_class` with.

    Values come from instance attributes first, then from ``self.kwargs``;
    constructor parameters available in neither source are left out.
    ``inputs`` is supplied when the constructor declares it. Entries under
    ``self.kwargs[layer_name]`` (after counter-based unpacking) override the
    collected values, and the final mapping is unpacked once more with the
    same counters.
    """

    # Resolve a single parameter: attribute wins, kwargs is the fallback.
    def _lookup(name):
        return getattr(self, name) if hasattr(self, name) else self.kwargs.get(name, None)

    # Positional parameters of the constructor, excluding 'self'.
    accepted = inspect.getfullargspec(layer_class.__init__)[0]
    accepted.remove('self')

    collected = {name: _lookup(name)
                 for name in accepted
                 if hasattr(self, name) or name in self.kwargs}
    if 'inputs' in accepted:
        collected['inputs'] = inputs

    # Layer-specific overrides beat everything gathered above.
    per_layer = unpack_args(self.kwargs, *counters).get(layer_name, {})
    merged = {**collected, **per_layer}
    return unpack_args(merged, *counters)
# NOTE(review): body fragment of an Xception-style network builder — the
# enclosing classmethod signature (providing cls, inputs, entry, middle,
# exit, kwargs) is not visible in this chunk, and indentation was stripped.
x = inputs
# Entry flow: downsample the inputs
with tf.variable_scope('entry'):
entry_stages = entry.pop('num_stages', 0)
for i in range(entry_stages):
with tf.variable_scope('group-'+str(i)):
# Merge priority: shared kwargs < flow config < stage-specific values.
args = {**kwargs, **entry, **unpack_args(entry, i, entry_stages)}
x = cls.block(x, name='block-'+str(i), **args)
# Named alias so each entry group's output is addressable in the graph.
x = tf.identity(x, name='output')
# Middle flow: thorough processing
with tf.variable_scope('middle'):
middle_stages = middle.pop('num_stages', 0)
for i in range(middle_stages):
args = {**kwargs, **middle, **unpack_args(middle, i, middle_stages)}
x = cls.block(x, name='block-'+str(i), **args)
# Exit flow: final increase in number of feature maps
with tf.variable_scope('exit'):
# NOTE(review): the parameter `exit` shadows the builtin — consider renaming upstream.
exit_stages = exit.pop('num_stages', 0)
for i in range(exit_stages):
args = {**kwargs, **exit, **unpack_args(exit, i, exit_stages)}
x = cls.block(x, name='block-'+str(i), **args)
return x
def fill_layer_params(self, layer_name, layer_class, inputs, counters):
""" Inspect which parameters should be passed to the layer and get them from instance. """
# NOTE(review): third identical copy of this method in the visible chunk —
# likely a paste/extraction artifact; deduplicate upstream.
# Positional parameter names of the layer's constructor, minus 'self'.
layer_params = inspect.getfullargspec(layer_class.__init__)[0]
layer_params.remove('self')
# Instance attributes win over self.kwargs; params found in neither are skipped.
args = {param: getattr(self, param) if hasattr(self, param) else self.kwargs.get(param, None)
for param in layer_params
if (hasattr(self, param) or (param in self.kwargs))}
if 'inputs' in layer_params:
args['inputs'] = inputs
# Per-layer overrides selected by the counters, then a final unpack of the merge.
layer_args = unpack_args(self.kwargs, *counters)
layer_args = layer_args.get(layer_name, {})
args = {**args, **layer_args}
args = unpack_args(args, *counters)
return args
# NOTE(review): fragment of a layout-execution loop — the trailing branches are
# cut off at the end of this chunk and indentation has been stripped. First
# pass counts occurrences per layer group; second pass dispatches per symbol.
for layer in layout:
if C_GROUPS[layer] not in layout_dict:
# [current occurrence index, total count]; index starts at -1 so the
# first increment below lands on 0.
layout_dict[C_GROUPS[layer]] = [-1, 0]
layout_dict[C_GROUPS[layer]][1] += 1
residuals = []
tensor = inputs
for i, layer in enumerate(layout):
layout_dict[C_GROUPS[layer]][0] += 1
layer_name = C_LAYERS[layer]
layer_fn = FUNC_LAYERS[layer_name]
if layer == 'a':
# Activation: unpacked per occurrence; None disables it for this position.
args = dict(activation=activation)
layer_fn = unpack_args(args, *layout_dict[C_GROUPS[layer]])['activation']
if layer_fn is not None:
tensor = layer_fn(tensor)
elif layer == 'R':
# Push the current tensor as a residual/skip source.
residuals += [tensor]
elif layer == 'A':
# Push an upsampled skip source (resize-bilinear-additive).
args = dict(factor=kwargs.get('factor'), data_format=data_format)
args = unpack_args(args, *layout_dict[C_GROUPS[layer]])
t = FUNC_LAYERS['resize_bilinear_additive'](tensor, **args, name='rba-%d' % i)
residuals += [t]
elif layer == '+':
# Pop the most recent residual and add it elementwise.
tensor = tensor + residuals[-1]
residuals = residuals[:-1]
elif layer == '.':
# Pop the most recent residual and concatenate along the channel axis.
axis = -1 if data_format == 'channels_last' else 1
tensor = tf.concat([tensor, residuals[-1]], axis=axis, name='concat-%d' % i)
residuals = residuals[:-1]