# ShuffleNet logits; this snippet follows tensorpack's examples/ImageNetModels/shufflenet.py.
def get_logits(self, image):
    with argscope([Conv2D, MaxPooling, AvgPooling, GlobalAvgPooling, BatchNorm],
                  data_format='channels_first'), \
            argscope(Conv2D, use_bias=False):

        group = args.group
        if not args.v2:
            # Copied from the paper
            channels = {
                3: [240, 480, 960],
                4: [272, 544, 1088],
                8: [384, 768, 1536]
            }
            mul = group * 4  # #chan has to be a multiple of this number
            channels = [int(math.ceil(x * args.ratio / mul) * mul)
                        for x in channels[group]]
            # The first channel must be a multiple of group
            first_chan = int(math.ceil(24 * args.ratio / group) * group)
        else:
            # ShuffleNetV2 branch, restored from the upstream example
            # (the channel table below is assumed from that source)
            channels = {
                0.5: [48, 96, 192],
                1.: [116, 232, 464]
            }[args.ratio]
            first_chan = 24
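
The ceil-to-multiple rounding above keeps every stage width divisible by mul = group * 4 even for fractional width multipliers. A quick worked check (group=8 and ratio=0.5 are assumed values, not taken from the snippet):

import math

group, ratio = 8, 0.5
mul = group * 4                            # 32
base = [384, 768, 1536]                    # the group=8 row of the table
channels = [int(math.ceil(x * ratio / mul) * mul) for x in base]
first_chan = int(math.ceil(24 * ratio / group) * group)
print(channels, first_chan)                # [192, 384, 768] 16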
# Backbone scope from tensorpack's FasterRCNN example. The @contextmanager
# decorator is implied by the trailing yield (it is present in the upstream source).
@contextmanager
def backbone_scope(freeze):
    """
    Args:
        freeze (bool): whether to freeze all the variables under the scope
    """
    def nonlin(x):
        x = get_norm()(x)
        return tf.nn.relu(x)

    with argscope([Conv2D, MaxPooling, BatchNorm], data_format='channels_first'), \
            argscope(Conv2D, use_bias=False, activation=nonlin,
                     kernel_initializer=tf.variance_scaling_initializer(
                         scale=2.0, mode='fan_out')), \
            ExitStack() as stack:
        if cfg.BACKBONE.NORM in ['FreezeBN', 'SyncBN']:
            if freeze or cfg.BACKBONE.NORM == 'FreezeBN':
                stack.enter_context(argscope(BatchNorm, training=False))
            else:
                stack.enter_context(argscope(
                    BatchNorm, sync_statistics='nccl' if cfg.TRAINER == 'replicated' else 'horovod'))
        if freeze:
            stack.enter_context(freeze_variables(stop_gradient=False, skip_collection=True))
        else:
            # the layers are not completely frozen, but we may want to freeze only the affine
            if cfg.BACKBONE.FREEZE_AFFINE:
                stack.enter_context(custom_getter_scope(freeze_affine_getter))
        yield
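
Because it ends in a bare yield, backbone_scope is entered with a with statement around the layers it should affect. A minimal, hypothetical call-site sketch (the layer calls and the FREEZE_AT condition below are illustrative, not from the snippet):

# Illustrative only: build a possibly-frozen stem under one scope,
# then the trainable remainder under another.
with backbone_scope(freeze=cfg.BACKBONE.FREEZE_AT > 0):
    l = Conv2D('conv0', image, 64, 7, strides=2)
    l = MaxPooling('pool0', l, 3, strides=2, padding='SAME')
with backbone_scope(freeze=False):
    l = resnet_group(l, 'group0', resnet_bottleneck, 64, 3, 1)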
# ResNet-50 logits; note this snippet uses an older tensorpack API
# (nl=/W_init=/shape=/stride= rather than activation=/kernel_initializer=/...).
def get_logits(self, image):
    group_func = resnet_group
    block_func = resnet_bottleneck
    num_blocks = [3, 4, 6, 3]
    with argscope(
            [Conv2D, MaxPooling, GlobalAvgPooling, BatchNorm],
            data_format='NCHW'), \
            argscope(Conv2D, nl=tf.identity, use_bias=False,
                     W_init=tf.variance_scaling_initializer(scale=2.0, mode='fan_out')):
        logits = (LinearWrap(image)
                  .Conv2D('conv0', 64, 7, stride=2, nl=BNReLU)
                  .MaxPooling('pool0', shape=3, stride=2, padding='SAME')
                  .apply(group_func, 'group0', block_func, 64, num_blocks[0], 1)
                  .apply(group_func, 'group1', block_func, 128, num_blocks[1], 2)
                  .apply(group_func, 'group2', block_func, 256, num_blocks[2], 2)
                  .apply(group_func, 'group3', block_func, 512, num_blocks[3], 2)
                  .GlobalAvgPooling('gap')
                  .FullyConnected('linear', 1000, nl=tf.identity)())
    return logits
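
num_blocks = [3, 4, 6, 3] is the ResNet-50 configuration; deeper variants change only this list. For reference, the standard bottleneck depths (well-known values, not part of the snippet above):

# Standard bottleneck-ResNet depth -> blocks-per-group table.
RESNET_NUM_BLOCKS = {
    50: [3, 4, 6, 3],
    101: [3, 4, 23, 3],
    152: [3, 8, 36, 3],
}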
# AlexNet logits; follows tensorpack's examples/ImageNetModels/alexnet.py.
def get_logits(self, image):
    gauss_init = tf.random_normal_initializer(stddev=0.01)
    with argscope(Conv2D,
                  kernel_initializer=tf.variance_scaling_initializer(scale=2.)), \
            argscope([Conv2D, FullyConnected], activation=tf.nn.relu), \
            argscope([Conv2D, MaxPooling], data_format='channels_last'):
        # necessary padding to get 55x55 after conv1
        image = tf.pad(image, [[0, 0], [2, 2], [2, 2], [0, 0]])
        l = Conv2D('conv1', image, filters=96, kernel_size=11, strides=4, padding='VALID')
        # size: 55
        visualize_conv1_weights(l.variables.W)
        l = tf.nn.lrn(l, 2, bias=1.0, alpha=2e-5, beta=0.75, name='norm1')
        l = MaxPooling('pool1', l, 3, strides=2, padding='VALID')
        # 27
        l = Conv2D('conv2', l, filters=256, kernel_size=5, split=2)
        l = tf.nn.lrn(l, 2, bias=1.0, alpha=2e-5, beta=0.75, name='norm2')
        l = MaxPooling('pool2', l, 3, strides=2, padding='VALID')
        # 13
        l = Conv2D('conv3', l, filters=384, kernel_size=3)
        l = Conv2D('conv4', l, filters=384, kernel_size=3, split=2)
        l = Conv2D('conv5', l, filters=256, kernel_size=3, split=2)
        l = MaxPooling('pool3', l, 3, strides=2, padding='VALID')
        # Fully-connected head, restored from the upstream example; without it
        # gauss_init is unused and nothing is returned. Exact layer details assumed.
        l = FullyConnected('fc6', l, 4096,
                           kernel_initializer=gauss_init,
                           bias_initializer=tf.ones_initializer())
        l = Dropout(l, rate=0.5)
        l = FullyConnected('fc7', l, 4096,
                           kernel_initializer=gauss_init,
                           bias_initializer=tf.ones_initializer())
        l = Dropout(l, rate=0.5)
    logits = FullyConnected('fc8', l, 1000, kernel_initializer=gauss_init)
    return logits
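
The size comments above (55, 27, 13) follow from the VALID-padding output formula floor((n - k) / s) + 1. A quick check, assuming a 224x224 input:

# Verify the spatial-size comments in the snippet (224x224 input assumed).
def valid_out(n, k, s):
    return (n - k) // s + 1

n = 224 + 2 + 2            # tf.pad adds 2 pixels per side -> 228
n = valid_out(n, 11, 4)    # conv1 -> 55
n = valid_out(n, 3, 2)     # pool1 -> 27
n = valid_out(n, 3, 2)     # pool2 -> 13
n = valid_out(n, 3, 2)     # pool3 -> 6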