assert num_layers in resnext_spec, \
    "Invalid number of layers: %d. Options are %s" % (
        num_layers, str(resnext_spec.keys()))
layers = resnext_spec[num_layers]
net = ResNext(layers, cardinality, bottleneck_width, use_se=use_se, deep_stem=deep_stem,
              avg_down=avg_down, **kwargs)
if pretrained:
    from .model_store import get_model_file
    if not use_se:
        net.load_parameters(get_model_file('resnext%d_%dx%dd' % (num_layers, cardinality,
                                                                 bottleneck_width),
                                           tag=pretrained, root=root), ctx=ctx)
    else:
        net.load_parameters(get_model_file('se_resnext%d_%dx%dd' % (num_layers, cardinality,
                                                                    bottleneck_width),
                                           tag=pretrained, root=root), ctx=ctx)
    from ..data import ImageNet1kAttr
    attrib = ImageNet1kAttr()
    net.synset = attrib.synset
    net.classes = attrib.classes
    net.classes_long = attrib.classes_long
return net
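
# Usage sketch (an addition, not part of the upstream source): these builders
# are normally reached through gluoncv.model_zoo.get_model. The registered
# name 'resnext50_32x4d' follows the 'resnext%d_%dx%dd' pattern above, and
# use_se=True variants are published under the 'se_' prefix.
import mxnet as mx
from gluoncv import model_zoo

net = model_zoo.get_model('resnext50_32x4d', pretrained=True, ctx=mx.cpu())
scores = net(mx.nd.zeros((1, 3, 224, 224)))  # (1, 1000) ImageNet logits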
norm_kwargs : dict
    Additional `norm_layer` arguments, for example `num_devices=4`
    for :class:`mxnet.gluon.contrib.nn.SyncBatchNorm`.
"""
net = MobileNetV2(multiplier, norm_layer=norm_layer, norm_kwargs=norm_kwargs, **kwargs)
if pretrained:
    from .model_store import get_model_file
    version_suffix = '{0:.2f}'.format(multiplier)
    if version_suffix in ('1.00', '0.50'):
        version_suffix = version_suffix[:-1]
    net.load_parameters(get_model_file('mobilenetv2_%s' % version_suffix,
                                       tag=pretrained, root=root), ctx=ctx)
    from ..data import ImageNet1kAttr
    attrib = ImageNet1kAttr()
    net.synset = attrib.synset
    net.classes = attrib.classes
    net.classes_long = attrib.classes_long
return net
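
# Sketch of the suffix normalization above (an addition): '{0:.2f}' renders
# the width multiplier with two decimals, then the trailing zero is trimmed
# for 1.00 and 0.50, so checkpoints are named 'mobilenetv2_1.0' and
# 'mobilenetv2_0.5' while 0.75 and 0.25 keep both decimals.
for multiplier in (1.0, 0.75, 0.5, 0.25):
    suffix = '{0:.2f}'.format(multiplier)
    if suffix in ('1.00', '0.50'):
        suffix = suffix[:-1]
    print('mobilenetv2_%s' % suffix)
# -> mobilenetv2_1.0, mobilenetv2_0.75, mobilenetv2_0.5, mobilenetv2_0.25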
        [5, 576, 96, True, 'hard_swish', 1],
    ]
    cls_ch_squeeze = 576
    cls_ch_expand = 1280
else:
    raise NotImplementedError
net = _MobileNetV3(cfg, cls_ch_squeeze, cls_ch_expand, multiplier=multiplier,
                   final_drop=0.2, norm_layer=norm_layer, **kwargs)
if pretrained:
    from .model_store import get_model_file
    net.load_parameters(get_model_file('mobilenetv3_%s' % model_name,
                                       tag=pretrained, root=root), ctx=ctx)
    from ..data import ImageNet1kAttr
    attrib = ImageNet1kAttr()
    net.synset = attrib.synset
    net.classes = attrib.classes
    net.classes_long = attrib.classes_long
return net
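
# Usage sketch (an addition): model_name selects the config branch above, so
# the registered zoo names follow the 'mobilenetv3_%s' pattern, e.g.
# 'mobilenetv3_large' and 'mobilenetv3_small'.
from gluoncv import model_zoo

net = model_zoo.get_model('mobilenetv3_small', pretrained=True)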
norm_layer : object
    Normalization layer used (default: :class:`mxnet.gluon.nn.BatchNorm`).
    Can be :class:`mxnet.gluon.nn.BatchNorm` or :class:`mxnet.gluon.contrib.nn.SyncBatchNorm`.
last_gamma : bool, default False
    Whether to initialize the gamma of the last BatchNorm layer in each bottleneck to zero.
use_global_stats : bool, default False
    Whether to force BatchNorm to use global statistics instead of minibatch statistics;
    typically set to True when fine-tuning from ImageNet classification pretrained models.
"""
model = ResNetV1b(BasicBlockV1b, [3, 4, 6, 3], name_prefix='resnetv1b_', **kwargs)
if pretrained:
    from .model_store import get_model_file
    model.load_parameters(get_model_file('resnet%d_v%db' % (34, 1),
                                         tag=pretrained, root=root), ctx=ctx)
    from ..data import ImageNet1kAttr
    attrib = ImageNet1kAttr()
    model.synset = attrib.synset
    model.classes = attrib.classes
    model.classes_long = attrib.classes_long
return model
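
# Sketch of what the ImageNet1kAttr fields enable (an addition): mapping an
# argmax index back to a human-readable label. Random input here, so the
# predicted label is arbitrary.
import mxnet as mx
from gluoncv import model_zoo

model = model_zoo.get_model('resnet34_v1b', pretrained=True)
scores = model(mx.nd.random.uniform(shape=(1, 3, 224, 224)))
top1 = int(scores.argmax(axis=1).asscalar())
print(model.classes[top1])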
block_type, layers, channels = resnet_spec[num_layers]
assert 1 <= version <= 2, \
    "Invalid resnet version: %d. Options are 1 and 2." % version
resnet_class = resnet_net_versions[version - 1]
block_class = resnet_block_versions[version - 1][block_type]
net = resnet_class(block_class, layers, channels, use_se=use_se, **kwargs)
if pretrained:
    from .model_store import get_model_file
    if not use_se:
        net.load_parameters(get_model_file('resnet%d_v%d' % (num_layers, version),
                                           tag=pretrained, root=root), ctx=ctx)
    else:
        net.load_parameters(get_model_file('se_resnet%d_v%d' % (num_layers, version),
                                           tag=pretrained, root=root), ctx=ctx)
    from ..data import ImageNet1kAttr
    attrib = ImageNet1kAttr()
    net.synset = attrib.synset
    net.classes = attrib.classes
    net.classes_long = attrib.classes_long
return net
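
# For reference (an addition), a sketch of the lookup tables the dispatch
# above relies on. Shapes follow the upstream GluonCV source; treat the exact
# values as an assumption. Deeper variants (101, 152) follow the bottleneck
# pattern with longer per-stage layer counts.
resnet_spec = {18: ('basic_block', [2, 2, 2, 2], [64, 64, 128, 256, 512]),
               34: ('basic_block', [3, 4, 6, 3], [64, 64, 128, 256, 512]),
               50: ('bottle_neck', [3, 4, 6, 3], [64, 256, 512, 1024, 2048])}
# resnet_net_versions = [ResNetV1, ResNetV2]
# resnet_block_versions = [{'basic_block': BasicBlockV1, 'bottle_neck': BottleneckV1},
#                          {'basic_block': BasicBlockV2, 'bottle_neck': BottleneckV2}]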
    Can be :class:`mxnet.gluon.nn.BatchNorm` or :class:`mxnet.gluon.contrib.nn.SyncBatchNorm`.
norm_kwargs : dict
    Additional `norm_layer` arguments, for example `num_devices=4`
    for :class:`mxnet.gluon.contrib.nn.SyncBatchNorm`.
"""
assert num_layers in resnext_spec, \
    "Invalid number of layers: %d. Options are %s" % (
        num_layers, str(resnext_spec.keys()))
layers = resnext_spec[num_layers]
net = SENet(layers, cardinality, bottleneck_width, avg_down, **kwargs)
if pretrained:
    from .model_store import get_model_file
    net.load_parameters(get_model_file('senet_%d' % (num_layers + 2),
                                       tag=pretrained, root=root), ctx=ctx)
    from ..data import ImageNet1kAttr
    attrib = ImageNet1kAttr()
    net.synset = attrib.synset
    net.classes = attrib.classes
    net.classes_long = attrib.classes_long
return net
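
# Naming note (an addition): the '+ 2' offset turns the layer spec key into
# the published checkpoint name, so a num_layers=152 spec loads 'senet_154',
# matching SENet-154 from the Squeeze-and-Excitation paper.
print('senet_%d' % (152 + 2))  # -> senet_154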
ctx : Context, default CPU
    The context in which to load the pretrained weights.
dilated : bool, default False
    Whether to apply the dilation strategy to ResNetV1b, yielding a stride-8 model.
norm_layer : object
    Normalization layer used (default: :class:`mxnet.gluon.nn.BatchNorm`).
    Can be :class:`mxnet.gluon.nn.BatchNorm` or :class:`mxnet.gluon.contrib.nn.SyncBatchNorm`.
"""
model = ResNetV1b(BottleneckV1b, [3, 4, 23, 3], deep_stem=True,
                  name_prefix='resnetv1c_', **kwargs)
if pretrained:
    from .model_store import get_model_file
    model.load_parameters(get_model_file('resnet%d_v%dc' % (101, 1),
                                         tag=pretrained, root=root), ctx=ctx)
    from ..data import ImageNet1kAttr
    attrib = ImageNet1kAttr()
    model.synset = attrib.synset
    model.classes = attrib.classes
    model.classes_long = attrib.classes_long
return model
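
# Usage sketch for the dilated option documented above (an addition): with
# dilated=True the backbone keeps an output stride of 8, the form that dense
# prediction tasks such as segmentation typically consume. The keyword is
# forwarded to ResNetV1b through **kwargs.
from gluoncv import model_zoo

backbone = model_zoo.get_model('resnet101_v1c', pretrained=True, dilated=True)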
                          num_sync_bn_devices=num_sync_bn_devices,
                          **kwargs)
if pretrained:
    from gluoncv.model_zoo.model_store import get_model_file
    version_suffix = '{0:.2f}'.format(multiplier)
    if version_suffix in ('1.00', '0.50'):
        version_suffix = version_suffix[:-1]
    net.load_parameters(get_model_file('mobilenet%s' % version_suffix,
                                       tag=pretrained, root=root), ctx=ctx)
    from gluoncv.data import ImageNet1kAttr
    attrib = ImageNet1kAttr()
    net.synset = attrib.synset
    net.classes = attrib.classes
    net.classes_long = attrib.classes_long
return net
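
# Naming note (an addition): the same suffix trimming as MobileNetV2 applies,
# but the v1 pattern above ('mobilenet%s') omits the underscore, giving zoo
# names like 'mobilenet1.0' and 'mobilenet0.5'.
from gluoncv import model_zoo

net = model_zoo.get_model('mobilenet0.5', pretrained=True)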
ctx : Context, default CPU
    The context in which to load the pretrained weights.
dilated : bool, default False
    Whether to apply the dilation strategy to ResNetV1b, yielding a stride-8 model.
norm_layer : object
    Normalization layer used (default: :class:`mxnet.gluon.nn.BatchNorm`).
    Can be :class:`mxnet.gluon.nn.BatchNorm` or :class:`mxnet.gluon.contrib.nn.SyncBatchNorm`.
"""
model = ResNetV1b(BottleneckV1b, [3, 4, 6, 3], deep_stem=True, avg_down=True,
                  name_prefix='resnetv1d_', **kwargs)
if pretrained:
    from .model_store import get_model_file
    model.load_parameters(get_model_file('resnet%d_v%dd' % (50, 1),
                                         tag=pretrained, root=root), ctx=ctx)
    from ..data import ImageNet1kAttr
    attrib = ImageNet1kAttr()
    model.synset = attrib.synset
    model.classes = attrib.classes
    model.classes_long = attrib.classes_long
return model
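
# Putting the three ResNetV1b variants above side by side (an addition, per
# the flags passed to ResNetV1b): v1b uses the plain 7x7 stem; v1c sets
# deep_stem=True, replacing the 7x7 with three 3x3 convolutions; v1d
# additionally sets avg_down=True, average-pooling before the 1x1 convolution
# in downsampling shortcuts.
from gluoncv import model_zoo

net = model_zoo.get_model('resnet50_v1d', pretrained=True)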