# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def get_dataloader(val_dataset, data_shape, batch_size, num_workers, ctx):
    """Build a validation DataLoader with variable-length labels padded.

    Parameters
    ----------
    val_dataset : mxnet.gluon.data.Dataset
        Validation dataset (already transformed upstream).
    data_shape : int
        Square input size; unused here, kept for call-site compatibility.
    batch_size : int
        Samples per batch.
    num_workers : int
        Worker processes for data loading.
    ctx : mxnet.Context
        Unused here; kept for call-site compatibility.

    Returns
    -------
    mxnet.gluon.data.DataLoader
        Loader yielding (stacked images, padded labels) batches.
    """
    # Stack images into one array; pad per-image label arrays to a common
    # length with -1 so downstream metrics can mask the filler entries.
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    # Use last_batch='keep' so the final partial batch is still evaluated;
    # 'rollover' would silently defer those samples to a "next epoch" that
    # never happens during one-shot validation (matches the other
    # validation loaders in this file).
    val_loader = gluon.data.DataLoader(
        val_dataset, batch_size, False, batchify_fn=val_batchify_fn,
        last_batch='keep', num_workers=num_workers)
    return val_loader
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size, num_workers):
    """Build YOLOv3 train/val DataLoaders.

    Training uses random multi-scale inputs (320..608, step 32) via
    RandomTransformDataLoader; validation uses a fixed-size transform.

    Parameters
    ----------
    net : mxnet.gluon.HybridBlock
        YOLOv3 network; its anchors/strides drive target generation.
    train_dataset, val_dataset : mxnet.gluon.data.Dataset
        Raw detection datasets.
    data_shape : int
        Square input size used for the validation transform.
    batch_size : int
        Samples per batch.
    num_workers : int
        Worker processes for data loading.

    Returns
    -------
    (DataLoader, DataLoader)
        Train and validation loaders.
    """
    width, height = data_shape, data_shape
    # The train transform emits the image plus 6 fixed-shape target arrays
    # (stacked) and the variable-length ground-truth boxes (padded with -1).
    batchify_fn = Tuple(*([Stack() for _ in range(6)] + [Pad(axis=0, pad_val=-1) for _ in range(1)]))
    # Multi-scale training: resample the input size every `interval` batches.
    transform_fns = [YOLO3DefaultTrainTransform(x * 32, x * 32, net) for x in range(10, 20)]
    train_loader = RandomTransformDataLoader(
        transform_fns, train_dataset, batch_size=batch_size, interval=10, last_batch='rollover',
        shuffle=True, batchify_fn=batchify_fn, num_workers=num_workers)
    # Validation: stack images, pad labels; 'keep' so every sample is scored.
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(YOLO3DefaultValTransform(width, height)),
        batch_size, False, batchify_fn=val_batchify_fn, last_batch='keep', num_workers=num_workers)
    return train_loader, val_loader
#
# Because the number of objects varies a lot across images, we also have
# varying label sizes. As a result, we need to pad those labels to the same size.
# To deal with this problem, GluonCV provides :py:class:`gluoncv.data.batchify.Pad`,
# which handles padding automatically.
# :py:class:`gluoncv.data.batchify.Stack` in addition, is used to stack NDArrays with consistent shapes.
# :py:class:`gluoncv.data.batchify.Tuple` is used to handle different behaviors across multiple outputs from transform functions.
from gluoncv.data.batchify import Tuple, Stack, Pad
from mxnet.gluon.data import DataLoader
# Tutorial-sized loader settings.
batch_size = 2  # deliberately small for the tutorial
num_workers = 0  # raise this on multi-core CPUs to speed up loading
# Batchify behavior: stack images into one array, pad the variable-length
# label arrays with -1 so they share a common shape.
batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
train_loader = DataLoader(
    train_dataset.transform(train_transform),
    batch_size,
    shuffle=True,
    batchify_fn=batchify_fn,
    last_batch='rollover',
    num_workers=num_workers)
val_loader = DataLoader(
    val_dataset.transform(val_transform),
    batch_size,
    shuffle=False,
    batchify_fn=batchify_fn,
    last_batch='keep',
    num_workers=num_workers)
# Peek at the first few batches to show the stacked/padded shapes.
for batch_idx, batch in enumerate(train_loader):
    if batch_idx > 3:
        break
    print('data 0:', batch[0][0].shape, 'label 0:', batch[1][0].shape)
    print('data 1:', batch[0][1].shape, 'label 1:', batch[1][1].shape)
##########################################################
# YOLOv3 Network
# -------------------
# GluonCV's YOLOv3 implementation is a composite Gluon HybridBlock.
# In terms of structure, YOLOv3 networks are composed of base feature extraction
def get_dataloader(val_dataset, data_shape, batch_size, num_workers):
    """Build the SSD validation DataLoader.

    Parameters
    ----------
    val_dataset : mxnet.gluon.data.Dataset
        Raw validation dataset; the SSD default val transform is applied.
    data_shape : int
        Square input size fed to the transform.
    batch_size : int
        Samples per batch.
    num_workers : int
        Worker processes for data loading.

    Returns
    -------
    mxnet.gluon.data.DataLoader
        Loader yielding (stacked images, padded labels) batches.
    """
    width, height = data_shape, data_shape
    # Stack images; pad variable-length labels with -1 for masking.
    batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    # last_batch='keep' so the final partial batch is still evaluated;
    # 'rollover' would silently defer those samples and bias the metric
    # (matches the other validation loaders in this file).
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(SSDDefaultValTransform(width, height)), batchify_fn=batchify_fn,
        batch_size=batch_size, shuffle=False, last_batch='keep', num_workers=num_workers)
    return val_loader
x * 32,
x * 32,
net,
mixup=args.mixup) for x in range(
10,
20)]
train_loader = RandomTransformDataLoader(
transform_fns,
train_dataset,
batch_size=batch_size,
interval=10,
last_batch='rollover',
shuffle=True,
batchify_fn=batchify_fn,
num_workers=num_workers)
val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
val_loader = gluon.data.DataLoader(
val_dataset.transform(
YOLO3DefaultValTransform(
width,
height)),
batch_size,
False,
batchify_fn=val_batchify_fn,
last_batch='keep',
num_workers=num_workers)
return train_loader, val_loader
pipelines = [SSDDALIPipeline(device_id=device_id, batch_size=batch_size,
data_shape=data_shape, anchors=anchors,
num_workers=num_workers,
dataset_reader = train_dataset[i]) for i, device_id in enumerate(devices)]
epoch_size = train_dataset[0].size()
if horovod:
epoch_size //= hvd.size()
train_loader = DALIGenericIterator(pipelines, [('data', DALIGenericIterator.DATA_TAG),
('bboxes', DALIGenericIterator.LABEL_TAG),
('label', DALIGenericIterator.LABEL_TAG)],
epoch_size, auto_reset=True)
# validation
if (not horovod or hvd.rank() == 0):
val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
val_loader = gluon.data.DataLoader(
val_dataset.transform(SSDDefaultValTransform(width, height)),
global_batch_size, False, batchify_fn=val_batchify_fn, last_batch='keep', num_workers=num_workers)
else:
val_loader = None
return train_loader, val_loader
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size, num_workers, args):
    """Build YOLOv3 train/val DataLoaders.

    Training either uses a fixed input size (``args.no_random_shape``) or
    random multi-scale inputs of 320..608 pixels in steps of 32.
    Validation always uses the fixed ``data_shape`` transform.
    Returns a ``(train_loader, val_loader)`` pair.
    """
    width, height = data_shape, data_shape
    # The train transform yields the image plus six fixed-shape target
    # arrays (stacked) and the variable-length ground-truth boxes
    # (padded along axis 0 with -1).
    stacked_parts = [Stack() for _ in range(6)]
    padded_parts = [Pad(axis=0, pad_val=-1)]
    batchify_fn = Tuple(*(stacked_parts + padded_parts))
    if args.no_random_shape:
        fixed_transform = YOLO3DefaultTrainTransform(width, height, net, mixup=args.mixup)
        train_loader = gluon.data.DataLoader(
            train_dataset.transform(fixed_transform),
            batch_size, True, batchify_fn=batchify_fn,
            last_batch='rollover', num_workers=num_workers)
    else:
        # One transform per candidate input size; the loader re-picks a
        # size every `interval` batches.
        sizes = [scale * 32 for scale in range(10, 20)]
        transform_fns = [
            YOLO3DefaultTrainTransform(size, size, net, mixup=args.mixup)
            for size in sizes
        ]
        train_loader = RandomTransformDataLoader(
            transform_fns, train_dataset, batch_size=batch_size,
            interval=10, last_batch='rollover', shuffle=True,
            batchify_fn=batchify_fn, num_workers=num_workers)
    # Validation: stack images, pad labels; 'keep' scores every sample.
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(YOLO3DefaultValTransform(width, height)),
        batch_size, False, batchify_fn=val_batchify_fn,
        last_batch='keep', num_workers=num_workers)
    return train_loader, val_loader
def get_dataloader(val_dataset, data_shape, batch_size, num_workers):
    """Build the YOLOv3 validation DataLoader.

    Parameters
    ----------
    val_dataset : mxnet.gluon.data.Dataset
        Raw validation dataset; the YOLOv3 default val transform is applied.
    data_shape : int
        Square input size fed to the transform.
    batch_size : int
        Samples per batch.
    num_workers : int
        Worker processes for data loading.

    Returns
    -------
    mxnet.gluon.data.DataLoader
        Loader yielding (stacked images, padded labels) batches.
    """
    width, height = data_shape, data_shape
    # Stack images; pad variable-length labels with -1 for masking.
    batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    # last_batch='keep' so the final partial batch is still evaluated;
    # 'rollover' would silently defer those samples during a one-shot
    # evaluation (matches the other validation loaders in this file).
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(YOLO3DefaultValTransform(width, height)),
        batch_size, False, last_batch='keep', num_workers=num_workers, batchify_fn=batchify_fn)
    return val_loader