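The snippets below are truncated excerpts of `validate` functions from GluonCV-style object-detection and instance-segmentation training scripts; each clips predicted boxes to the image with `gcv.nn.bbox.BBoxClipToImage` before evaluation. They all assume roughly the following imports plus a per-script `split_and_load` helper; the helper shown here is an illustrative assumption, not the exact original.

import time

import numpy as np
from tqdm import tqdm
import gluoncv as gcv


def split_and_load(batch, ctx_list):
    """Assumed helper: place each field of the batch on its matching device."""
    new_batch = []
    for data in batch:
        new_batch.append([x.as_in_context(ctx) for x, ctx in zip(data, ctx_list)])
    return new_batch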
def validate(net, val_data, async_eval_processes, ctx, eval_metric, logger, epoch, best_map, args):
    """Test on validation dataset."""
    clipper = gcv.nn.bbox.BBoxClipToImage()
    eval_metric.reset()
    if not args.disable_hybridization:
        net.hybridize(static_alloc=args.static_alloc)
    tic = time.time()
    for ib, batch in enumerate(val_data):
        batch = split_and_load(batch, ctx_list=ctx)
        det_bboxes = []
        det_ids = []
        det_scores = []
        det_masks = []
        det_infos = []
        for x, im_info in zip(*batch):
            # get prediction results
            ids, scores, bboxes, masks = net(x)
            det_bboxes.append(clipper(bboxes, x))
            det_ids.append(ids)
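The `args.disable_hybridization` and `args.static_alloc` switches above map onto command-line flags of the training script. A sketch of the corresponding argparse definitions, with the help text paraphrased rather than copied from any particular script:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--disable-hybridization', action='store_true',
                    help='Disable hybridization of the network; slower but easier to debug.')
parser.add_argument('--static-alloc', action='store_true',
                    help='Use static memory allocation when hybridizing; faster at the cost of memory.')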
def validate(net, val_data, ctx, eval_metric, size):
    """Test on validation dataset."""
    clipper = gcv.nn.bbox.BBoxClipToImage()
    eval_metric.reset()
    net.hybridize(static_alloc=True)
    with tqdm(total=size) as pbar:
        for ib, batch in enumerate(val_data):
            batch = split_and_load(batch, ctx_list=ctx)
            det_bboxes = []
            det_ids = []
            det_scores = []
            det_masks = []
            det_infos = []
            for x, im_info in zip(*batch):
                # get prediction results
                ids, scores, bboxes, masks = net(x)
                det_bboxes.append(clipper(bboxes, x))
                det_ids.append(ids)
                det_scores.append(scores)
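GluonCV's Mask R-CNN training script continues this loop by also collecting the masks and image info, then updating the metric once per image and advancing the progress bar. The tail below is reconstructed from memory and should be read as a hedged sketch: the score threshold and the `gcv.data.transforms.mask.fill` post-processing may not match the original exactly, and it relies on the `numpy` import sketched at the top of this page.

                det_masks.append(masks)
                det_infos.append(im_info)
            # update metric once per image in the batch
            for det_bbox, det_id, det_score, det_mask, det_info in zip(
                    det_bboxes, det_ids, det_scores, det_masks, det_infos):
                for i in range(det_info.shape[0]):
                    # move everything to numpy and drop padded / low-score detections
                    bbox, cid, score, mask = [a[i].asnumpy() for a in
                                              (det_bbox, det_id, det_score, det_mask)]
                    im_height, im_width, im_scale = det_info[i].asnumpy()
                    valid = np.where((cid >= 0) & (score >= 0.001))[0]
                    # undo the validation-time resize and paste masks back to full size
                    bbox = bbox[valid] / im_scale
                    full_size = (int(round(im_width / im_scale)), int(round(im_height / im_scale)))
                    full_masks = gcv.data.transforms.mask.fill(mask[valid], bbox, full_size)
                    eval_metric.update(bbox, cid[valid], score[valid], full_masks)
            pbar.update(len(ctx))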
def validate(net, val_data, ctx, eval_metric, size):
    """Test on validation dataset."""
    clipper = gcv.nn.bbox.BBoxClipToImage()
    eval_metric.reset()
    net.hybridize(static_alloc=True)
    with tqdm(total=size) as pbar:
        for ib, batch in enumerate(val_data):
            batch = split_and_load(batch, ctx_list=ctx)
            det_bboxes = []
            det_ids = []
            det_scores = []
            gt_bboxes = []
            gt_ids = []
            gt_difficults = []
            for x, y, im_scale in zip(*batch):
                # get prediction results
                ids, scores, bboxes = net(x)
                det_ids.append(ids)
                det_scores.append(scores)
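The detection-style variants go on to clip and rescale the predicted boxes, split the label tensor `y` into ground-truth boxes, class ids and optional difficulty flags, and feed everything to the metric. Below is a hedged reconstruction of that tail, following the column layout used by GluonCV's Faster R-CNN script (`[xmin, ymin, xmax, ymax, id, difficult]`); other scripts may lay out `y` differently.

                # clip to image size, then undo the validation-time resize
                det_bboxes.append(clipper(bboxes, x))
                im_scale = im_scale.reshape((-1)).asscalar()
                det_bboxes[-1] *= im_scale
                # split ground truths out of the label tensor
                gt_ids.append(y.slice_axis(axis=-1, begin=4, end=5))
                gt_bboxes.append(y.slice_axis(axis=-1, begin=0, end=4))
                gt_bboxes[-1] *= im_scale
                gt_difficults.append(
                    y.slice_axis(axis=-1, begin=5, end=6) if y.shape[-1] > 5 else None)
            # update metric with one (prediction, ground-truth) pair per device
            for det_bbox, det_id, det_score, gt_bbox, gt_id, gt_diff in zip(
                    det_bboxes, det_ids, det_scores, gt_bboxes, gt_ids, gt_difficults):
                eval_metric.update(det_bbox, det_id, det_score, gt_bbox, gt_id, gt_diff)
            pbar.update(len(ctx))
    return eval_metric.get()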
def validate(net, val_data, ctx, eval_metric, args):
    """Test on validation dataset."""
    clipper = gcv.nn.bbox.BBoxClipToImage()
    eval_metric.reset()
    if not args.disable_hybridization:
        # input format is different from training, thus rehybridization is needed.
        net.hybridize(static_alloc=args.static_alloc)
    for batch in val_data:
        batch = split_and_load(batch, ctx_list=ctx)
        det_bboxes = []
        det_ids = []
        det_scores = []
        gt_bboxes = []
        gt_ids = []
        gt_difficults = []
        for x, y, im_scale in zip(*batch):
            # get prediction results
            ids, scores, bboxes = net(x)
            det_ids.append(ids)
def validate(net, val_data, ctx, eval_metric):
    """Test on validation dataset."""
    clipper = gcv.nn.bbox.BBoxClipToImage()
    eval_metric.reset()
    net.hybridize(static_alloc=True)
    for batch in val_data:
        batch = split_and_load(batch, ctx_list=ctx)
        det_bboxes = []
        det_ids = []
        det_scores = []
        gt_bboxes = []
        gt_ids = []
        gt_difficults = []
        for x, y, im_scale in zip(*batch):
            # get prediction results
            ids, scores, bboxes = net(x)
            det_ids.append(ids)
            det_scores.append(scores)
            # clip to image size
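Assuming the function finishes with `return eval_metric.get()` as in the reconstruction above, this is roughly how it is invoked at the end of a training epoch in GluonCV-style scripts; `val_dataset`, `epoch` and the choice of metric here are illustrative assumptions.

eval_metric = gcv.utils.metrics.voc_detection.VOC07MApMetric(
    iou_thresh=0.5, class_names=val_dataset.classes)

# run validation and report per-class AP plus overall mAP
map_name, mean_ap = validate(net, val_data, ctx, eval_metric)
val_msg = '\n'.join('%s=%f' % (k, v) for k, v in zip(map_name, mean_ap))
print('[Epoch %d] Validation:\n%s' % (epoch, val_msg))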
def validate(net, val_data, ctx, eval_metric, args):
    """Test on validation dataset."""
    clipper = gcv.nn.bbox.BBoxClipToImage()
    eval_metric.reset()
    net.hybridize(static_alloc=True)
    for ib, batch in enumerate(val_data):
        batch = split_and_load(batch, ctx_list=ctx)
        det_bboxes = []
        det_ids = []
        det_scores = []
        det_masks = []
        det_segms = []
        det_infos = []
        for x, im_info in zip(*batch):
            # get prediction results
            ids, scores, bboxes, masks, segms = net(x)
            det_bboxes.append(clipper(bboxes, x))
            det_ids.append(ids)
            det_scores.append(scores)
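Since every snippet here revolves around `BBoxClipToImage`, here is a minimal self-contained sketch of the block on its own: it clamps corner-format box coordinates so they lie inside the spatial extent of the image tensor passed alongside them. The tensors are made up for illustration.

import mxnet as mx
import gluoncv as gcv

clipper = gcv.nn.bbox.BBoxClipToImage()
# one dummy 10x10 RGB image and one box that spills past its borders
image = mx.nd.zeros((1, 3, 10, 10))
boxes = mx.nd.array([[[-3.0, 2.0, 14.5, 11.0]]])  # shape (batch, num_boxes, 4)
clipped = clipper(boxes, image)
print(clipped)  # negative coordinates become 0; oversized ones are clamped to the image border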