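# Imports assumed by the example snippets below (module paths as in
# braindecode 0.4.x; later braindecode versions reorganize these modules):
import time

import torch as th
from torch import optim
from torch.nn import functional as F

from braindecode.datautil.iterators import (BalancedBatchSizeIterator,
                                            CropsFromTrialsIterator)
from braindecode.datautil.signal_target import SignalAndTarget
from braindecode.experiments.experiment import Experiment
from braindecode.experiments.monitors import (CroppedTrialMisclassMonitor,
                                              LossMonitor, MisclassMonitor,
                                              RuntimeMonitor)
from braindecode.experiments.stopcriteria import MaxEpochs, NoDecrease, Or
from braindecode.torch_ext.constraints import MaxNormDefaultConstraint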
# Example 1: cropped-decoding training loop.
# Assumption: the keyword arguments below belong to a CropsFromTrialsIterator,
# the braindecode iterator that yields crops from trials for cropped training.
iterator = CropsFromTrialsIterator(batch_size=batch_size,
                                   input_time_length=input_time_length,
                                   n_preds_per_input=n_preds_per_input)
# Stop when the epoch budget is exhausted or validation misclassification has
# not improved for max_increase_epochs epochs.
stop_criterion = Or([MaxEpochs(max_epochs),
                     NoDecrease('valid_misclass', max_increase_epochs)])
# Track per-crop ("sample") misclassification and trial-wise misclassification
# recomputed from the cropped predictions.
monitors = [LossMonitor(), MisclassMonitor(col_suffix='sample_misclass'),
            CroppedTrialMisclassMonitor(
                input_time_length=input_time_length), RuntimeMonitor()]
model_constraint = MaxNormDefaultConstraint()
# Average the per-crop predictions over the time dimension before the NLL loss.
loss_function = lambda preds, targets: F.nll_loss(
    th.mean(preds, dim=2, keepdim=False), targets)
# Remember the parameters of the best validation epoch; after early stopping,
# continue training on the combined training and validation data.
exp = Experiment(model, train_set, valid_set, test_set, iterator=iterator,
                 loss_function=loss_function, optimizer=optimizer,
                 model_constraint=model_constraint,
                 monitors=monitors,
                 stop_criterion=stop_criterion,
                 remember_best_column='valid_misclass',
                 run_after_early_stop=True, cuda=cuda)
exp.run()
return exp
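# After exp.run(), the per-epoch monitor values are collected in the pandas
# DataFrame exp.epochs_df (braindecode 0.4.x), e.g. print(exp.epochs_df).

# Example 2: evaluation-only setup.  MaxEpochs(0) means the Experiment does no
# training; it is constructed just to run the monitors on a single dataset.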
stop_criterion = MaxEpochs(0)  # no training, only the epoch-0 monitoring pass
train_set = SignalAndTarget(X, y)
model_constraint = None
valid_set = None
test_set = None
loss_function = self.loss
if self.cropped:
    # In cropped mode, average the per-crop predictions over the time
    # dimension before applying the loss.
    loss_function = lambda outputs, targets: self.loss(
        th.mean(outputs, dim=2), targets
    )
# Reset the runtime monitor, if present, so runtimes are not measured relative
# to an earlier run.
for monitor in self.monitors:
    if hasattr(monitor, "last_call_time"):
        monitor.last_call_time = time.time()
exp = Experiment(
    self.network,
    train_set,
    valid_set,
    test_set,
    iterator=self.iterator,
    loss_function=loss_function,
    optimizer=self.optimizer,
    model_constraint=model_constraint,
    monitors=self.monitors,
    stop_criterion=stop_criterion,
    remember_best_column=None,
    run_after_early_stop=False,
    cuda=self.cuda,
    log_0_epoch=True,
    do_early_stop=False,
)
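# Example 3: trial-wise training with Adam, balanced batches, early stopping
# on validation misclassification, and a second training phase on train+valid
# (run_after_early_stop=True).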
optimizer = optim.Adam(model.parameters())
iterator = BalancedBatchSizeIterator(batch_size=batch_size)
stop_criterion = Or(
    [
        MaxEpochs(max_epochs),
        NoDecrease("valid_misclass", max_increase_epochs),
    ]
)
monitors = [LossMonitor(), MisclassMonitor(), RuntimeMonitor()]
model_constraint = MaxNormDefaultConstraint()
exp = Experiment(
    model,
    train_set,
    valid_set,
    test_set,
    iterator=iterator,
    loss_function=F.nll_loss,
    optimizer=optimizer,
    model_constraint=model_constraint,
    monitors=monitors,
    stop_criterion=stop_criterion,
    remember_best_column="valid_misclass",
    run_after_early_stop=True,
    cuda=cuda,
)
exp.run()
return exp
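# Example 4: model-class training setup.  Build the validation set and the
# monitor list (cropped vs. trial-wise), then construct the Experiment.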
if validation_data is not None:
    valid_X = _ensure_float32(validation_data[0])
    valid_y = validation_data[1]
    valid_set = SignalAndTarget(valid_X, valid_y)
else:
    valid_set = None
test_set = None
# Loss is always monitored; the misclassification monitor depends on whether
# the network is trained in cropped or trial-wise mode.
self.monitors = [LossMonitor()]
if self.cropped:
    self.monitors.append(CroppedTrialMisclassMonitor(input_time_length))
else:
    self.monitors.append(MisclassMonitor())
if self.extra_monitors is not None:
    self.monitors.extend(self.extra_monitors)
self.monitors.append(RuntimeMonitor())
exp = Experiment(
    self.network,
    train_set,
    valid_set,
    test_set,
    iterator=self.iterator,
    loss_function=loss_function,
    optimizer=optimizer,
    model_constraint=model_constraint,
    monitors=self.monitors,
    stop_criterion=stop_criterion,
    remember_best_column=remember_best_column,
    run_after_early_stop=False,
    cuda=self.cuda,
    log_0_epoch=log_0_epoch,
    do_early_stop=(remember_best_column is not None),
)