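import pickle  # needed for pickle.dump at the end of the snippet

# Assumed context (not shown in this excerpt): `trainer`, `optimizer`, `model`,
# `report_progress`, `nb_epoch`, `epoch_loss`, `train_X`/`train_y` and
# `dev_X`/`dev_y` are defined earlier in the original script, and
# `to_categorical` is imported from Thinc's utilities (thinc.neural.util in
# the v6/v7-style API this snippet appears to use).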
# Configure the training loop: an end-of-epoch callback plus hyperparameters.
trainer.each_epoch.append(report_progress)
trainer.nb_epoch = nb_epoch
trainer.dropout = 0.3
trainer.batch_size = 128
trainer.dropout_decay = 0.0

# Prepare the data: cast inputs to float32 and one-hot encode the labels.
train_X = model.ops.asarray(train_X, dtype="float32")
y_onehot = to_categorical(train_y)
# Iterate over minibatches: run the forward pass, compute the mean squared
# error, and backpropagate the gradient of the predictions.
for X, y in trainer.iterate(train_X, y_onehot):
    yh, backprop = model.begin_update(X, drop=trainer.dropout)
    loss = ((yh - y) ** 2.0).sum() / y.shape[0]
    backprop(yh - y, optimizer)
    epoch_loss[-1] += loss
# Evaluate on the dev set and serialize the model while the optimizer's
# averaged parameters are swapped in.
with model.use_params(optimizer.averages):
    print("Avg dev.: %.3f" % model.evaluate(dev_X, dev_y))
    with open("out.pickle", "wb") as file_:
        pickle.dump(model, file_, -1)