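# Imports assumed by this snippet; it uses the Keras 1.x API (merge(), nb_epoch,
# Model(input=..., output=...)). The kaggle module is the project's own helper for
# model filenames and (de)serialization. qin/ain/qenc/attn, the X*/Y* arrays, and the
# MODEL_DIR / BATCH_SIZE / NBR_EPOCHS / QA_EMBED_SIZE / seq_maxlen constants are
# defined earlier in the script.
import os
import sys

from keras.callbacks import ModelCheckpoint
from keras.layers import Dense, Flatten, Reshape, merge
from keras.models import Model

import kaggle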
# Reshape the attention output to the question encoding's shape, merge the two
# element-wise, and classify the (question, answer) pair as correct/incorrect.
attn = Reshape((seq_maxlen, QA_EMBED_SIZE))(attn)
qenc_attn = merge([qenc, attn], mode="sum")
qenc_attn = Flatten()(qenc_attn)
output = Dense(2, activation="softmax")(qenc_attn)

model = Model(input=[qin, ain], output=[output])
print("Compiling model...")
model.compile(optimizer="adam", loss="categorical_crossentropy",
              metrics=["accuracy"])
print("Training...")
# checkpoint the weights whenever the validation loss improves
best_model_filename = os.path.join(MODEL_DIR,
                                   kaggle.get_model_filename(sys.argv[0], "best"))
checkpoint = ModelCheckpoint(filepath=best_model_filename,
                             verbose=1, save_best_only=True)
model.fit([Xqtrain, Xatrain], [Ytrain], batch_size=BATCH_SIZE,
          nb_epoch=NBR_EPOCHS, validation_split=0.1,
          callbacks=[checkpoint])
print("Evaluation...")
loss, acc = model.evaluate([Xqtest, Xatest], [Ytest], batch_size=BATCH_SIZE)
print("Test loss/accuracy final model = %.4f, %.4f" % (loss, acc))
# persist the trained model via the project's kaggle helper (JSON spec + final weights)
final_model_filename = os.path.join(MODEL_DIR,
                                    kaggle.get_model_filename(sys.argv[0], "final"))
json_model_filename = os.path.join(MODEL_DIR,
                                   kaggle.get_model_filename(sys.argv[0], "json"))
kaggle.save_model(model, json_model_filename, final_model_filename)
# reload the checkpointed best weights and report their test performance for comparison
best_model = kaggle.load_model(json_model_filename, best_model_filename)
best_model.compile(optimizer="adam", loss="categorical_crossentropy",
                   metrics=["accuracy"])
loss, acc = best_model.evaluate([Xqtest, Xatest], [Ytest], batch_size=BATCH_SIZE)
print("Test loss/accuracy best model = %.4f, %.4f" % (loss, acc))
print("Training...")
best_model_filename = os.path.join(MODEL_DIR,
kaggle.get_model_filename(sys.argv[0], "best"))
checkpoint = ModelCheckpoint(filepath=best_model_filename,
verbose=1, save_best_only=True)
model.fit([Xqtrain, Xatrain], [Ytrain], batch_size=BATCH_SIZE,
nb_epoch=NBR_EPOCHS, validation_split=0.1,
callbacks=[checkpoint])
print("Evaluation...")
loss, acc = model.evaluate([Xqtest, Xatest], [Ytest], batch_size=BATCH_SIZE)
print("Test loss/accuracy final model = %.4f, %.4f" % (loss, acc))
final_model_filename = os.path.join(MODEL_DIR,
kaggle.get_model_filename(sys.argv[0], "final"))
json_model_filename = os.path.join(MODEL_DIR,
kaggle.get_model_filename(sys.argv[0], "json"))
kaggle.save_model(model, json_model_filename, final_model_filename)
best_model = kaggle.load_model(json_model_filename, best_model_filename)
best_model.compile(optimizer="adam", loss="categorical_crossentropy",
metrics=["accuracy"])
loss, acc = best_model.evaluate([Xqtest, Xatest], [Ytest], batch_size=BATCH_SIZE)
print("Test loss/accuracy best model = %.4f, %.4f" % (loss, acc))