# Example 1: the tail of a model definition whose span predictor chains two
# recurrent layers to predict the answer start and end bounds (the rest of the
# model construction is truncated in the original snippet).
    predictor=BoundsPredictor(
        ChainBiMapper(
            first_layer=recurrent_layer,
            second_layer=recurrent_layer
        ),
    )
)

# Keep the source of this training script as the run's notes
with open(__file__, "r") as f:
    notes = f.read()

# Load SQuAD and batch it by context length (batch size 45)
corpus = SquadCorpus()
train_batching = ClusteredBatcher(45, ContextLenBucketedKey(3), True, False)
eval_batching = ClusteredBatcher(45, ContextLenKey(), False, False)
data = DocumentQaTrainingData(corpus, None, train_batching, eval_batching)

# Track the loss and length-bounded span predictions during training
eval = [LossEvaluator(), BoundedSquadSpanEvaluator(bound=[17])]
trainer.start_training(data, model, train_params, eval, model_dir.ModelDir(out), notes)
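
# --- Added sketch (not from the original snippets) ---
# Examples 1 and 3 use `recurrent_layer` without defining it; in the
# document-qa training scripts it is a recurrent SequenceMapper. The import
# path, class names, and hidden size below are assumptions, so check
# docqa.nn.recurrent_layers in your checkout for the layers actually available.
from docqa.nn.recurrent_layers import BiRecurrentMapper, GruCellSpec  # assumed module path

recurrent_layer = BiRecurrentMapper(GruCellSpec(100))  # hypothetical bidirectional GRU mapper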

# Example 2: a context-only baseline model. The snippet opens with the tail of
# a truncated train_params = TrainParams(...) definition.
    save_period=900, eval_samples=dict(train=6000, dev=6000))

# The model predicts the answer span from the context alone: GloVe 100d word
# embeddings, a 50-unit fully connected layer, and a bounds predictor with no
# question-context interaction (NullBiMapper).
model = ContextOnly(
    DocumentAndQuestionEncoder(SingleSpanAnswerEncoder()),
    FixedWordEmbedder(vec_name="glove.6B.100d", word_vec_init_scale=0, learn_unk=False),
    None,
    FullyConnected(50),
    BoundsPredictor(NullBiMapper())
)

corpus = SquadCorpus()
train_batching = ClusteredBatcher(45, ContextLenBucketedKey(3), True, False)
eval_batching = ClusteredBatcher(45, ContextLenKey(), False, False)
data = DocumentQaTrainingData(corpus, None, train_batching, eval_batching)
eval = [LossEvaluator(), BoundedSquadSpanEvaluator(bound=[17])]
trainer.start_training(data, model, train_params, eval, model_dir.ModelDir(out), "")
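
# --- Added sketch (not from the original snippets) ---
# Every start_training call above references `train_params`, but only the tail
# of its definition (save_period / eval_samples) survives in the snippets. A
# hypothetical reconstruction is sketched below; the optimizer choice, epoch
# count, eval_period and log_period are assumptions, not values from the original.
train_params = TrainParams(
    SerializableOptimizer("Adadelta", dict(learning_rate=1.0)),  # assumed optimizer settings
    num_epochs=20, eval_period=900, log_period=30,               # assumed schedule
    save_period=900, eval_samples=dict(train=6000, dev=6000))    # values taken from the snippet above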

# Example 3: again the tail of a model whose span predictor chains two
# recurrent layers (the rest of the model construction is truncated).
    predictor=BoundsPredictor(
        ChainBiMapper(
            first_layer=recurrent_layer,
            second_layer=recurrent_layer
        ),
    )
)

with open(__file__, "r") as f:
    notes = f.read()

train_batching = ClusteredBatcher(45, ContextLenBucketedKey(3), True, False)
eval_batching = ClusteredBatcher(45, ContextLenKey(), False, False)
data = DocumentQaTrainingData(SquadCorpus(), None, train_batching, eval_batching)
# This variant adds a SpanProbability evaluator alongside the loss and bounded-span metrics
eval = [LossEvaluator(), SpanProbability(), BoundedSquadSpanEvaluator(bound=[17])]
trainer.start_training(data, model, train_params, eval, trainer.ModelDir(out), notes)
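
# --- Added sketch (not from the original snippets) ---
# `out` (the directory wrapped in ModelDir) is never defined in the snippets.
# Training scripts like these usually take it from the command line; the
# argument name below is an assumption, the argparse usage is standard Python.
import argparse

parser = argparse.ArgumentParser(description="Train a document-qa model on SQuAD")
parser.add_argument("output_dir", help="Directory to write the trained model to")
out = parser.parse_args().output_dir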