# NOTE(review): the line below was non-code residue from a web code-viewer
# ("Secure your code as it's written. Use Snyk Code to scan source code in
# minutes - no build needed - and fix issues immediately.") — kept as a
# comment so it does not break the module.
StaticAttentionSelf(TriLinear(bias=True), ConcatWithProduct()),
FullyConnected(100 * 2, activation="relu"),
)),
VariationalDropoutLayer(0.8)),
predictor=BoundsPredictor(ChainBiMapper(
first_layer=recurrent_layer,
second_layer=recurrent_layer
), span_predictor=ForwardSpansOnly())
)
# Capture this script's own source text as the experiment "notes" so the
# exact configuration is stored alongside the trained model.
with open(__file__, "r", encoding="utf-8") as f:
    notes = f.read()

# Batches of 45; presumably clustered by context length (bucketed into 3
# buckets for training) — TODO confirm ClusteredBatcher flag semantics.
# Training batching shuffles (True), evaluation batching is deterministic.
train_batching = ClusteredBatcher(45, ContextLenBucketedKey(3), True, False)
eval_batching = ClusteredBatcher(45, ContextLenKey(), False, False)
data = DocumentQaTrainingData(SquadCorpus(), None, train_batching, eval_batching)

# Renamed from `eval` to avoid shadowing the builtin of the same name.
evaluators = [LossEvaluator(), SpanProbability(), SpanEvaluator(bound=[17], text_eval="squad")]
trainer.start_training(data, model, params, evaluators, model_dir.ModelDir(out), notes, None)
)),
VariationalDropoutLayer(0.8)),
predictor=BoundsPredictor(
ChainBiMapper(
first_layer=recurrent_layer,
second_layer=recurrent_layer
),
)
)
# Capture this script's own source text as the experiment "notes" so the
# exact configuration is stored alongside the trained model.
with open(__file__, "r", encoding="utf-8") as f:
    notes = f.read()

# Batches of 45; presumably clustered by context length (bucketed into 3
# buckets for training) — TODO confirm ClusteredBatcher flag semantics.
# Training batching shuffles (True), evaluation batching is deterministic.
train_batching = ClusteredBatcher(45, ContextLenBucketedKey(3), True, False)
eval_batching = ClusteredBatcher(45, ContextLenKey(), False, False)
data = DocumentQaTrainingData(SquadCorpus(), None, train_batching, eval_batching)

# Renamed from `eval` to avoid shadowing the builtin of the same name.
evaluators = [LossEvaluator(), SpanProbability(), BoundedSquadSpanEvaluator(bound=[17])]
trainer.start_training(data, model, train_params, evaluators, trainer.ModelDir(out), notes)
dropout,
),
predictor=BoundsPredictor(
ChainBiMapper(
first_layer=recurrent_layer,
second_layer=recurrent_layer
),
)
)
# Capture this script's own source text as the experiment "notes" so the
# exact configuration is stored alongside the trained model.
with open(__file__, "r", encoding="utf-8") as f:
    notes = f.read()

corpus = SquadCorpus()

# Batches of 45; presumably clustered by context length (bucketed into 3
# buckets for training) — TODO confirm ClusteredBatcher flag semantics.
# Training batching shuffles (True), evaluation batching is deterministic.
train_batching = ClusteredBatcher(45, ContextLenBucketedKey(3), True, False)
eval_batching = ClusteredBatcher(45, ContextLenKey(), False, False)
data = DocumentQaTrainingData(corpus, None, train_batching, eval_batching)

# Renamed from `eval` to avoid shadowing the builtin of the same name.
evaluators = [LossEvaluator(), BoundedSquadSpanEvaluator(bound=[17])]
trainer.start_training(data, model, train_params, evaluators, model_dir.ModelDir(out), notes)
num_epochs=16, eval_period=900, log_period=30,
async_encoding=5,
save_period=900, eval_samples=dict(train=6000, dev=6000))
# Context-only baseline: the question encoder slot is None, so the model
# predicts answer bounds from the document alone (ablation/sanity baseline).
model = ContextOnly(
    DocumentAndQuestionEncoder(SingleSpanAnswerEncoder()),
    # Frozen 100d GloVe vectors; unknown words are not learned.
    FixedWordEmbedder(vec_name="glove.6B.100d", word_vec_init_scale=0, learn_unk=False),
    None,
    FullyConnected(50),
    BoundsPredictor(NullBiMapper())
)

corpus = SquadCorpus()

# Batches of 45; presumably clustered by context length (bucketed into 3
# buckets for training) — TODO confirm ClusteredBatcher flag semantics.
# Training batching shuffles (True), evaluation batching is deterministic.
train_batching = ClusteredBatcher(45, ContextLenBucketedKey(3), True, False)
eval_batching = ClusteredBatcher(45, ContextLenKey(), False, False)
data = DocumentQaTrainingData(corpus, None, train_batching, eval_batching)

# Renamed from `eval` to avoid shadowing the builtin of the same name.
evaluators = [LossEvaluator(), BoundedSquadSpanEvaluator(bound=[17])]
trainer.start_training(data, model, train_params, evaluators, model_dir.ModelDir(out), "")