# First excerpt: a script-style training entry point. The callbacks list built
# just above has been truncated; `hparams`, `dataset`, `utils`, `build_model`,
# `model_path`, `initial_epoch`, `tensorboard`, and `callbacks` are assumed to
# be defined by the surrounding script.
import click
import larq as lq
import tensorflow as tf
# Keep the input pipelines on the CPU so the accelerator stays free for training.
with tf.device("/cpu:0"):
    train_data = dataset.train_data(hparams.batch_size)
    validation_data = dataset.validation_data(hparams.batch_size)

# Build and compile the model inside the distribution-strategy scope.
with utils.get_distribution_scope(hparams.batch_size):
    model = build_model(hparams, **dataset.preprocessing.kwargs)
    model.compile(
        optimizer=hparams.optimizer,
        loss="categorical_crossentropy",
        metrics=["categorical_accuracy", "top_k_categorical_accuracy"],
    )

# Print the Larq model summary.
lq.models.summary(model)

if initial_epoch > 0:
    # Resume from a saved checkpoint when restarting an interrupted run.
    model.load_weights(model_path)
    click.echo(f"Loaded model from epoch {initial_epoch}")

model.fit(
    train_data,
    epochs=hparams.epochs,
    steps_per_epoch=dataset.train_examples // hparams.batch_size,
    validation_data=validation_data,
    validation_steps=dataset.validation_examples // hparams.batch_size,
    verbose=2 if tensorboard else 1,  # one line per epoch when logging to TensorBoard
    initial_epoch=initial_epoch,
    callbacks=callbacks,
)
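Both excerpts compile the model under `utils.get_distribution_scope(batch_size)`, a helper that is not shown in this listing. As a rough illustration only, a helper like it could return a `tf.distribute.MirroredStrategy` scope when several GPUs are visible and a no-op context otherwise; the sketch below is hypothetical and is not the project's actual utility.

import contextlib

import tensorflow as tf


def get_distribution_scope(batch_size):
    """Hypothetical sketch of a distribution-scope helper.

    Not the project's own implementation; it only illustrates the kind of
    context manager the training code above expects. `batch_size` is accepted
    for signature compatibility; a real helper might validate it against the
    number of replicas.
    """
    gpus = tf.config.list_physical_devices("GPU")
    if len(gpus) > 1:
        # Mirror variables across all visible GPUs for synchronous training.
        return tf.distribute.MirroredStrategy().scope()
    # Single GPU or CPU: no special strategy needed.
    return contextlib.nullcontext()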
# Second excerpt: a class-based trainer (uses `math`, `click`, `larq as lq`,
# and `tensorflow as tf`, plus attributes set on `self` by the enclosing
# class). The statement that loads the validation split is truncated here;
# the method name below is an assumption.
validation_data, num_validation_examples = self.dataset.validation(
    decoders=self.preprocessing.decoders
)
# Cache, repeat, preprocess, and batch the validation split.
validation_data = (
    validation_data.cache()
    .repeat()
    .map(self.preprocessing, num_parallel_calls=tf.data.experimental.AUTOTUNE)
    .batch(self.batch_size)
    .prefetch(1)
)

# Compile the model inside the distribution-strategy scope.
with utils.get_distribution_scope(self.batch_size):
    self.model.compile(
        optimizer=self.optimizer, loss=self.loss, metrics=self.metrics,
    )

# Print the Larq model summary.
lq.models.summary(self.model)

if initial_epoch > 0:
    # Resume from a saved checkpoint when restarting an interrupted run.
    self.model.load_weights(str(self.model_path))
    print(f"Loaded model from epoch {initial_epoch}.")

click.secho(str(self))

self.model.fit(
    train_data,
    epochs=self.epochs,
    steps_per_epoch=math.ceil(num_train_examples / self.batch_size),
    validation_data=validation_data,
    validation_steps=math.ceil(num_validation_examples / self.batch_size),
    validation_freq=self.validation_frequency,
    verbose=1 if self.use_progress_bar else 2,
    initial_epoch=initial_epoch,