// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
export async function loadGraphModel(modelName: string):
Promise {
if (inNodeJS()) {
// tslint:disable-next-line:no-require-imports
const fileSystem = require('@tensorflow/tfjs-node/dist/io/file_system');
return tfconverter.loadGraphModel(
fileSystem.fileSystem(`./data/${modelName}/model.json`));
} else {
return tfconverter.loadGraphModel(
`${DATA_SERVER_ROOT}/${modelName}/model.json`);
}
}
/**
 * Trains the intent model on the dataset at `dataPath` and saves the model
 * plus its metadata to `outputFolder`.
 *
 * @param dataPath Path to a JSON file containing `xsArr` and `ysArr`.
 * @param metadataPath Path to a JSON file with `xsShape`, `ysShape`, `labels`.
 * @param outputFolder Directory to write the saved model and metadata into.
 * @param epochs Number of training epochs.
 * @param validationSplit Fraction of the data held out for validation.
 */
async function run(
dataPath: string, metadataPath: string, outputFolder: string,
epochs: number, validationSplit = 0.15) {
const {xsArr, ysArr} = loadJSON(dataPath);
const metadata = loadJSON(metadataPath);
const xs = tf.tensor(xsArr, metadata.xsShape);
const ys = tf.tensor(ysArr, metadata.ysShape);
const model = getModel(metadata.labels);
// We use model.fit as the whole dataset comfortably fits in memory.
await model.fit(xs, ys, {epochs, validationSplit});
// Release tensor memory once training is done; tfjs tensors are not
// garbage-collected automatically.
xs.dispose();
ys.dispose();
mkdirp(outputFolder);
await model.save(fileIO.fileSystem(outputFolder));
const metaOutPath = path.resolve(outputFolder, 'intent_metadata.json');
const metadataStr = JSON.stringify(metadata, null, 2);
fs.writeFileSync(metaOutPath, metadataStr, {encoding: 'utf8'});
}
embeddingsPath,
taggedTokensPath,
modelOpts.sequenceLength,
trainingOpts.batchSize,
);
const dataset = {iterator: dataIterator};
const model = getModel(modelOpts);
console.log('Start training', trainingOpts.epochs);
await model.fitDataset(dataset, {
epochs: trainingOpts.epochs,
});
mkdirp(outFolder);
console.log(`Saving model to ${outFolder}`);
await model.save(fileIO.fileSystem(outFolder));
// Write out the related metadata
const metaOutPath = path.resolve(outFolder, 'tagger_metadata.json');
const metadata = {
labels: TAGS,
sequenceLength: modelOpts.sequenceLength,
embeddingSize: EMBEDDING_SIZE,
};
const metadataStr = JSON.stringify(metadata, null, 2);
fs.writeFileSync(metaOutPath, metadataStr, {encoding: 'utf8'});
}