// Build the current batch inside tf.tidy so intermediate tensors are released.
const [xs, ys] = tf.tidy(() => {
  const xs = tf.tensor(X.slice(i * batchSize, (i + 1) * batchSize));
  const ys = tf.tensor(y.slice(i * batchSize, (i + 1) * batchSize));
  return [xs, ys];
});
// Train on just this batch and keep its loss for plotting.
const history = await model.fit(xs, ys, {batchSize: batchSize, epochs: 1});
lastBatchLoss = history.history.loss[0];
tf.dispose([xs, ys]);
// Yield to the browser so the UI stays responsive between batches.
await tf.nextFrame();
i++;
}
} catch (err) {
  // End of epoch.
  //console.log("Epoch "+epoch+"/"+epochs+" ended.");
  // Evaluate the model on the full dataset and compute classification accuracy.
  const xs = tf.tensor(X);
  const predTensor = model.predict(xs);
  const pred = predTensor.dataSync();
  tf.dispose([xs, predTensor]);
  updatePredictions(pred);
  const accuracy = _.sum(_.map(_.zip(pred, y), (x) => (Math.round(x[0]) == x[1]) ? 1 : 0)) / pred.length;
  lossValues.push(lastBatchLoss);
  plotLoss(lossValues, accuracy);
}
}
trainState.s = true;
createTrainBttn("train", data);
console.log("End Training");
// Re-enable the form controls that were disabled during training.
d3.select("#modelParameters").selectAll(".form-control").attr('disabled', null);
export function parseTensor(
    tensor: onnx.TensorProto, transpose = true): Tensor {
  const shape = tensor.dims as number[];
  const dtype = parseTensorDtype(tensor);
  const typedArray = parseTensorData(tensor);
  const data = tf.tensor(typedArray, shape, dtype);
  // convert to channelsLast
  // -----------------------------------------------------
  // onnx shape: batchSize, inChannels, inHeight, inWidth
  // tfjs shape: batchSize, inHeight, inWidth, inChannels
  if (transpose) {
    switch (shape.length) {
      case 4:
        return data.transpose([0, 2, 3, 1]);
      case 3:
        return data.transpose([1, 2, 0]);
      default:
        return data;
    }
  }
  return data;
}
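To illustrate the channels-first to channels-last conversion that parseTensor performs, here is a small standalone sketch; the tensor shape is made up for the example.

import * as tf from '@tensorflow/tfjs';

// A 4-D ONNX-style tensor: [batchSize, inChannels, inHeight, inWidth].
const nchw = tf.zeros([1, 3, 28, 28]);

// The same permutation parseTensor applies for rank-4 inputs.
const nhwc = nchw.transpose([0, 2, 3, 1]);
console.log(nhwc.shape);  // [1, 28, 28, 3] -> channels are now the last axis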
return tf.tidy(() => {
  let imageData = tf.tensor(s.imageData);
  let previous = s.previousImageData;
  if (previous) {
    previous = tf.tensor(previous);
  } else {
    previous = tf.zerosLike(imageData);
  }
  // Subtract the previous image, multiplied by 0.5.
  // This makes sure some information about movement is included.
  previous = tf.mul(previous, 0.5);
  imageData = tf.sub(imageData, previous);
  if (side === 'right') {
    // Mirror features
    imageData = tf.reverse2d(imageData, 0);
  }
  return imageData;
});
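To make the frame-differencing step concrete, here is a tiny standalone sketch with made-up values, not part of the original module.

import * as tf from '@tensorflow/tfjs';

// Two consecutive "frames": a pixel that stayed on and one that just turned on.
const current = tf.tensor([1, 1]);
const previous = tf.tensor([1, 0]);

// current - 0.5 * previous: static pixels are damped, new pixels stay strong.
const features = tf.sub(current, tf.mul(previous, 0.5));
features.print();  // [0.5, 1]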
it('can be trained', async () => {
  const x = tf.randomNormal([1, 40, 40, 3]);
  const y = tf.tensor([[0, 1]]);
  // Fit the model on the single example a few times.
  for (let i = 0; i < 5; ++i) {
    await nmodel.fit(x, y);
  }
  // After training, the predicted action should be 1 most of the time.
  const results = [];
  for (let i = 0; i < 10; ++i) {
    results.push(nmodel.predict(x).getAction());
  }
  expect(results.reduce((p, c) => p + c)).to.be.greaterThan(7);
});
});
test(testFeatures, testLabels) {
  const predictions = this.predict(testFeatures);
  // Convert the one-hot labels to class indices so they match the predictions.
  testLabels = tf.tensor(testLabels).argMax(1);
  // Count mismatches; .get() reads the scalar (use .dataSync()[0] on tfjs >= 1.x).
  const incorrect = predictions
    .notEqual(testLabels)
    .sum()
    .get();
  // Accuracy = correct predictions / total predictions.
  return (predictions.shape[0] - incorrect) / predictions.shape[0];
}
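To show how the mismatch count behind this accuracy figure works on its own, here is a small standalone sketch with illustrative class indices, not part of the original class.

import * as tf from '@tensorflow/tfjs';

// Predicted vs. true class indices for four test rows.
const predictions = tf.tensor1d([1, 0, 1, 1], 'int32');
const labels = tf.tensor1d([1, 1, 1, 0], 'int32');

const incorrect = predictions.notEqual(labels).sum().dataSync()[0];
console.log(incorrect);                                              // 2
console.log((predictions.shape[0] - incorrect) / predictions.shape[0]);  // 0.5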
setChromosome(chromosome) {
  // Layer 0: 3x6 weights (genes 0-17) followed by a single shared bias (gene 18).
  let weight = chromosome.slice(0, 3 * 6);
  let bias = chromosome.slice(3 * 6, 3 * 6 + 1);
  this.weights[0].assign(tf.tensor(weight, [3, 6]));
  this.biases[0].assign(tf.tensor(bias[0]));
  // Layer 1: 6x2 weights (genes 19-30) followed by a single shared bias (gene 31).
  weight = chromosome.slice(3 * 6 + 1, 3 * 6 + 1 + 6 * 2);
  bias = chromosome.slice(3 * 6 + 1 + 6 * 2, 3 * 6 + 1 + 6 * 2 + 1);
  this.weights[1].assign(tf.tensor(weight, [6, 2]));
  this.biases[1].assign(tf.tensor(bias[0]));
}
}
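From the slicing above, the network expects a flat chromosome of 3*6 + 1 + 6*2 + 1 = 32 genes. One hypothetical way to build such a chromosome, e.g. when initialising a population; the names and random range are illustrative, not from the original project.

// Total genes: layer-0 weights + bias, then layer-1 weights + bias.
const chromosomeLength = 3 * 6 + 1 + 6 * 2 + 1;  // 32

// A random chromosome with genes in [-1, 1).
const chromosome = Array.from(
    {length: chromosomeLength}, () => Math.random() * 2 - 1);

// network.setChromosome(chromosome);  // `network` is a hypothetical instance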
async transitionsToX(transitions) {
  // Turn each stored transition into a numeric feature row, then stack them into one tensor.
  const x = transitions.map(t => this.stateToArray(t.state, t.side));
  return tf.tensor(x);
}
private calculateMultiVariateCoeff(X, y): number[] {
  // Least-squares fit via QR decomposition: X = QR, so w = R^-1 * Q^T * y.
  const [q, r] = tf.linalg.qr(tf.tensor2d(X));
  const rawR = reshape(Array.from(r.dataSync()), r.shape);
  const validatedR = validateMatrix2D(rawR);
  const weights = tf
    .tensor(numeric.inv(validatedR))
    .dot(q.transpose())
    .dot(tf.tensor(y))
    .dataSync();
  return Array.from(weights);
}
}
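The method relies on the factorisation X = QR with orthonormal Q, so the least-squares problem X·w ≈ y reduces to R·w = Qᵀ·y; inverting the small triangular R then yields the weights. A small standalone check of that identity, with illustrative data and none of the project helpers:

import * as tf from '@tensorflow/tfjs';

// A tiny design matrix: a column of ones (intercept) plus one feature.
const X = tf.tensor2d([[1, 0], [1, 1], [1, 2]]);

const [q, r] = tf.linalg.qr(X);

// Q has orthonormal columns and X factors exactly into Q * R,
// which is why inverting the small triangular R is enough to solve for w.
q.transpose().matMul(q).print();  // ~ identity matrix
q.matMul(r).print();              // ~ the original X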
static _labelData(data:number[][], label_index:number):Tensor[] {
  // A negative index means "use the last column as the label".
  if (label_index < 0) {
    label_index = data[0].length - 1;
  }
  // Copy the rows so removing the label column does not mutate the caller's data.
  const trainXs:number[][] = data.map(row => row.slice());
  const trainYs:number[] = trainXs.map(row => row[label_index]);
  trainXs.forEach(function(x) {x.splice(label_index, 1)});
  return [tensor(trainXs), tensor(trainYs)];
}
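A hypothetical call showing the resulting split; MyDataset and the sample rows are illustrative only.

// Three rows of [feature0, feature1, label]; -1 selects the last column as the label.
const raw = [
  [0.1, 0.2, 0],
  [0.3, 0.4, 1],
  [0.5, 0.6, 1],
];

const [xs, ys] = MyDataset._labelData(raw, -1);  // `MyDataset` is a hypothetical class
xs.print();  // [[0.1, 0.2], [0.3, 0.4], [0.5, 0.6]]
ys.print();  // [0, 1, 1]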