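// Example: PCA test for the iris dataset (CommonJS / should.js style).
// Fits a scaled PCA on the iris measurements and compares the loadings and the
// standard deviations of the principal components against reference values.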
'use strict';
// Ref: http://www.r-bloggers.com/computing-and-visualizing-pca-in-r/
const Matrix = require('ml-matrix').Matrix;
const Stat = require('ml-stat/matrix');
const mean = Stat.mean;
const stdev = Stat.standardDeviation;
const iris = require('ml-dataset-iris').getNumbers();
const PCA = require('..');
const expectedLoadings = [
  [0.521, 0.269, 0.580, 0.565],
  [0.377, 0.923, 0.024, 0.067],
  [0.720, 0.244, 0.142, 0.634],
  [0.261, 0.124, 0.801, 0.524]
];
describe('iris dataset', function () {
  var pca = new PCA(iris, {scale: true, useCovarianceMatrix: false});
  it('loadings', function () {
    // checkLoadings is a helper defined elsewhere in the original test file;
    // it checks the computed loadings against expectedLoadings.
    checkLoadings(pca);
  });
  it('standard deviation', function () {
    pca.getStandardDeviations().should.approximatelyDeep([1.7084, 0.9560, 0.3831, 0.1439], 1e-4);
  });
});
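
// Example: the same iris PCA checks rewritten as a Jest test with ES module
// imports, including expected loadings for the NIPALS algorithm.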
// Ref: http://www.r-bloggers.com/computing-and-visualizing-pca-in-r/
import { Matrix } from 'ml-matrix';
import { getNumbers } from 'ml-dataset-iris';
import { toBeDeepCloseTo } from 'jest-matcher-deep-close-to';
import { PCA } from '../pca';
expect.extend({ toBeDeepCloseTo });
const iris = getNumbers();
const expectedLoadings = [
  [0.521, 0.269, 0.58, 0.565],
  [0.377, 0.923, 0.024, 0.067],
  [0.72, 0.244, 0.142, 0.634],
  [0.261, 0.124, 0.801, 0.524],
];
const expectedLoadingsNIPALS = [
  [0.5211, -0.2693, 0.5804, 0.5649],
  [0.3774, 0.9233, 0.0245, 0.067],
  [0.7196, -0.2444, -0.1421, -0.6343],
  [-0.2613, 0.1235, 0.8014, -0.5236],
];
describe('iris dataset test method covarianceMatrix', function () {
  // ...
});
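
// Benchmark: repeatedly trains an SVM on the iris data until `time` seconds have
// elapsed, using a precomputed Gaussian kernel with a leading sample-index column.
// `data` (ml-dataset-iris), `Kernel`, `range` and `gamma` are assumed to be defined
// elsewhere in the original file.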
function exec(SVM, time) {
  const MILLISECONDS = time * 1000;
  const features = data.getNumbers();
  let labels = data.getClasses();
  const classes = data.getDistinctClasses();
  // Map the class names to numeric indices.
  const c = {};
  classes.forEach((v, idx) => (c[v] = idx));
  labels = labels.map(l => c[l]);
  // We precompute the gaussian kernel
  const kernel = new Kernel('gaussian', {sigma: 1 / Math.sqrt(gamma)});
  const KData = kernel.compute(features).addColumn(0, range(1, labels.length + 1));
  const t1 = Date.now();
  let t2 = Date.now();
  let count = 0;
  while (t2 - t1 < MILLISECONDS) {
    const svm = new SVM({
      quiet: true,
      // ...
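
// Benchmark: times a cost/gamma grid search (labelled `iris-cv`) for an SVM
// implementation loaded asynchronously from the command-line argument. `argv`,
// the *_GRID_SIZE/*_MIN/*_MAX constants, `normalize` and `pow10` are assumed to
// be defined elsewhere in the original file.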
async function exec() {
  const SVM = await require(`../../${argv[0]}`);
  const data = require('ml-dataset-iris');
  const features = data.getNumbers();
  let labels = data.getClasses();
  const classes = data.getDistinctClasses();
  const c = {};
  classes.forEach((v, idx) => (c[v] = idx));
  labels = labels.map(l => c[l]);
  // Log-spaced grids of candidate cost and gamma values.
  const cost = Array.from({length: COST_GRID_SIZE}).map(normalize(COST_GRID_SIZE, COST_MIN, COST_MAX)).map(pow10);
  const gamma = Array.from({length: GAMMA_GRID_SIZE}).map(normalize(GAMMA_GRID_SIZE, GAMMA_MIN, GAMMA_MAX)).map(pow10);
  const timeLabel = `iris-cv ${argv[0]}: `;
  console.time(timeLabel);
  for (let c of cost) {
    for (let g of gamma) {
      const svm = new SVM({
        quiet: true,
        // ...
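
// Benchmark helper: trains a single SVM on the iris data, either on the raw
// features or, when `precomputed` is true, on a precomputed Gaussian kernel with
// a leading sample-index column. `Kernel`, `range` and `gamma` are assumed to be
// defined elsewhere in the original file.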
function exec(SVM, precomputed) {
  const data = require('ml-dataset-iris');
  let trainData;
  const features = data.getNumbers();
  let labels = data.getClasses();
  const classes = data.getDistinctClasses();
  const c = {};
  classes.forEach((v, idx) => (c[v] = idx));
  labels = labels.map((l) => c[l]);
  if (precomputed) {
    const kernel = new Kernel('gaussian', { sigma: 1 / Math.sqrt(gamma) });
    trainData = kernel
      .compute(features)
      .addColumn(0, range(1, labels.length + 1));
  } else {
    trainData = features;
  }
  const svm = new SVM({
    // ...
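
// Benchmark: repeats an RBF grid search over `cost` and `gamma` until `time`
// seconds have elapsed. `SVM`, `cost` and `gamma` are assumed to be defined
// elsewhere in the original file.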
function exec(time) {
  const MILLISECONDS = time * 1000;
  const data = require('ml-dataset-iris');
  const features = data.getNumbers();
  let labels = data.getClasses();
  const classes = data.getDistinctClasses();
  const c = {};
  classes.forEach((v, idx) => (c[v] = idx));
  labels = labels.map(l => c[l]);
  const startTime = Date.now();
  let endTime = Date.now();
  let count = 0;
  while (endTime - startTime < MILLISECONDS) {
    for (let c of cost) {
      for (let g of gamma) {
        const svm = new SVM({
          C: c,
          kernel: 'rbf',
          // ...
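
// Benchmark: repeatedly trains an SVM with fixed `cost` and `gamma` options until
// `time` seconds have elapsed; both are assumed to be defined elsewhere in the
// original file.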
function exec(SVM, time) {
  const MILLISECONDS = time * 1000;
  const data = require('ml-dataset-iris');
  const features = data.getNumbers();
  let labels = data.getClasses();
  const classes = data.getDistinctClasses();
  const c = {};
  classes.forEach((v, idx) => (c[v] = idx));
  labels = labels.map(l => c[l]);
  const t1 = Date.now();
  let t2 = Date.now();
  let count = 0;
  while (t2 - t1 < MILLISECONDS) {
    const svm = new SVM({
      quiet: true,
      cost: cost,
      gamma: gamma
      // ...
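
// Benchmark: same timed loop as above, but sweeping the cost/gamma grid on every
// iteration instead of using fixed values.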
function exec(SVM, time) {
  const MILLISECONDS = time * 1000;
  const data = require('ml-dataset-iris');
  const features = data.getNumbers();
  let labels = data.getClasses();
  const classes = data.getDistinctClasses();
  const c = {};
  classes.forEach((v, idx) => (c[v] = idx));
  labels = labels.map(l => c[l]);
  const startTime = Date.now();
  let endTime = Date.now();
  let count = 0;
  while (endTime - startTime < MILLISECONDS) {
    for (let c of cost) {
      for (let g of gamma) {
        const svm = new SVM({
          quiet: true,
          cost: c,
          // ...
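
// Benchmark: timed training loop that optionally rebuilds the precomputed Gaussian
// kernel (with its leading sample-index column) on every iteration.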
function exec(SVM, time, precomputed) {
  const MILLISECONDS = time * 1000;
  const data = require('ml-dataset-iris');
  let trainData;
  const features = data.getNumbers();
  let labels = data.getClasses();
  const classes = data.getDistinctClasses();
  const c = {};
  classes.forEach((v, idx) => (c[v] = idx));
  labels = labels.map((l) => c[l]);
  let result;
  const t1 = Date.now();
  let t2 = Date.now();
  let count = 0;
  while (t2 - t1 < MILLISECONDS) {
    if (precomputed) {
      const kernel = new Kernel('gaussian', { sigma: 1 / Math.sqrt(gamma) });
      trainData = kernel.compute(features).addColumn(0, range(1, labels.length + 1));
    } else {