exports.run = async function run(version, benchmark, ...cmdArgs) {
    // The trailing positional argument is the options object.
    const options = cmdArgs.splice(cmdArgs.length - 1, 1)[0];
    const host = await new_host();
    // const port = host._server.address().port;
    let benchmark_name = options.output || "benchmark";
    console.log(chalk`\n{whiteBright Running v.${version} (${cmdArgs.join(",")})}`);
    let version_index = 1;
    let table = undefined;
    if (options.read && fs.existsSync(`${benchmark_name}.arrow`)) {
        const buffer = fs.readFileSync(`${benchmark_name}.arrow`, null).buffer;
        table = perspective.table(buffer);
        const view = table.view({row_pivots: ["version"], columns: []});
        const json = await view.to_json();
        version_index = json.length;
    }
    const RUN_TEST = fs.readFileSync(path.resolve(benchmark)).toString();
    let bins;
    if (options.puppeteer) {
        const puppeteer = require("puppeteer");
        let browser = await puppeteer.launch({
            headless: true,
            args: ["--no-sandbox"]
        });
        bins = await run_version(browser, cmdArgs, RUN_TEST);
    }
    // The remainder of the original function (the non-puppeteer path and
    // result handling) is not included in this excerpt.
};
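
// `run_version` is not defined in this excerpt. A minimal sketch of what it
// might look like, assuming each entry of `cmdArgs` is a URL to benchmark
// against and that the injected benchmark script publishes its results on
// `window.__RESULTS__` (both are assumptions, not the project's actual contract):
async function run_version(browser, urls, run_test) {
    const results = [];
    for (const url of urls) {
        const page = await browser.newPage();
        await page.goto(url, {waitUntil: "domcontentloaded"});
        // Inject the benchmark source and wait for it to publish results.
        await page.addScriptTag({content: run_test});
        await page.waitForFunction("window.__RESULTS__ !== undefined");
        results.push(await page.evaluate("window.__RESULTS__"));
        await page.close();
    }
    return results;
}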
async function init_dynamic() {
    // Create a `table`.
    const table = perspective.table(newRows(TABLE_SIZE), {limit: TABLE_SIZE});

    // The `table` needs to be registered to a name with the Perspective
    // `WebSocketServer` in order for the client to get a proxy handle to it.
    host.host_view("data_source_one", table.view());

    // Loop and update the `table` occasionally.
    (function postRow() {
        table.update(newRows());
        setTimeout(postRow, TICK_RATE);
    })();
}
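
// `TABLE_SIZE`, `TICK_RATE`, and `newRows` are assumed by the snippets above
// and below but are not defined here. A minimal sketch of what they might
// look like (the column names, sizes, and rates are arbitrary examples):
const TABLE_SIZE = 10000;
const TICK_RATE = 20;

function newRows(total_rows = 50) {
    const rows = [];
    for (let i = 0; i < total_rows; i++) {
        rows.push({
            name: `row_${i}`,
            value: Math.random() * 100,
            timestamp: new Date()
        });
    }
    return rows;
}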
function infer_table(buffer) {
    if (buffer.slice(0, 6).toString() === "ARROW1") {
        console.log("Loaded Arrow");
        return table(buffer.buffer);
    } else {
        let text = buffer.toString();
        try {
            let json = JSON.parse(text);
            console.log("Loaded JSON");
            return table(json);
        } catch (e) {
            console.log("Loaded CSV");
            return table(text);
        }
    }
}
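
// A usage sketch for `infer_table` (the filename is illustrative): pass the
// raw file buffer straight through, and let the "ARROW1" magic bytes or the
// JSON parse decide whether it is Arrow, JSON, or CSV.
function load_file(filename) {
    const buffer = fs.readFileSync(filename);
    return infer_table(buffer);
}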
async function init_cached() {
    await populate_cache();
    const tbl = perspective.table(newRows(TABLE_SIZE), {limit: TABLE_SIZE});
    host.host_view("data_source_one", tbl.view());

    // Replay a random pre-computed update from the cache on each tick.
    (function postRow() {
        const entry = __CACHE__[Math.floor(Math.random() * __CACHE__.length)];
        tbl.update(entry);
        setTimeout(postRow, TICK_RATE);
    })();
}
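
// `__CACHE__` and `populate_cache` are not defined in this excerpt. A minimal
// sketch, assuming the cache is an array of pre-encoded Arrow updates built
// with `newArrow` (defined below); the cache and batch sizes are arbitrary:
let __CACHE__ = [];

async function populate_cache(entries = 100, rows_per_entry = 50) {
    for (let i = 0; i < entries; i++) {
        __CACHE__.push(await newArrow(rows_per_entry));
    }
}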
async function newArrow(total_rows) {
    const table = perspective.table(newRows(total_rows));
    const vw = table.view();
    const arrow = await vw.to_arrow();
    vw.delete();
    table.delete();
    return arrow;
}
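
// A usage sketch for `newArrow`: write an Arrow-encoded fixture to disk.
// The row count and filename are arbitrary examples; `to_arrow()` resolves to
// an ArrayBuffer, so it is wrapped for the fs API.
async function write_fixture(filename = "fixture.arrow", rows = 1000) {
    const arrow = await newArrow(rows);
    fs.writeFileSync(filename, Buffer.from(arrow));
}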
async function host(filename, options) {
    let files = [path.join(__dirname, "html")];
    if (options.assets) {
        files = [options.assets, ...files];
    }
    const server = new WebSocketServer({assets: files, port: options.port});
    let file;
    if (filename) {
        file = table(fs.readFileSync(filename).toString());
    } else {
        file = await read_stdin();
    }
    server.host_table("data_source_one", file);
    if (options.open) {
        open_browser(options.port);
    }
}
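
// `read_stdin` is not shown above. A minimal sketch, assuming it buffers
// standard input and reuses `infer_table` to pick the input format:
function read_stdin() {
    return new Promise((resolve, reject) => {
        const chunks = [];
        process.stdin.on("data", chunk => chunks.push(chunk));
        process.stdin.on("end", () => resolve(infer_table(Buffer.concat(chunks))));
        process.stdin.on("error", reject);
    });
}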