// Get input arguments or default values
this.namespace = flags.namespace;
this.majorversion = flags.majorversion;
this.minorversion = flags.minorversion;
this.folder = flags.folder || '.';
if (flags.verbose) {
this.verbose = true;
}
console.log(`Initialize update of dependencies in ${this.folder} with ${this.namespace} ${this.majorversion}.${this.minorversion}`);
// Read files
const fileList = glob.sync('**/*.xml');
// Progress bar
// @ts-ignore
this.progressBar = new cliProgress.SingleBar({
format: '{name} [{bar}] {percentage}% | {value}/{total} | {file} ',
stopOnComplete: true
});
if (this.progressBar.terminal.isTTY()) {
this.progressBar.start(fileList.length, 0, { name: 'Progress', file: 'N/A' });
}
// Replace dependencies in files
let updatedNb = 0;
const parser = new xml2js.Parser();
const promises = [];
for (const sfdxXmlFile of fileList) {
const filePromise = new Promise((resolve, reject) => {
fs.readFile(sfdxXmlFile, (err, data) => {
// Parse XML file
parser.parseString(data, (err2, parsedXmlFile) => {
  // ... update the dependency references in parsedXmlFile, write the file
  // back and increment updatedNb (elided in this excerpt), then:
  resolve(parsedXmlFile);
});
});
});
promises.push(filePromise);
}

if (flags.verbose) {
this.verbose = true;
}
console.log('Starting sfdx essentials:uncomment with uncomment key "' + this.uncommentKey + '"');
// List apex classes
const fetchClassesExpression = this.folder + '/classes/*.cls';
const customApexClassFileNameList = glob.sync(fetchClassesExpression);
// List aura items
const fetchAuraExpression = this.folder + '/aura/**/*.js';
const customAuraFileNameList = glob.sync(fetchAuraExpression);
// Progress bar
// @ts-ignore
this.progressBar = new cliProgress.SingleBar({
format: '{name} [{bar}] {percentage}% | {value}/{total} | {file} ',
stopOnComplete: true
});
if (this.progressBar.terminal.isTTY()) {
this.progressBar.start(customApexClassFileNameList.length + customAuraFileNameList.length, 0, { name: 'Progress', file: 'N/A' });
}
// Replace commented lines in each class
customApexClassFileNameList.forEach((customApexClassFileName) => {
this.processFile(customApexClassFileName);
if (!this.verbose && this.progressBar.terminal.isTTY()) {
this.progressBar.increment();
}
});
// Replace commented lines in each aura item
customAuraFileNameList.forEach((customAuraFileName) => {
this.processFile(customAuraFileName);
if (!this.verbose && this.progressBar.terminal.isTTY()) {
this.progressBar.increment();
}
});
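
// A minimal standalone sketch of the SingleBar pattern used in the two
// examples above (illustrative only: `fileList` is assumed to be an array of
// paths; the {name} and {file} tokens are filled from the payload object
// passed to start()/increment()):
const cliProgress = require('cli-progress');
const bar = new cliProgress.SingleBar({
  format: '{name} [{bar}] {percentage}% | {value}/{total} | {file}',
  stopOnComplete: true
});
if (bar.terminal.isTTY()) {
  bar.start(fileList.length, 0, { name: 'Progress', file: 'N/A' });
  for (const file of fileList) {
    // ... process the file ...
    bar.increment(1, { file });
  }
}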
const concatenateDataFlows = async (fileNames: string[], imgSize: number[], oneHotMap: any, dataFlows: any[], type: string, imagePath: string) => {
console.log(`access ${type} image data...`);
const bar1 = new _cliProgress.SingleBar({}, _cliProgress.Presets.shades_classic);
bar1.start(fileNames.length, 0);
for (let j = 0; j < fileNames.length; j++) {
const jsonData = await parseAnnotation(fileNames[j]);
bar1.update(j);
let image = await Jimp.read(path.join(imagePath, jsonData.annotation.filename[0]));
image = image.resize(imgSize[0], imgSize[1]);
const trainImageBuffer = await image.getBufferAsync(Jimp.MIME_JPEG);
const imageArray = new Uint8Array(trainImageBuffer);
let label: any = jsonData.annotation.object[0].name[0];
if (Object.keys(oneHotMap).length > 1) {
label = tf.oneHot(tf.scalar(oneHotMap[label], 'int32'), Object.keys(oneHotMap).length);
}
dataFlows.push({
xs: tf.cast(tf.node.decodeImage(imageArray, 3), 'float32'),
ys: label
});
}
bar1.stop();
};
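
// Hedged helper sketch (the name `withProgress` is ours, not from any of the
// projects above): wraps sequential async work over a list so the preset bar
// does not have to be wired up by hand each time.
async function withProgress(items: any[], worker: (item: any, i: number) => Promise<void>) {
  const bar = new _cliProgress.SingleBar({}, _cliProgress.Presets.shades_classic);
  bar.start(items.length, 0);
  try {
    for (let i = 0; i < items.length; i++) {
      await worker(items[i], i);
      bar.update(i + 1); // report items completed rather than the loop index
    }
  } finally {
    bar.stop(); // restore the terminal cursor even if the worker throws
  }
}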
function syncFileWithSource(pathToTargetFile, pathToOutputFile) {
const progressBar = new _cliProgress.SingleBar({}, _cliProgress.Presets.shades_classic);
progressBar.start(100, 0);
const sourceLines = [];
const targetLines = [];
const existingTargetFile = readFileIfExists(pathToTargetFile);
existingTargetFile.toString().split("\n").forEach((line) => {
targetLines.push(line.trim());
});
progressBar.update(10);
const sourceFile = readFileIfExists(program.sourceFile);
sourceFile.toString().split("\n").forEach((line) => {
sourceLines.push(line.trim());
});
progressBar.update(20);
const sourceChunks = createChunks(sourceLines, progressBar, false);
const targetChunks = createChunks(targetLines, progressBar, true);
// ... chunk comparison and output writing elided in this excerpt;
// assumed completion: finish and release the bar
progressBar.update(100);
progressBar.stop();
}

exports.handler = async function (argv) {
const userFunctions = require(argv.module)
var userConfig = require(argv.config)
if (argv.operations) userConfig.numOperations = argv.operations
if (argv.seed) userConfig.seedNumber = argv.seed
if (argv.debug) userConfig.debug = argv.debug
if (argv.iterations) userConfig.numIterations = argv.iterations
const failingTestRoot = p.join(p.dirname(argv.module), 'test', 'autogenerated','failing')
const { events, run } = create(userFunctions, userConfig)
var bar = null
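// Only create a progress bar when not running in debug mode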
if (!argv.debug) {
bar = new progress.SingleBar()
bar.start(argv.iterations || userConfig.numIterations, 0)
events.on('progress', () => bar.increment())
}
try {
await run()
if (bar) bar.stop()
console.log()
console.log('Fuzzing Succeeded.')
console.log()
} catch (err) {
if (bar) bar.stop()
if (!err[consts.FuzzError]) {
console.error('Fuzzing produced unexpected error:', err)
} else {
const { testCase, signature } = await generateTestCase(err, failingTestRoot, argv)
console.log('\nFailing Test:\n')
console.log(testCase) // assumed: print the generated failing test case
}
}
}

const bayesianClassifierModelTrain: ModelTrainType = async (data: UniformTfSampleData, model: PipcookModel): Promise<PipcookModel> => {
const {trainData, metaData} = data;
assertionTest(data, trainData, metaData);
const trainModel = model.model;
let count = 0;
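// First pass over the dataset: count the samples so the bar gets an exact total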
await trainData.forEachAsync((e: any) => {
count++;
});
const bar1 = new _cliProgress.SingleBar({}, _cliProgress.Presets.shades_classic);
bar1.start(count, 0);
count = 0;
await trainData.forEachAsync((e: any) => {
count = count + 1;
bar1.update(count);
const trainX = e[metaData.feature.name].dataSync()[0];
const trainY = e[metaData.label.name].dataSync()[0];
trainModel.learn(trainX, trainY);
});
bar1.stop();
return {
...model,
model: trainModel
}
}

constructor(session, size, fd) {
this.session = session
this.size = size
this.fd = fd
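// Only show a progress bar for payloads larger than 4 MiB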
if (size > 4 * 1024 * 1024) {
this.bar = new progress.SingleBar(BAR_OPTS)
this.bar.start(size, 0)
this.verbose = true
}
}

return new Promise((resolve, reject) => {
const bar1 = new _cliProgress.SingleBar({}, _cliProgress.Presets.shades_classic);
const file = fs.createWriteStream(fileName);
let receivedBytes = 0;
request.get(url)
.on('response', (response: any) => {
// content-length arrives as a string; coerce it so the bar total is numeric
const totalBytes = parseInt(response.headers['content-length'], 10);
bar1.start(totalBytes, 0);
})
.on('data', (chunk: any) => {
receivedBytes += chunk.length;
bar1.update(receivedBytes);
})
.pipe(file)
.on('error', (err: Error) => {
fs.unlink(fileName, () => null); // fs.unlink requires a callback in modern Node
bar1.stop();
reject(err);
})
.on('finish', () => {
// assumed completion: finish the bar and resolve once the write stream closes
bar1.stop();
resolve(fileName);
});
});
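
// Hedged variant (not from the original project): when the server omits
// content-length, the total can be grown with setTotal() as bytes arrive;
// `stream` stands in for any readable stream here.
const bar = new _cliProgress.SingleBar({}, _cliProgress.Presets.shades_classic);
bar.start(1, 0);
let received = 0;
stream.on('data', (chunk: Buffer) => {
  received += chunk.length;
  bar.setTotal(Math.max(received, 1)); // best-effort lower bound on the total
  bar.update(received);
});
stream.on('end', () => bar.stop());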