async extractPage(pageNum) {
  try {
    // Await here so that a failing library-based extraction is caught
    // and we fall back to rendering the page with ImageMagick below.
    return await this.extractPageWithLib(pageNum);
  } catch (e) {
    console.error(`Could not extract page with library: ${e.message}`);
  }
  // the convert command takes zero-indexed page numbers
  const page = pageNum - 1;
  const file = await tmp.file({ postfix: ".png" });
  const { filePath, cleanup } = await createTempSymlink(this.file);
  const command = [
    "convert",
    "-density",
    "400",
    `${filePath}[${page}]`,
    file.path
  ];
  try {
    await exec(command);
  } catch (e) {
    console.error("Failed extracting image", e);
    throw e;
  } finally {
    // Remove the temporary symlink whether or not the conversion succeeded.
    cleanup();
  }
  // Hand back the tmp file handle ({ path, cleanup }) for the rendered page.
  return file;
}
let source;
if (this._contextual) {
  const contextualParaphrase = this._downloadParaphrase(true)
    .pipe(new TypecheckStream(this._schemas));
  // basicParaphrase and basicSynthetic are streams created earlier in this
  // function (not shown in this snippet)
  const basicSource = StreamUtils.chain([basicParaphrase, basicSynthetic], { objectMode: true });

  // Spool the basic (non-contextual, not augmented) dataset to disk
  // We need to do this because:
  // 1) We don't want to run too many generation/processing steps as a pipeline, because that
  //    would use too much memory
  // 2) We need to do multiple passes over the basic dataset for different reasons, and
  //    we can't cache it in memory
  const { path: basicDataset, fd: basicDatasetFD } =
    await tmp.file({ mode: 0o600, dir: '/var/tmp' });

  await StreamUtils.waitFinish(basicSource
    .pipe(new Genie.DatasetStringifier())
    .pipe(fs.createWriteStream(basicDataset, { fd: basicDatasetFD })));
  // basicDatasetFD is closed here

  let contexts = await
    fs.createReadStream(basicDataset, { encoding: 'utf8' })
      .pipe(byline())
      .pipe(new Genie.DatasetParser({ contextual: false }))
      .pipe(new Genie.ContextExtractor(this._schemas))
      .read();

  const contextualized =
    fs.createReadStream(basicDataset, { encoding: 'utf8' })
      .pipe(byline())
// Get the path to the simple tsconfig file which should be used for the build
const tsconfigPath = join(__dirname, '../../src/tsconfig-build.json');

// Read the tsconfig file
const tsConfigString = await fsReadFileAsync(tsconfigPath, { encoding: 'utf8' }) as string;
const tsConfig = JSON.parse(tsConfigString);

// Set absolute include paths
const newIncludeFiles = [];
for (const includeFile of tsConfig.include) {
  newIncludeFiles.push(join(process.cwd(), includeFile));
}
tsConfig.include = newIncludeFiles;

// Write the new custom tsconfig to a temp file
const { fd, path, cleanup } = await file();
await fsWriteAsync(fd, Buffer.from(JSON.stringify(tsConfig, null, 2), 'utf8'));

return {
  path,
  cleanup,
};
}
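The { path, cleanup } pair returned above is presumably handed to the TypeScript build and disposed of afterwards, but that caller is not part of the snippet. The sketch below is therefore hypothetical: createTempTsconfig stands in for the unnamed function the snippet was taken from and is passed in explicitly so the sketch stays self-contained.

// Hypothetical caller for the helper above (not the project's actual code).
const { spawn } = require('child_process');

async function buildWithTempTsconfig(createTempTsconfig) {
  const { path, cleanup } = await createTempTsconfig();
  try {
    await new Promise((resolve, reject) => {
      const tsc = spawn('npx', ['tsc', '--project', path], { stdio: 'inherit' });
      tsc.on('error', reject);
      tsc.on('exit', (code) =>
        code === 0 ? resolve() : reject(new Error(`tsc exited with code ${code}`)));
    });
  } finally {
    // The temp tsconfig is only needed for the duration of the build.
    cleanup();
  }
}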
module.exports = function generateScriptFile( input, output ) {
  // We need to create a temp file that ESTK can run, this file will have
  // all paths that are going to be converted
  return tmp.file({ postfix: '.jsx' })
    // "tmp.file" returns an object with more properties, but we are only
    // interested in the path property
    .then( ({ path: file }) => {
      log.verbose( 'Created temp file at', file )
      const script = createScriptContent( input, output )
      // Write script contents to temp file
      return new Promise( ( resolve, reject ) => {
        writeFile( file, script, err => {
          if ( err ) {
            return reject( err )
          }
          // Send file path to next function in the promise chain
          resolve( file )
        } )
      } )
    } )
}
async function getModule(mf, messages) {
  // Compile the messages to an ES module, then transpile it to CommonJS
  // so the temp file can be loaded with require() below.
  const src = compileModule(mf, messages);
  const options = { plugins: ['@babel/plugin-transform-modules-commonjs'] };
  const { code } = await babel.transformAsync(src, options);

  const { cleanup, fd, path } = await tmp.file({
    dir: __dirname,
    postfix: '.js'
  });
  await write(fd, code, 0, 'utf8');
  try {
    return require(path).default;
  } finally {
    cleanup();
  }
}
function createUpload (inputStream) {
  console.log('Creating upload...')
  return tmp.file({
    discardDescriptor: true
  }).then((tempFile) => {
    const tempFileName = tempFile.path
    const gzip = zlib.createGzip()
    const writeStream = fs.createWriteStream(tempFileName)
    const fileStream = inputStream.pipe(new stream.PassThrough())
    var mimeType
    fileStream.on('data', chunk => {
      let type = filetype(chunk)
      if (!mimeType) {
async extractFile(file) {
  const { filePath, cleanup: cleanupSymlink } = await createTempSymlink(
    this.path
  );
  const { path, cleanup } = await tmp.file({
    postfix: pathLib.extname(file).toLowerCase()
  });
  // "unrar p" prints the requested file to stdout (-idq suppresses messages);
  // the exec helper redirects that output into the temp file.
  await exec(["unrar", "p", "-idq", filePath, file], { stdoutFile: path });
  cleanupSymlink();
  return {
    path,
    cleanup
  };
}
async apply(flow) {
  const app = flow.children[0];
  const script = await app.open(app, { platform: 'osx', wm: 'terminal' }, app.children);
  const { path, fd } = await file({
    prefix: 'workflow-wm-terminal-',
    postfix: '.sh',
    mode: 0o755,
  });
  await outputFile(path, script);
  await close(fd);
  kexec(path);
}
async function createClientSideEntryTmp(data) {
  const entryFile = await tmp.file({ prefix: 'pattern-', postfix: '.js' });
  const entryFileContents = createClientSideEntry(data);
  // fs.writeSync takes (fd, string[, position[, encoding]]), not an options object
  fs.writeSync(entryFile.fd, entryFileContents, null, 'utf8');
  return entryFile;
}
const options = {
  'cwd': cwd,
  'env': Object.assign({}, process.env, env),
  'stdio': ['ignore', this.stdout, this.stderr],
  'shell': shell
};

if (!stdio) {
  stdio = {
    'stdout': (data) => process.stdout.write(data),
    'stderr': (data) => process.stderr.write(data)
  };
}

if (this.script) {
  const tmp = await tmpPromise.file();
  try {
    await fsExtra.writeFile(tmp.path, this.script, 'utf8');
    await fsExtra.close(tmp.fd);
    await fsExtra.chmod(tmp.path, 0o700);
    logger.debug(`Script written: ${tmp.path}`);
    await this._spawn(tmp.path, [], options, stdio);
  } finally {
    await fsExtra.unlink(tmp.path);
    logger.debug(`Script deleted: ${tmp.path}`);
  }
} else {
  if (shell) {
    await this._spawn(this.command, [], options, stdio);
  } else {
    await this._spawn(this.command[0], this.command.slice(1), options, stdio);
  }
}
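Most of the examples above create a temp file and then call cleanup() or unlink it by hand. tmp-promise also provides withFile, which removes the file automatically once the async callback settles. A minimal sketch of that pattern, not tied to any of the snippets above (it assumes the script contents begin with a shebang):

// Minimal sketch using tmp-promise's withFile: the temp file exists only for
// the duration of the callback and is removed when it resolves or rejects.
const tmp = require('tmp-promise');
const fs = require('fs/promises');
const { execFile } = require('child_process');
const { promisify } = require('util');

async function runTempScript(contents) {
  return tmp.withFile(async ({ path }) => {
    await fs.writeFile(path, contents, 'utf8');
    await fs.chmod(path, 0o700);
    const { stdout } = await promisify(execFile)(path);
    return stdout;
  }, { postfix: '.sh' });
}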