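// Stage each configured input file into the local input directory: inline contents are written directly, anything else is downloaded from S3.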
for (const file of (this.config.files || [])) {
if (file.filename) {
const name = file.filename;
const fullpath = path.join(paths.input, name);
fs.writeFileSync(fullpath, file.contents, { mode: 0o600 });
}
else {
await aws.downloadS3Files([file], paths.input);
}
}
const messageInfo = `(${message.payload.length} files from ${this.message.meta.key})`;
await aws.downloadS3Files(message.payload, paths.input);
log.info(`Completed source download ${messageInfo}`);
log.info('==== MRF CONFIG ====');
log.info(mrfConfig);
log.info('========');
await this.runMrfgen(paths.mrfgenConfig);
log.info(`Completed MRF generation ${messageInfo}`);
const fullPaths = fs.readdirSync(paths.output).map((f) => path.join(paths.output, f));
// Upload under the destKey bucket, inserting an underscore before the file extension
const destKeyFn = (filename) =>
path.join(destKey, path.basename(filename).replace(/\.([^\.]*)$/, '_.$1'));
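// e.g. path.basename('tile.png') becomes 'tile_.png' before being joined with destKey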
await aws.uploadS3Files(fullPaths, destBucket, destKeyFn);
}
finally {
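// Always remove the scratch directory, even if MRF generation failed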
execSync(`rm -rf ${tempDir}`);
}
}
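// Validate the contents of the expanded archive; if validation throws, log the error and return it to the caller instead of continuing.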
try {
unarchivedFiles = validations.validateArchiveContents(archiveDirPath);
log.debug(`UNARCHIVED FILES: ${JSON.stringify(unarchivedFiles)}`);
}
catch (e) {
log.error(e);
return Object.assign(returnValue, { error: e.message });
}
// Upload expanded files to S3
const s3Files = await uploadArchiveFilesToS3(
unarchivedFiles,
archiveDirPath,
fileAttrs
);
log.info('S3 FILES:');
log.info(JSON.stringify(s3Files));
const imgFiles = s3Files.map((s3File) => ({ Bucket: s3File.bucket, Key: s3File.key }));
if (imgFiles.length > 0) {
imageSources.push({ archive: archiveFileName, images: imgFiles });
}
// delete the local expanded files
deleteExpandedFiles(unarchivedFiles, archiveDirPath);
// Delete the archive files from S3
await aws.deleteS3Files(downloadRequest);
}
catch (e) {
log.error(e);
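// Pull messages from SQS in batches of at most 10 (the per-call receive limit) until messageLimit is exhausted or the time limit lapses.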
async consume(fn) {
let messageLimit = this.messageLimit;
log.info(`Attempting to process up to ${messageLimit} messages...`);
let sum = 0;
/* eslint-disable no-await-in-loop */
// Only request up to the original messageLimit messages on subsequent `processMessages` calls
while (messageLimit > 0 && !this.timeLapsed) {
let results;
if (messageLimit > 10) {
results = await this.processMessages(fn, 10, this.visibilityTimeout);
messageLimit -= 10;
} else if (messageLimit > 0) {
results = await this.processMessages(fn, messageLimit, this.visibilityTimeout);
messageLimit = 0;
}
sum += results;
// if the function is running for longer than the timeLimit, stop it
const timeSpent = (Date.now() - this.now);
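// The loop's timeLapsed flag would be set here once timeSpent exceeds the configured limit,
// e.g. (a hedged sketch, assuming this.timeLimit is in milliseconds):
// if (timeSpent > this.timeLimit) this.timeLapsed = true;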
async parse(pdrLocalPath) {
const collectionConfigStore = new CollectionConfigStore(this.bucket, this.stack);
const parsed = await parsePdr(pdrLocalPath, collectionConfigStore, this.pdr.name);
// each group represents a Granule record.
// After adding all the files in the group to the Queue
// we create the granule record (moment of inception)
log.info(
{ pdrName: this.pdr.name },
`There are ${parsed.granulesCount} granules in ${this.pdr.name}`
);
log.info(
{ pdrName: this.pdr.name },
`There are ${parsed.filesCount} files in ${this.pdr.name}`
);
return parsed;
}
}
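// Remove a published granule's metadata from CMR, using Launchpad credentials when cmr_oauth_provider is 'launchpad'.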
async removeGranuleFromCmrByGranule(granule) {
log.info(`granules.removeGranuleFromCmrByGranule ${granule.granuleId}`);
if (!granule.published || !granule.cmrLink) {
throw new Error(`Granule ${granule.granuleId} is not published to CMR, so cannot be removed from CMR`);
}
const params = {
provider: process.env.cmr_provider,
clientId: process.env.cmr_client_id
};
if (process.env.cmr_oauth_provider === 'launchpad') {
const config = {
api: process.env.launchpad_api,
passphrase: process.env.launchpad_passphrase,
certificate: process.env.launchpad_certificate
};
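// The remove/add alias actions below are submitted together, so the alias moves from currentIndex to newIndex in a single request.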
body: {
actions: [
{ remove: { index: currentIndex, alias: aliasName } },
{ add: { index: newIndex, alias: aliasName } }
]
}
}).then(() => {
log.info(`Removed alias ${aliasName} from index ${currentIndex} and added alias to ${newIndex}`);
}).catch((err) =>
res.boom.badRequest(`Error removing alias ${aliasName} from index ${currentIndex} and adding alias to ${newIndex}: ${err}`));
let message = `Reindex success - alias ${aliasName} now pointing to ${newIndex}`;
if (deleteSource) {
await esClient.indices.delete({ index: currentIndex });
log.info(`Deleted index ${currentIndex}`);
message = `${message} and index ${currentIndex} deleted`;
}
return res.send({ message });
}
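// Persist the OAuth access/refresh tokens, mint a JWT, and either redirect back to the 'state' URL with the token or return it in the response body.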
const accessTokenModel = new AccessToken();
await accessTokenModel.create({
accessToken,
refreshToken
});
const jwtToken = createJwtToken({ accessToken, username, expirationTime });
if (state) {
return buildPermanentRedirectResponse(
`${decodeURIComponent(state)}?token=${jwtToken}`,
response
);
}
log.info('No state specified, responding 200');
return response.send({ message: { token: jwtToken } });
} catch (e) {
if (e.statusCode === 400) {
return response.boom.unauthorized('Failed to get authorization token');
}
log.error('Error caught when checking code', e);
return response.boom.unauthorized(e.message);
}
}
const errorMessage = 'Request requires a code';
return response.boom.unauthorized(errorMessage);
}
RequestId: event.RequestId,
LogicalResourceId: event.LogicalResourceId,
Data: data
});
log.info('RESPONSE BODY:\n', body);
log.info('SENDING RESPONSE...\n');
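// PUT the response to the pre-signed S3 URL supplied by CloudFormation; the empty content-type matches what the URL was signed with.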
const r = await got.put(event.ResponseURL, {
body,
headers: {
'content-type': '',
'content-length': body.length
}
});
log.info(r.body);
}
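// Task handler: select a protocol-specific discoverer (e.g. ftp, sftp, http, s3), list new granules, and always close the connection when done.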
async function discoverGranules(event) {
const protocol = event.config.provider.protocol;
const Discoverer = granule.selector('discover', protocol);
const discoverer = new Discoverer(event);
try {
const granules = await discoverer.discover();
log.info(`Discovered ${granules.length} granules.`);
return { granules };
} finally {
if (discoverer.connected) await discoverer.end();
}
}