assert.object(opts.log, 'opts.log');

var configPath = path.resolve(__dirname, '..', 'etc', 'config.json');
opts.log.info('Loading config from "%s"', configPath);
var config = JSON.parse(fs.readFileSync(configPath, 'utf-8'));

// config-agent doesn't support arrays so ENABLED_LOG_DRIVERS is stored
// as a comma-separated list in a string. We'll unmangle that here and
// make sure that no driver was specified that doesn't actually exist.
if (config.enabledLogDrivers) {
    assert.string(config.enabledLogDrivers, 'config.enabledLogDrivers-raw');
    config.enabledLogDrivers = config.enabledLogDrivers.split(',');
} else {
    config.enabledLogDrivers = ['json-file'];
}
assert.arrayOfString(config.enabledLogDrivers, 'config.enabledLogDrivers');
config.enabledLogDrivers.forEach(function _checkLogDriver(driver) {
    assert.ok(common.LOG_DRIVERS.hasOwnProperty(driver),
        'config.enabledLogDrivers.' + driver + ' is not a valid driver');
});

if (config.hasOwnProperty('fwrule_version')) {
    assert.number(config.fwrule_version, 'config.fwrule_version');
} else {
    config.fwrule_version = 1;
}

if (config.dcMaintEta) {
    var d = new Date(config.dcMaintEta);
    if (d.toString() !== 'Invalid Date') {
        config.dcMaintUtcEta = d.toUTCString();
    }
}
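// The enabledLogDrivers handling above normalizes a comma-separated config
// string into a validated array. A minimal standalone sketch of the same
// idea (parseLogDrivers and knownDrivers are hypothetical names, not part of
// the module above; knownDrivers stands in for common.LOG_DRIVERS):
function parseLogDrivers(raw, knownDrivers) {
    // Split the comma-separated string, or fall back to the default driver.
    var drivers = raw ? raw.split(',') : ['json-file'];
    drivers.forEach(function (driver) {
        if (!knownDrivers.hasOwnProperty(driver)) {
            throw new Error(driver + ' is not a valid driver');
        }
    });
    return drivers;
}
// parseLogDrivers('json-file,syslog', {'json-file': {}, 'syslog': {}})
//     => ['json-file', 'syslog']
// parseLogDrivers(undefined, {'json-file': {}})
//     => ['json-file']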
function imgUuidFromDockerDigests(digests) {
    assert.arrayOfString(digests, 'digests');

    // Sanity check the digests.
    var badDigests = digests.filter(function (d) {
        var sp = d.split(':');
        if (sp.length !== 2 || sp[0] !== 'sha256' || sp[1].length !== 64) {
            return true;
        }
        return false;
    });
    if (badDigests.length > 0) {
        throw new Error(
            'docker digests should be of the form "sha256:xxx", got: ' +
            badDigests);
    }

    var sha256sum = crypto.createHash('sha256');
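// Hedged usage sketch for the digest sanity check above (the digest values
// are made up; the rest of imgUuidFromDockerDigests is truncated here): a
// valid entry is "sha256:" followed by exactly 64 characters.
var goodDigest = 'sha256:' + new Array(65).join('a');   // 64-character payload
// imgUuidFromDockerDigests([goodDigest]);              // passes the check
// imgUuidFromDockerDigests(['md5:abcdef']);            // throws: bad form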
function execFilePlus(args, cb) {
    assert.object(args, 'args');
    assert.arrayOfString(args.argv, 'args.argv');
    assert.optionalObject(args.execOpts, 'args.execOpts');
    assert.object(args.log, 'args.log');
    assert.func(cb);
    var argv = args.argv;
    var execOpts = args.execOpts;

    // args.log.trace({exec: true, argv: argv, execOpts: execOpts},
    //     'exec start');
    execFile(argv[0], argv.slice(1), execOpts, function (err, stdout, stderr) {
        args.log.trace({exec: true, argv: argv, execOpts: execOpts, err: err,
            stdout: stdout, stderr: stderr}, 'exec done');
        if (err) {
            var msg = format(
                'exec error:\n'
                + '\targv: %j\n'
                + '\texit status: %s\n'
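// Hedged usage sketch for execFilePlus. The callback signature
// (err, stdout, stderr) is an assumption based on the truncated body above;
// the bunyan logger is created here purely for illustration.
var bunyan = require('bunyan');
var exampleLog = bunyan.createLogger({name: 'exec-example'});

execFilePlus({
    argv: ['/usr/bin/ls', '-l', '/tmp'],
    log: exampleLog
}, function (err, stdout, stderr) {
    if (err) {
        exampleLog.error(err, 'ls failed');
        return;
    }
    exampleLog.info({stdout: stdout}, 'ls succeeded');
});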
function genTestSubcmd(funcs, subcmd, subcmdinfo)
{
    assertplus.string(subcmd, 'subcmd');
    assertplus.object(subcmdinfo, 'subcmdinfo');
    assertplus.arrayOfString(subcmdinfo.required, 'subcmdinfo.required');

    /*
     * For each required field, test invoking the command without that field.
     * Test twice: once with the other required fields specified in the
     * environment and once with the other required fields specified on the
     * command line.
     */
    subcmdinfo.required.forEach(function (required, i) {
        funcs.push(function (callback) {
            testSubcmdMissingRequired(subcmd, subcmdinfo, required, true,
                callback);
        });
        funcs.push(function (callback) {
            testSubcmdMissingRequired(subcmd, subcmdinfo, required, false,
                callback);
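// Illustrative sketch of the input genTestSubcmd expects (the subcommand
// name and the required-argument names here are made up):
var funcs = [];
genTestSubcmd(funcs, 'create-instance', {
    required: ['SERVICE', 'IMAGE', 'NETWORK']
});
// funcs now holds two test functions per required argument: one that omits
// it while the remaining arguments come from the environment, and one that
// omits it while they come from the command line.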
function testSubcmdMissingRequired(subcmd, subcmdinfo, required, useenv,
    callback)
{
    var args;

    assertplus.string(subcmd, 'subcmd');
    assertplus.object(subcmdinfo, 'subcmdinfo');
    assertplus.arrayOfString(subcmdinfo.required, 'subcmdinfo.required');
    assertplus.string(required, 'required');
    assertplus.bool(useenv, 'useenv');

    testStart('subcmd "%s" without required arg "%s" (%s)',
        subcmd, required, useenv ? 'using env vars' : 'using args');
    args = subcmdinfo.required.filter(function (a) { return (a != required); });
    assertplus.equal(args.length, subcmdinfo.required.length - 1);
    execChildForTest(subcmd, args, useenv, function (cmdresult) {
        if (!cmdResultCompleted(cmdresult))
            testFail('command timed out', cmdresult);
        else if (!cmdResultExitUsage(cmdresult))
            testFail('command exited with wrong status', cmdresult);
        else if (!cmdResultMissingRequired(cmdresult, required))
            testFail('no message about missing arg', cmdresult);
        else if (!cmdResultHasUsage(cmdresult, 'stderr'))
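// Worked example of the filter above (values are illustrative): with
// subcmdinfo.required = ['SERVICE', 'IMAGE'] and required = 'IMAGE', args
// becomes ['SERVICE'], so the child command runs with every required
// argument except the one whose absence is under test, and the result is
// expected to be a usage error that names the missing argument.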
function zfsExecCommon(log, argv, callback) {
    mod_assert.object(log, 'log');
    mod_assert.arrayOfString(argv, 'args');
    mod_assert.func(callback, 'callback');

    /*
     * Note that we do not pass our environment on to the "zfs" command, in
     * order to avoid environment-dependent behaviour; e.g., locale-specific
     * error messages or output formatting. Buffer up to 2MB of output from
     * the ZFS command.
     */
    var opts = {
        argv: [ '/sbin/zfs' ].concat(argv),
        env: {},
        maxBuffer: 2 * 1024 * 1024,
        includeStderr: true
    };

    log.debug({ argv: argv }, 'exec zfs start');
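// Worked example of how the opts above compose (the argv values are
// illustrative): zfsExecCommon(log, ['list', '-H', '-o', 'name'], cb) builds
//   {
//       argv: ['/sbin/zfs', 'list', '-H', '-o', 'name'],
//       env: {},                      // no inherited environment
//       maxBuffer: 2097152,           // up to 2MB of buffered output
//       includeStderr: true
//   }
// before handing off to the (truncated) exec step.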
Request.prototype.acceptsEncoding = function acceptsEncoding(types) {
    if (typeof types === 'string') {
        types = [types];
    }

    assert.arrayOfString(types, 'types');

    negotiator(this);

    return this._negotiator.preferredEncoding(types);
};
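// Hedged usage sketch inside a restify handler (the route and handler body
// are made up; `server` is assumed to be a restify server created elsewhere):
server.get('/data', function (req, res, next) {
    // Returns whichever of the offered encodings the client prefers,
    // according to its Accept-Encoding header.
    var encoding = req.acceptsEncoding(['gzip', 'identity']);
    res.send({preferredEncoding: encoding});
    next();
});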
function checkRequiredIndexes(req, cb) {
    var log = req.log;
    var unusableIndexes;

    if (req.opts && (
        req.opts.requireIndexes === true ||
        req.opts.requireOnlineReindexing === true)) {
        unusableIndexes =
            common.getUnusableIndexes(req.filter, req.bucket, log);

        assert.object(unusableIndexes, 'unusableIndexes');
        assert.arrayOfString(unusableIndexes.unindexedFields,
            'unusableIndexes.unindexedFields');
        assert.arrayOfString(unusableIndexes.reindexingFields,
            'unusableIndexes.reindexingFields');

        if (unusableIndexes.unindexedFields.length > 0 ||
            unusableIndexes.reindexingFields.length > 0) {
            log.error('filter uses unusable indexes');
            cb(new errors.NotIndexedError({}, req.bucket.name, req.rawFilter, {
                unindexedFields: unusableIndexes.unindexedFields,
                reindexingFields: unusableIndexes.reindexingFields
            }));
            return;
        } else {
            log.trace('filter does not use unusable indexes');
        }
    }

    cb();
}
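// Illustrative behaviour of checkRequiredIndexes (the field name is made up):
// with req.opts.requireIndexes === true and a filter like
// "(not_yet_indexed=foo)", getUnusableIndexes reports the field as unindexed
// and the request fails fast with NotIndexedError; if every field used by
// the filter is indexed and not mid-reindex, cb() is called with no error.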
function AmbiguousDockerImageIdError(imgId, registries) {
    assert.string(imgId, 'imgId');
    assert.arrayOfString(registries, 'registries');

    var message = fmt('image id "%s" does not unambiguously identify a '
        + 'single image because it has been pulled from multiple '
        + 'registries: %s; use "repo:tag" if possible', imgId,
        registries.join(', '));
    _DockerBaseError.call(this, {
        restCode: this.constructor.restCode,
        statusCode: this.constructor.statusCode,
        message: message
    });
}
util.inherits(AmbiguousDockerImageIdError, _DockerBaseError);
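// Hedged construction sketch (the image id and registry names are made up):
// new AmbiguousDockerImageIdError('1a2b3c4d5e6f', ['docker.io', 'quay.io'])
// produces the message:
//   image id "1a2b3c4d5e6f" does not unambiguously identify a single image
//   because it has been pulled from multiple registries: docker.io, quay.io;
//   use "repo:tag" if possible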
function rowToObject(bucket, ignore, row) {
    assert.object(bucket, 'bucket');
    assert.arrayOfString(ignore, 'ignore');
    assert.object(row, 'row');

    var obj = {
        bucket: bucket.name,
        key: row._key,
        value: JSON.parse(row._value),
        _id: rowExtractId(bucket, row._key, row),
        _etag: row._etag,
        _mtime: parseInt(row._mtime, 10),
        _txn_snap: row._txn_snap,
        _count: parseInt(row._count, 10)
    };
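    // Illustrative mapping (row values are made up): a Postgres row such as
    //   { _key: 'mykey', _value: '{"a": 1}', _id: '42', _etag: 'E1',
    //     _mtime: '1500000000000', _txn_snap: null, _count: '7' }
    // becomes
    //   { bucket: bucket.name, key: 'mykey', value: {a: 1},
    //     _id: <from rowExtractId()>, _etag: 'E1', _mtime: 1500000000000,
    //     _txn_snap: null, _count: 7 }
    // with the numeric columns parsed out of their string representations.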
    /*
     * Moray supports 'update', which updates the Postgres columns, but not
     * the serialized JSON. Here, we do the following: