Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
[_istream] (stream) {
// everyone will need one of these, either for verifying or calculating
// We always set it, because we have might only have a weak legacy hex
// sha1 in the packument, and this MAY upgrade it to a stronger algo.
// If we had an integrity, and it doesn't match, then this does not
// override that error; the istream will raise the error before it
// gets to the point of re-setting the integrity.
const istream = ssri.integrityStream(this.opts)
istream.on('integrity', i => this.integrity = i)
return stream.on('error', er => istream.emit('error', er)).pipe(istream)
}
return;
}
fs.futimes(fd, atime, mtime, err => {
if (err) {
fs.close(fd, () => cb(err));
} else {
fs.close(fd, err => cb(err));
}
});
});
});
},
});
const hashValidateStream = new ssri.integrityStream(hashInfo);
const integrityValidateStream = new ssri.integrityStream(integrityInfo);
const untarStream = tarFs.extract(this.dest, {
strip: 1,
dmode: 0o755, // all dirs should be readable
fmode: 0o644, // all files should be readable
chown: false, // don't chown. just leave as it is
map: header => {
header.mtime = now;
return header;
},
fs: patchedFs,
});
const extractorStream = gunzip();
hashValidateStream.once('error', err => {
this.validateError = err;
cb(err);
return;
}
fs.futimes(fd, atime, mtime, err => {
if (err) {
fs.close(fd, () => cb(err));
} else {
fs.close(fd, err => cb(err));
}
});
});
});
},
});
const hashValidateStream = new ssri.integrityStream(hashInfo);
const integrityValidateStream = new ssri.integrityStream(integrityInfo);
const untarStream = tarFs.extract(this.dest, {
strip: 1,
dmode: 0o755, // all dirs should be readable
fmode: 0o644, // all files should be readable
chown: false, // don't chown. just leave as it is
map: header => {
header.mtime = now;
return header;
},
fs: patchedFs,
});
const extractorStream = gunzip();
hashValidateStream.once('error', err => {
// Append a disk-read stage plus an integrity-verification stage to the
// given pipeline: reads the cache entry at `cpath` and checks it against
// the expected subresource integrity `sri` as the bytes flow through.
// Returns the same pipeline for chaining.
const readPipeline = (cpath, size, sri, stream) => {
  const fileReader = new fsm.ReadStream(cpath, {
    size,
    readSize: MAX_SINGLE_READ_SIZE
  })
  const integrityCheck = ssri.integrityStream({
    integrity: sri,
    size
  })
  stream.push(fileReader, integrityCheck)
  return stream
}
return BB.resolve().then(() => {
let integrity
let size
const hashStream = ssri.integrityStream({
integrity: opts.integrity,
algorithms: opts.algorithms,
size: opts.size
}).on('integrity', s => {
integrity = s
}).on('size', s => {
size = s
})
const outStream = fs.createWriteStream(tmpTarget, {
flags: 'wx'
})
errCheck()
return pipe(inputStream, hashStream, outStream).then(() => {
return {integrity, size}
}, err => {
return rimraf(tmpTarget).then(() => { throw err })
return Promise.resolve().then(() => {
let integrity
let size
const hashStream = ssri
.integrityStream({
integrity: opts.integrity,
algorithms: opts.algorithms,
size: opts.size
})
.on('integrity', (s) => {
integrity = s
})
.on('size', (s) => {
size = s
})
const outStream = fs.createWriteStream(tmpTarget, {
flags: 'wx'
})
errCheck()
return pipe(
return JSON.parse(info.metadata)
}
}
const tarballStream = libnpm.tarball.stream(spec, opts.concat({
integrity,
resolved,
log: npmlog,
cache: null
}))
let unpacker = new CacacheUnpacker({
strip: 1,
cache,
warn: err => npmlog.warn('ensure-package', err.message)
})
if (!integrity) {
unpacker = ssri.integrityStream({
algorithms: ['sha256']
}).on('integrity', i => { integrity = i }).pipe(unpacker)
}
return new Promise((resolve, reject) => {
unpacker.on('error', reject)
tarballStream.on('error', reject)
let metadata
unpacker.on('metadata', (m) => { metadata = m })
unpacker.on('finish', async () => {
try {
const key = pkglock.depKey(name, dep)
const doc = Object.assign({}, metadata, {
name: name,
version: dep.version,
integrity: integrity.toString(),
resolved