Secure your code as it's written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.
// Scan tarball entries until the bundled npm-shrinkwrap.json is found,
// parse it, and deliver the parsed object through `cb`. All other
// entries are drained so the tar parser can advance.
extract.on('entry', function onEntry (header, fileStream, next) {
  if (header.name === 'package/npm-shrinkwrap.json') {
    opts.log.silly('extract-shrinkwrap', 'found shrinkwrap')
    // got a shrinkwrap! Now we don't need to look for entries anymore.
    extract.removeListener('entry', onEntry)
    // Grab all the file data off the entry fileStream.
    // NOTE(review): Buffer chunks are coerced to utf8 strings by `+=`;
    // assumes the shrinkwrap file is valid UTF-8 JSON — confirm.
    var data = ''
    fileStream.on('data', function (d) { data += d })
    finished(fileStream, function (err) {
      if (err) { return extract.emit('error', err) }
      try {
        shrinkwrap = JSON.parse(data)
      } catch (e) {
        // Fix: bail out on a parse failure. Previously execution fell
        // through and called cb(null, undefined) after emitting 'error'.
        return extract.emit('error', e)
      }
      // By destroying `unzipped`, this *should* stop `tar-stream`
      // from continuing to waste resources on tarball parsing.
      unzipped.unpipe()
      cb(null, shrinkwrap)
    })
  } else {
    // Not a shrinkwrap. Autodrain this entry, and move on to the next.
    fileStream.resume()
    next()
  }
  // NOTE(review): the block below (cacache.put + tap assertions) reads
  // like an unrelated test fragment fused into this handler — verify
  // against the original source before relying on it.
  cacache.put(CACHE, KEY, CONTENTS, OPTS, function (err) {
    if (err) { throw err }
    t.comment('mock cache contents inserted')
    var contents = ''
    var meta
    var stream = cache.get.stream(
      CACHE, KEY, OPTS
    ).on('data', function (d) {
      contents += d
    }).on('metadata', function (m) {
      meta = m
    })
    finished(stream, function (err) {
      if (err) { throw err }
      t.equal(contents, CONTENTS, 'stream extracted cache contents correctly')
      t.deepEqual(meta, OPTS.metadata, 'metadata contents correct')
      t.done()
    })
  })
})
// Log the response headers, then assemble the export pipeline. Each
// stage transforms the raw NDJSON response one step closer to the
// final serialized output; stage order matters.
debug('Response headers: %o', inputStream.headers)
const exportStages = [
  inputStream,
  logFirstChunk(),
  split(JSON.parse),
  rejectOnApiError(),
  filterSystemDocuments(),
  assetStreamHandler,
  filterDocumentTypes(options.types),
  // When drafts are kept, a pass-through stage takes the filter's slot.
  options.drafts ? miss.through.obj() : filterDrafts(),
  stringifyStream(),
  miss.through(reportDocumentCount)
]
const jsonStream = miss.pipeline(...exportStages)
// Runs once the export pipeline ends (successfully or with an error).
// NOTE(review): a pipeline error is silently swallowed here (bare
// early return, no logging or rejection) — confirm the error is
// surfaced by another handler elsewhere in the file.
miss.finished(jsonStream, async err => {
if (err) {
return
}
// Emit a final progress tick so consumers see the export at 100%.
onProgress({
step: 'Exporting documents...',
current: documentCount,
total: documentCount,
update: true
})
if (!options.raw && options.assets) {
onProgress({step: 'Downloading assets...'})
}
let prevCompleted = 0
// NOTE(review): the block below looks like an unrelated fragment
// (registry fetch via BB.fromNode/getStream); `uri`, `registry` and
// `getStream` are not defined anywhere in this chunk — verify against
// the original source.
// Accumulate the response body as a string; a 'reset' event (e.g. a
// retried request) discards whatever was buffered so far.
return BB.fromNode(cb => {
var raw = ''
var stream = getStream(uri, registry, opts)
stream.on('data', function (d) { raw += d })
stream.on('reset', function () { raw = '' })
finished(stream, function (err) {
if (err) { return cb(err) }
// `var` hoists `parsed` to function scope, so it remains visible
// after the try/catch.
try {
var parsed = JSON.parse(raw)
} catch (e) {
return cb(e)
}
return cb(null, parsed)
})
})
})
// NOTE(review): the Promise executor opened here is never closed
// within this chunk — the fragment appears truncated; verify against
// the full file before editing.
return new Promise(async (resolve, reject) => {
// Clean up and reject if archiving fails.
miss.finished(archive, async archiveErr => {
if (archiveErr) {
debug('Archiving errored! %s', archiveErr.stack)
await cleanup()
reject(archiveErr)
return
}
// NOTE(review): the success path only logs — `resolve` is not called
// here; presumably resolution happens elsewhere — confirm.
debug('Archive finished!')
})
debug('Getting dataset export stream')
onProgress({step: 'Exporting documents...'})
let documentCount = 0
let lastReported = Date.now()
// NOTE(review): named like a through-stream transform (chunk, enc, cb)
// — matching its use in the pipeline above — yet the body never calls
// `cb` or touches `chunk`, and instead returns a Promise hashing a
// `stream` from enclosing scope. Looks like two fragments fused
// together; confirm against the original sources.
const reportDocumentCount = (chunk, enc, cb) => {
return new Promise((resolve, reject) => {
// Accumulate raw chunks while feeding a SHA-1 hash, then settle with
// both the concatenated buffer and its hex digest.
const hash = crypto.createHash('sha1')
const chunks = []
stream.on('data', chunk => {
chunks.push(chunk)
hash.update(chunk)
})
miss.finished(stream, err => {
if (err) {
reject(err)
return
}
resolve({
buffer: Buffer.concat(chunks),
sha1hash: hash.digest('hex')
})
})
})
}