export function unpack (archivePath, progressGroups, targetPath) {
  validate('SOS', arguments)
  const name = basename(archivePath)
  let gauge = progressGroups.get(name)
  if (!gauge) {
    gauge = log.newGroup(name)
    progressGroups.set(name, gauge)
  }
  const unpacked = join(targetPath, createHash('sha1').update(archivePath).digest('hex'))
  return Bluebird.join(
    mkdirp(unpacked),
    stat(archivePath)
  ).spread((p, stats) => new Bluebird((resolve, reject) => {
    gauge.verbose('unzip', 'unpacking', archivePath, 'to', unpacked)
    // openZip can't be promisified because the object stream it yields doesn't
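
Every one of these excerpts opens by checking its arguments with validate() and a short signature string. A minimal sketch of that pattern, assuming the aproba-style type codes ('S' = string, 'O' = object, 'A' = array, 'F' = function) that these signatures appear to use:

// Hypothetical illustration of the signature-string validation pattern above;
// 'SOS' means the caller must pass (string, object, string), in that order.
import validate from 'aproba'

function label (name, opts, suffix) {
  validate('SOS', arguments)
  return opts.upper ? (name + suffix).toUpperCase() : name + suffix
}

label('track', { upper: true }, '.flac') // ok
// label(42, {}, '.flac')                // throws: first argument is not a string
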
export default function transcode (track, destination, encoder, profile) {
  validate('OSSS', arguments)
  // ensure we've been given a FLAC
  if (extname(track.file.path) !== '.flac') {
    throw new TypeError('Transcode only FLAC files, for now.')
  }
  const working = join(destination, fingerprint64(basename(track.file.path, '.flac')) + '.mp3')
  const final = join(destination, basename(track.file.path, '.flac') + '.mp3')
  const runTranscode = Bluebird.all([which('flac'), which(encoder)]).then(([flac, encoder]) => {
    log.silly('transcode', 'flac binary lives at', flac)
    log.verbose('transcode', 'encoder binary lives at', encoder)
    // ensure destination exists
    mkdirp(destination).then(() => {
      // then run a streaming transcode
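
The transcode excerpt is cut off before the streaming pipeline, but the call shape follows from its signature. A hedged usage sketch, where the destination, encoder name, and profile are invented values, the track object carries only the field the visible checks read, and the function is assumed to eventually return its runTranscode promise chain:

// Hypothetical call; only track.file.path is exercised by the code shown above.
const track = { file: { path: '/music/rips/01 Intro.flac' } }

transcode(track, '/tmp/transcodes', 'lame', '-V 0')
  .then(() => console.log('transcoded', track.file.path))
  .catch((err) => console.error('transcode failed:', err.message))
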
export default function validateGenreTag (track, warnings) {
  validate('OA', arguments)
  if (track.tags && track.tags.genre) {
    const genre = track.tags.genre
    if (!genres.has(genre)) {
      warnings.push('has unknown genre ' + genre)
    }
    if (genre.match(/^[ A-Z/-]+$/) && !genres.has(genre)) {
      warnings.push('has all-caps genre ' + genre)
    }
  } else {
    warnings.push('has no genre set')
  }
}
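
A quick sketch of driving this validator; the track shapes mirror the fields the function reads, the genre string is made up, and the expected output assumes that string is not in the known-genre set:

// Hypothetical usage: gather human-readable warnings for a couple of tracks.
const warnings = []
validateGenreTag({ tags: { genre: 'PROGRESSIVE POLKA' } }, warnings)
validateGenreTag({ tags: {} }, warnings)
console.log(warnings)
// [ 'has unknown genre PROGRESSIVE POLKA',
//   'has all-caps genre PROGRESSIVE POLKA',
//   'has no genre set' ]
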
function toUncompressedFile (md, path, onFinish, onError) {
  validate('OSFF', arguments)
  gauge.silly('unzip.toUncompressedFile', 'writing', path, '(' + md.uncompressedSize + 'B)')
  const filename = basename(md.fileName)
  const writeGauge = progressGroups.get(filename).newStream(
    'writing: ' + path,
    md.uncompressedSize,
    3
  ).on('error', onError)
  const extracted = createWriteStream(path)
    .on('error', onError)
    .on('finish', () => {
      gauge.verbose('unzip.toUncompressedFile', 'wrote', path)
      onFinish({ path })
    })
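
toUncompressedFile is cut off before the writeGauge and extracted streams are connected to anything. With yauzl-style zip entries (md has fileName, uncompressedSize, and getLastModDate(), which matches yauzl's Entry), the missing wiring would typically look like the sketch below; zipfile is assumed to be in scope, and this is an illustration, not the project's actual code:

// Hypothetical wiring: stream the entry through the progress tracker so the
// gauge advances as bytes land in the file created by createWriteStream above.
zipfile.openReadStream(md, (err, entryStream) => {
  if (err) return onError(err)
  entryStream
    .on('error', onError)
    .pipe(writeGauge)
    .pipe(extracted)
})
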
export default function blockSizeFromPath (path) {
  validate('S', arguments)
  const base = resolve(path)
  const probe = join(
    base,
    'packzzz-test-' + pseudoRandomBytes(4).toString('hex')
  )
  return mkdirp(base)
    .then(() => writeFile(probe, 'lol'))
    .then(() => stat(probe))
    .then((stats) => rimraf(probe).return(stats.blksize))
}
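
A sketch of calling blockSizeFromPath; the directory is a made-up example, and the resolved value is the blksize reported by fs.Stats for the probe file:

// Hypothetical usage: find the block size of the filesystem backing a staging dir.
blockSizeFromPath('/tmp/staging')
  .then((blockSize) => console.log('filesystem block size:', blockSize))
  .catch((err) => console.error('could not probe block size:', err.message))
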
function toMetadataReader (md, path, onFinish, onError) {
  validate('OSFF', arguments)
  // fake an fs.Stats-shaped object from the zip entry's metadata
  const zipStats = {
    size: md.uncompressedSize,
    atime: md.getLastModDate(),
    mtime: md.getLastModDate(),
    ctime: md.getLastModDate(),
    birthtime: md.getLastModDate(),
    uid: process.getuid(),
    gid: process.getgid()
  }
  return reader(
    { path: path, stats: zipStats },
    progressGroups,
    onFinish,
    onError
  )
}