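// Example: run a list of test definitions as a single TaskGroup, printing any
// failure and exiting non-zero.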
function tests(opts, list) {
return new TaskGroup({
tasks: list.map(function(item) {
Object.keys(opts).forEach(function(key) {
item[key] = opts[key]
})
return test(item)
})
})
.done(function(err) {
if (err) {
/* eslint no-console:0 */
console.error(err.stack || err.message || err)
process.exit(1)
}
})
.run()
}
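
// Example: run one test script in a child process, wrapped in a Task.
// Assumes `Task` from taskgroup, `spawn` from 'child_process', and a local
// `sep` helper are in scope.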
function test(opts) {
const { expected, script, reporter } = opts
return new Task(function(complete) {
// Prepare
const expectedCleaned = expected.trim()
// Test Reporter
let output = ''
const env = { KAVA_REPORTER: reporter, ...process.env }
const args = [script, '--no-colors']
const str = args.join(' ')
process.stdout.write(sep(str.length, '=') + '\n' + str + '\n\n')
const runner = spawn('node', args, { env })
runner.stdout.on('data', function(data) {
output += data
process.stdout.write(data)
})
runner.stderr.on('data', function(data) {
output += data
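// …

// Example: a file-watcher change listener that batches its follow-up checks
// into a TaskGroup. The fragment begins inside the delayed callback that runs
// the queued listener tasks.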
this.listenerTaskGroup = null
this.listenerTimeout = null
tasks.run()
} else {
this.emit('error', new Error('unexpected state'))
}
}, config.catchupDelay || 0)
// We are a subsequent listener, in which case, just listen to the first listener tasks
if (this.listenerTaskGroup != null) {
this.listenerTaskGroup.done(next)
return this
}
// Start the detection process
const tasks = (this.listenerTaskGroup = new TaskGroup(
`listener tasks for ${this.path}`,
{ domain: false }
).done(next))
tasks.addTask('check if the file still exists', complete => {
// Log
this.log(
'debug',
`watch evaluating on: ${this.path} [state: ${this.state}]`
)
// Check if this is still needed
if (this.state !== 'active') {
this.log('debug', `watch discarded on: ${this.path}`)
tasks.clearRemaining()
return complete()
}
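// …

// Example: gather package and readme file paths with a parallel TaskGroup
// (a concurrency of 0 means no concurrency limit).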
loadPaths(next /* :function */) /* :this */ {
// Create the parallel task group and once they've all completed fire our completion callback
const tasks = new TaskGroup().setConfig({ concurrency: 0 }).done(next)
// Apply our determined paths for packages
const packages = Object.keys(this.filenamesForPackageFiles)
const readmes = Object.keys(this.filenamesForReadmeFiles)
// Load
tasks.addTask(complete => {
readdir(this.cwd, (err, files) => {
if (err) return complete(err)
files.forEach(file => {
const filePath = join(this.cwd, file)
packages.forEach(key => {
const basename = file
.toLowerCase()
.split('.')
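// …

// Example (projectz): save every package file in parallel, one named task per file.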
save(next /* :function */) /* :this */ {
// Prepare
this.log('info', 'Writing changes...')
const tasks = new TaskGroup().setConfig({ concurrency: 0 }).done(err => {
if (err) return next(err)
this.log('info', 'Wrote changes')
return next()
})
// Save package files
eachr(this.filenamesForPackageFiles, (filename, name) => {
if (!filename || name === 'projectz') return
const filepath = join(this.cwd, filename)
const message = `Saving package file: ${filepath}`
tasks.addTask(message, complete => {
this.log('info', message)
const data =
JSON.stringify(this.dataForPackageFilesEnhanced[name], null, ' ') +
'\n'
writeFile(filepath, data, complete)
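// …

// Example: a serial TaskGroup that loads paths, merges package data, and then
// fetches GitHub contributor data.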
load(next /* :function */) /* :this */ {
// Reset
this.reset()
// Create our serial task group to allot our tasks into and once it completes continue to the next handler
const tasks = new TaskGroup().done(next)
// Load readme and package data
tasks.addTask('loadPaths', this.loadPaths.bind(this))
// Merge our package data
tasks.addTask('mergeData', this.mergeData.bind(this))
// Fetch the latest contributors. This is after the merging as we access merged properties to be able to do this.
tasks.addTask('loadGithubContributors', complete => {
this.loadGithubContributors(err => {
if (err)
this.log(
'warn',
'Loading contributor data failed, continuing anyway. Here was the error:\n' +
err.stack
)
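// …

// Example (watchr): after scanning a directory, watch each child path with
// one task per child, all running in parallel.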
next(err, list) {
if (err) return next(err)
const tasks = new TaskGroup(`scandir tasks for ${path}`, {
domain: false,
concurrency: 0
}).done(next)
Object.keys(list).forEach(function(relativePath) {
tasks.addTask(function(complete) {
const fullPath = pathUtil.join(path, relativePath)
// Check we are still relevant
if (watchr.state !== 'active') return complete()
// Watch this child
watchr.watchChild({ fullPath, relativePath }, complete)
})
})
tasks.run()
}
})
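
// Example (Feedr): write a fetched feed to disk using a parallel TaskGroup.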
function writeFeed(response, data, writeFeedComplete) {
// Log
me.log('debug', `Feedr === writing [${feed.url}] to [${feed.path}]`)
// Prepare
const writeTasks = TaskGroup.create({ concurrency: 0 }).done(function(err) {
if (err) {
// Log
me.log(
'warn',
`Feedr === writing [${feed.url}] to [${feed.path}], write failed`,
err.stack
)
// Exit
writeFeedComplete(err)
return
}
// Log
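// …

// Example (Feedr): read a cached feed back from disk with a serial TaskGroup.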
function viaCache(viaCacheComplete) {
// Log
me.log('debug', `Feedr === remembering [${feed.url}] from cache`)
// Prepare
let meta = null
let data = null
const readTasks = TaskGroup.create().done(function(err) {
viaCacheComplete(err, data, meta && meta.headers)
})
readTasks.addTask('read the meta data in a cache somewhere', function(viaCacheTaskComplete) {
readMetaFile(feed.metaPath, function(err, result) {
if (err || !result) {
viaCacheTaskComplete(err)
return
}
meta = result
viaCacheTaskComplete()
})
})
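// …

// Example (Feedr): collect the feeds to fetch, then fetch them all with a
// parallel TaskGroup that tolerates individual failures (abortOnError: false).
// The fragment begins inside the argument-parsing loop.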
} else if (typeChecker.isArray(arg)) {
feeds = arg
} else if (typeChecker.isPlainObject(arg)) {
if (index === 0) {
feeds = arg
} else {
extendr.extend(defaultfeed, arg)
}
}
})
// Extract
const results = {}
// Tasks
const tasks = TaskGroup.create({
concurrency: 0,
abortOnError: false
}).done(function() {
let message = 'Feedr finished fetching'
let err = null
if (failures.length !== 0) {
message +=
` with ${failures.length} failures:\n` +
failures
.map(function(i) {
return i.message
})
.join('\n')
err = new Error(message)
me.log('warn', err)
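// …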