// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// NOTE(review): this chunk contains several overlapping copies of the same
// worker script. This first copy was missing the requires for `pgcache`,
// `config` and `http` even though they are used below (ReferenceError at
// module load); the missing lines are reconstructed from the intact later
// copies of this same setup block.
var pgcache = require('koop-pgcache')
var config = require('config')
var http = require('http')
var https = require('https')
var util = require('util')
// NOTE(review): `koop` and `kue` are referenced below but never required in
// any visible copy of this file — confirm they are defined above/elsewhere.
// set global number of sockets if in the config
// node version > 0.12 sets max sockets to infinity
if (config.sockets) {
// split the socket budget evenly between http and https agents
http.globalAgent.maxSockets = Math.floor(config.sockets / 2)
https.globalAgent.maxSockets = Math.floor(config.sockets / 2)
}
// Init Koop with things it needs like a log and Cache
koop.log = new koop.Logger(config)
koop.Cache = new koop.DataCache(koop)
// registers a DB modules
koop.Cache.db = pgcache.connect(config.db.conn, koop)
// Create the job queue for this worker process
// connects to the redis same redis
var jobs = kue.createQueue({
prefix: config.redis.prefix,
redis: {
port: config.redis.port,
host: config.redis.host
}
})
// NOTE(review): corrupted/truncated fragment. The SIGINT handler below is
// never closed in this chunk, and `domain`, `job`, `callback` and
// `FeatureService` are not defined in the scope visible here — this appears
// to be a kue job-processor body spliced into the middle of the shutdown
// handler by a bad merge. Left byte-identical pending reconstruction.
process.once('SIGINT', function (sig) {
jobs.active(function (err, ids) {
if (err) {
koop.log.error(util.inspect(err))
}
// NOTE(review): abrupt jump — the rest of the jobs.active callback is
// missing; the domain.run body below belongs to the job processor.
domain.run(function () {
koop.log.info('starting job ' + job.id + ': ' + job.data.itemId + '_' + job.data.layerId)
var completed = 0
var len = job.data.pages.length
var featureService = new FeatureService(job.data.serviceUrl, {logger: koop.log})
// aggregate responses into one json and call done we have all of them
// start the requests
featureService.pageQueue.push(job.data.pages, function (error, json) {
if (error || !json) {
featureService.pageQueue.kill()
throw error
}
// transform the data into geojson
koop.GeoJSON.fromEsri(job.data.fields || [], json, function (err, geojson) {
if (err) return callback(err)
koop.Cache.insertPartial('agol', job.data.itemId, geojson, job.data.layerId, function (err) {
if (err) {
featureService.pageQueue.kill()
// NOTE(review): logs the literal string 'err' instead of the error value —
// presumably meant koop.log.error(err); confirm before changing.
koop.log.error('err')
// NOTE(review): duplicated nested insertPartial — this inner call repeats
// the enclosing one and looks like copy/paste corruption; the later intact
// copies of this block have a single insertPartial whose error branch builds
// the db Error directly.
koop.Cache.insertPartial('agol', job.data.itemId, geojson, job.data.layerId, function (err) {
if (err) {
featureService.pageQueue.kill()
koop.log.error('err')
var error = new Error('Error inserting rows into the db')
error.type = 'db'
throw error
}
completed++
koop.log.info(completed + ' pages of ' + len + ' completed. ' + job.id + ': ' + job.data.itemId + '_' + job.data.layerId)
job.progress(completed, len)
if (completed === len) {
// all pages inserted: clear the processing status on the cached layer info
var key = ['agol', job.data.itemId, job.data.layerId].join(':')
koop.Cache.getInfo(key, function (err, info) {
if (err) throw err
if (info && info.status) delete info.status
// if we've made it this far it's a success so return done with no argument
koop.Cache.updateInfo(key, info, function () {
return callback()
})
})
}
})
})
// NOTE(review): duplicate copy of the worker setup block (same statements as
// the top of this chunk) — the file appears to be a corrupted concatenation
// of overlapping copies. Left byte-identical; deduplicate once the intended
// single copy is established. `koop` and `kue` are still never required in
// any visible copy — confirm where they come from.
var pgcache = require('koop-pgcache')
var config = require('config')
var http = require('http')
var https = require('https')
var util = require('util')
// set global number of sockets if in the config
// node version > 0.12 sets max sockets to infinity
if (config.sockets) {
http.globalAgent.maxSockets = Math.floor(config.sockets / 2)
https.globalAgent.maxSockets = Math.floor(config.sockets / 2)
}
// Init Koop with things it needs like a log and Cache
koop.log = new koop.Logger(config)
koop.Cache = new koop.DataCache(koop)
// registers a DB modules
koop.Cache.db = pgcache.connect(config.db.conn, koop)
// Create the job queue for this worker process
// connects to the redis same redis
var jobs = kue.createQueue({
prefix: config.redis.prefix,
redis: {
port: config.redis.port,
host: config.redis.host
}
})
// NOTE(review): second truncated copy of the shutdown-handler/job-processor
// splice. The jobs.active callback body is missing and the fragment cuts off
// inside the getInfo callback; `featureService`, `job`, `callback`,
// `completed` and `len` are undefined in the visible scope. Left
// byte-identical pending reconstruction.
process.once('SIGINT', function (sig) {
jobs.active(function (err, ids) {
featureService.pageQueue.push(job.data.pages, function (error, json) {
if (error || !json) {
featureService.pageQueue.kill()
throw error
}
// transform the data into geojson
koop.GeoJSON.fromEsri(job.data.fields || [], json, function (err, geojson) {
if (err) return callback(err)
koop.Cache.insertPartial('agol', job.data.itemId, geojson, job.data.layerId, function (err) {
if (err) {
featureService.pageQueue.kill()
// NOTE(review): logs the literal string 'err' rather than the error value —
// presumably meant koop.log.error(err); confirm before changing.
koop.log.error('err')
var error = new Error('Error inserting rows into the db')
error.type = 'db'
throw error
}
completed++
koop.log.info(completed + ' pages of ' + len + ' completed. ' + job.id + ': ' + job.data.itemId + '_' + job.data.layerId)
job.progress(completed, len)
if (completed === len) {
// all pages inserted: clear the processing status on the cached layer info
var key = ['agol', job.data.itemId, job.data.layerId].join(':')
koop.Cache.getInfo(key, function (err, info) {
// NOTE(review): third duplicate copy of the worker setup block. This copy is
// the most complete — it also requires `featureservice` (used by the job
// processor fragments above). `koop` and `kue` are still never required in
// any visible copy — confirm where they come from. Left byte-identical;
// deduplicate once the intended single copy is established.
var FeatureService = require('featureservice')
var pgcache = require('koop-pgcache')
var config = require('config')
var http = require('http')
var https = require('https')
var util = require('util')
// set global number of sockets if in the config
// node version > 0.12 sets max sockets to infinity
if (config.sockets) {
http.globalAgent.maxSockets = Math.floor(config.sockets / 2)
https.globalAgent.maxSockets = Math.floor(config.sockets / 2)
}
// Init Koop with things it needs like a log and Cache
koop.log = new koop.Logger(config)
koop.Cache = new koop.DataCache(koop)
// registers a DB modules
koop.Cache.db = pgcache.connect(config.db.conn, koop)
// Create the job queue for this worker process
// connects to the redis same redis
var jobs = kue.createQueue({
prefix: config.redis.prefix,
redis: {
port: config.redis.port,
host: config.redis.host
}
})
// NOTE(review): third truncated copy — the SIGINT handler jumps straight into
// the fromEsri job-processor body (no jobs.active call at all this time) and
// the fragment runs past the end of this chunk. `json`, `job`, `callback`,
// `featureService`, `completed` and `len` are undefined in the visible scope.
// Left byte-identical pending reconstruction.
process.once('SIGINT', function (sig) {
koop.GeoJSON.fromEsri(job.data.fields || [], json, function (err, geojson) {
if (err) return callback(err)
koop.Cache.insertPartial('agol', job.data.itemId, geojson, job.data.layerId, function (err) {
if (err) {
featureService.pageQueue.kill()
// NOTE(review): logs the literal string 'err' rather than the error value —
// presumably meant koop.log.error(err); confirm before changing.
koop.log.error('err')
var error = new Error('Error inserting rows into the db')
error.type = 'db'
throw error
}
completed++
koop.log.info(completed + ' pages of ' + len + ' completed. ' + job.id + ': ' + job.data.itemId + '_' + job.data.layerId)
job.progress(completed, len)
if (completed === len) {
// all pages inserted: clear the processing status on the cached layer info
var key = ['agol', job.data.itemId, job.data.layerId].join(':')
koop.Cache.getInfo(key, function (err, info) {
if (err) throw err
if (info && info.status) delete info.status