Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// Promote Bluebird as the global Promise implementation (legacy pattern;
// native promises in modern Node usually make this unnecessary).
global.Promise = require('bluebird');
var _ = require('underscore');
var AWS = require('aws-sdk');
try {
// SQS client configured from environment variables rather than the
// default AWS credential provider chain.
var sqs = new AWS.SQS({
region: process.env.AWS_SQS_REGION,
accessKeyId: process.env.AWS_SQS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SQS_SECRET_ACCESS_KEY
});
var client = require('./../client/loopback.js');
// the remote datasource
var remoteDS = client.dataSources.remoteDS;
// the strong-remoting RemoteObjects instance
var remotes = remoteDS.connector.remotes;
var ReportModel = client.models.ReportModel;
}
catch (e) {
// NOTE(review): `messageId` is not declared anywhere in the visible scope;
// if this catch is ever reached, this line itself throws a ReferenceError
// and masks the original error — confirm where messageId should come from.
console.error('[MessageId : '+messageId+'] ' +'initiate-generate-sales-worker', e);
import log from 'gitpunch-lib/log'
import { SQS } from 'aws-sdk'
// How often (in minutes) to refetch the complete repo list, bypassing the queue.
const FETCH_ALL_REPOS_INTERVAL = Number(process.env.FETCH_ALL_REPOS_INTERVAL) || 60;
// URL of the SQS queue delivering repo-change events.
const SQS_QUEUE_URL = process.env.SQS_QUEUE_URL;
// Upper bound on messages drained per receive cycle.
const RECEIVE_MAX_EVENTS = Number(process.env.RECEIVE_MAX_EVENTS) || 40;
// Per-request SQS timeout, in milliseconds.
const SQS_REQUEST_TIMEOUT = Number(process.env.SQS_REQUEST_TIMEOUT) || 2000;
// Any truthy value disables queue usage entirely.
const DONT_USE_QUEUE = process.env.DONT_USE_QUEUE;
// SQS client pinned to the 2012-11-05 API version; region comes from the
// environment, credentials from the default AWS provider chain.
const sqs = new SQS({
apiVersion: '2012-11-05',
region: process.env.SQS_REGION
})
// Returns the set of repos worth refreshing, or null when ALL repos should be
// treated as relevant (queue disabled, or the periodic full-fetch window hit).
// NOTE(review): truncated in this view — the try block opened here is not
// closed within the visible lines.
export default async function getRelevantRepos () {
try {
if (DONT_USE_QUEUE) {
// Queue disabled via env: caller should treat every repo as relevant.
return null
}
const now = new Date()
// Minutes elapsed since UTC midnight.
const minutes = now.getUTCHours() * 60 + now.getUTCMinutes()
// if it's time to fetch all repos
if (minutes % FETCH_ALL_REPOS_INTERVAL === 0) {
// Drop queued events — the full fetch supersedes them.
await purgeMessageQueue()
return null // means all are relevant
}
// NOTE(review): fragment — appears to run inside an agent bootstrap routine;
// the enclosing function is not visible from here.
AWS.config.update({ region: process.env.GTM_AWS_REGION });
let DDB;
if (process.env.IAM_ENABLED) {
// With IAM roles providing credentials, only the HTTP proxy needs setup.
// NOTE(review): `proxy` is not defined in the visible scope — confirm import.
AWS.config.update({
httpOptions: {
agent: proxy(process.env.HTTP_PROXY)
}
});
} else {
// due to serverless .env restrictions
// NOTE(review): mutating process.env to inject decrypted credentials is
// process-global; anything reading these vars concurrently sees the new
// values — confirm this is intentional.
process.env.AWS_ACCESS_KEY_ID = KmsUtils.getDecrypted(process.env.GTM_CRYPT_AGENT_AWS_ACCESS_KEY_ID);
process.env.AWS_SECRET_ACCESS_KEY = KmsUtils.getDecrypted(process.env.GTM_CRYPT_AGENT_AWS_SECRET_ACCESS_KEY);
}
// Clients pinned to specific API versions.
let sqs = new AWS.SQS({ apiVersion: '2012-11-05' });
let sns = new AWS.SNS({ apiVersion: '2010-03-31' });
require('babel-polyfill');
const safeJsonStringify = require('safe-json-stringify');
// Static helpers delegating to AgentLogger state.
// NOTE(review): the class body continues past the visible lines.
export class AgentUtils {
// Returns the agent identifier held by AgentLogger.
static agentId() {
return AgentLogger.AGENT_ID;
}
// Returns AgentLogger.SSE (presumably a server-sent-events handle — confirm).
static sse() {
return AgentLogger.SSE;
}
// Forwards a log group/stream pair to AgentLogger.
static stream(group, stream) {
AgentLogger.stream(group, stream);
}
// Registers an sqs-consumer for the given endpoint and re-emits its messages
// and errors on this dispatcher's EventEmitter.
// NOTE(review): the closing `};` of this function is outside the visible lines.
QueueMessageDispatcher.prototype.addConsumer = function (endpoint) {
var that = this;
// NOTE(review): AWS.config.update mutates the process-global AWS config;
// registering consumers for endpoints with different credentials can
// clobber each other — confirm consumers share one credential set.
AWS.config.update({
region: endpoint.region,
accessKeyId: endpoint.key,
secretAccessKey: endpoint.secretKey
});
var consumer = Consumer.create({
sqs: new AWS.SQS(),
region: endpoint.region,
queueUrl: endpoint.url,
batchSize: 1,
visibilityTimeout: 10,
waitTimeSeconds: 20,
handleMessage: function (message, done) {
// Re-emit and ack immediately — listeners cannot delay message deletion.
that.eventEmitter.emit('queue-message', endpoint, message);
done();
}
});
consumer.on('error', function (err) {
that.eventEmitter.emit('queue-error', endpoint, err);
});
// NOTE(review): fragmentary span — brace nesting does not balance within the
// visible lines; appears to mix a contract-mined callback with consumer setup.
console.log('[CONTRACT UPDATE] Before Contract Mined, id: ' + data.contractId + ', transactionHash: ' + instance.transactionHash);
});
// Publish the mined transaction hash for downstream receipt processing.
var transactionReceiptMessage = {
"transactionHash": instance.transactionHash
};
sqsHelper.send(JSON.stringify(transactionReceiptMessage),
process.env.AWS_TRANSACTION_RECEIPT_QUEUE_URL, 10,
'transactionreceipt');
}
});
}
}).catch(function(err) {
// Logs and swallows: promise-chain failures are not propagated further here.
console.log(err.message, err.stack);
});
},
sqs: new AWS.SQS()
});
consumer.on('error', function (err) {
console.log(err.message);
});
consumer.start();
// Factory / health-check pair (fragment of an object literal):
// getClient builds an SQS client from config; isReady probes it via listQueues.
getClient: ({ config }) => new AWS.SQS(config),
isReady: (client) => client.listQueues().promise()
/**
 * Lightweight SQS publisher bound to one region and queue URL.
 * When either value is missing, logs an error and leaves `this.sqs`
 * undefined so SQS features are effectively disabled.
 *
 * @param {string} region   AWS region for the SQS client.
 * @param {string} queueURL Full URL of the target queue.
 */
function SQSSender(region, queueURL) {
    // Guard against null/undefined as well as '' — the previous check only
    // caught empty strings, so an undefined region slipped through and
    // produced a misconfigured AWS.SQS client instead of disabling SQS.
    if (!region || !queueURL) {
        logger.error(
            'SQSSender.send',
            'No SQS region or queueURL provided, SQS features will be disabled'
        );
        return;
    }
    this.sqs = new AWS.SQS({ region: region });
    this.queueUrl = queueURL;
}
/**
 * Builds the SQS client and seeds outgoing message parameters from the
 * supplied publisher configuration. MessageBody is always overwritten
 * with the configured payload.
 */
public constructor(publisherProperties: PublisherModel) {
    super(publisherProperties);
    const { awsConfiguration, messageParams, payload } = publisherProperties;
    this.sqsSend = new AWS.SQS(awsConfiguration);
    this.params = messageParams || {};
    this.params.MessageBody = payload;
}
import { SQS } from 'aws-sdk';
import { Message, MessageList } from 'aws-sdk/clients/sqs';
import logger from '../logger';
import { BlockWithTransactionHashes } from '@ethercast/model';
import { NETWORK_ID, NEW_BLOCK_QUEUE_NAME } from '../env';
import { BlockQueueMessage } from '../model';
// Shared SQS client using the default region/credential chain.
export const sqs = new SQS();
// Memoizes queue-URL lookups per queue name.
// NOTE(review): bare `Promise` lacks a type argument — an error under strict
// TS; likely should be Promise<string>. Also, a rejected lookup stays cached,
// so transient failures are never retried — confirm both.
const QUEUE_URL_CACHE: { [queueName: string]: Promise } = {};
// Resolves (and caches) the URL for a named queue.
// NOTE(review): truncated in this view — the .then handler is not closed.
export const getQueueUrl: (QueueName: string) => Promise =
async function (QueueName: string) {
if (QUEUE_URL_CACHE[QueueName]) {
// Cache hit: reuse the in-flight or resolved lookup.
return QUEUE_URL_CACHE[QueueName];
}
return (
QUEUE_URL_CACHE[QueueName] =
sqs.getQueueUrl({ QueueName }).promise()
.then(
({ QueueUrl }) => {
if (!QueueUrl) {
throw new Error('could not find queue url: ' + QueueName);