export function createTempFiles(files: { [file: string]: string }): string {
// deterministic dirName, as this path is used in test case output (see the generated snapshots)
const dir = path.join(TEST_TEMP_DIR, objectHash(files));
Object.keys(files).forEach(file => {
const content = files[file];
fs.outputFileSync(path.join(dir, file), content);
});
return dir;
}
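Because the directory name is derived from objectHash(files), the same file map always yields the same path, which is what keeps the snapshot output stable. A minimal usage sketch under that assumption (the inputs here are made up):

// Hypothetical usage: identical file maps hash to the same value, so the
// returned temp dir is the same across runs and safe to embed in snapshots.
const dirA = createTempFiles({ 'src/index.js': 'console.log("hi")' });
const dirB = createTempFiles({ 'src/index.js': 'console.log("hi")' });
// dirA === dirB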
return new Promise((resolve, reject) => {
const Query = {
store: storeCode, // TODO: add grouped product and bundled product support
type: entityType,
searchQuery: query,
size: size,
from: start,
sort: sort
}
if (excludeFields) Query._sourceExclude = excludeFields
if (includeFields) Query._sourceInclude = includeFields
const cache = global.$VS.db.elasticCacheCollection // switch to appcache?
const cacheKey = hash(Query)
let servedFromCache = false
const benchmarkTime = new Date()
cache.getItem(cacheKey, (err, res) => {
if (err) {
console.log(err)
}
if (res !== null) {
res.cache = true
res.noresults = false
res.offline = !isOnline() // TODO: refactor to check an ES heartbeat instead
resolve(res)
console.debug('Result from cache for ' + cacheKey + ' (' + entityType + '), ms=' + (new Date().getTime() - benchmarkTime.getTime()))
servedFromCache = true
} else {
if (!isOnline()) {
function checkDemoAPI(toSend, backupUrl = null, backupPayload = null) {
const hsh = hash.sha1(toSend);
console.log("CHECKING DEMOAPI: " + hsh);
if (DemoAPI.hasOwnProperty(hsh)) {
// Relies on a symbolic link being present in the dist folder to the demo folder
const path = './demo/' + DemoAPI[hsh]
console.log("TRYING TO SENDING STATIC: ", path);
const follow = (response) => responseJson(response, backupUrl, backupPayload)
return fetch(path).then(follow)
}
return d3.json(backupUrl, backupPayload)
}
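checkDemoAPI works because hash.sha1 produces the same digest for structurally equal payloads, so canned responses can be keyed by request content rather than by URL. A sketch of what such a lookup table might look like (the payload and file name are illustrative, not from the original project):

// Hypothetical DemoAPI table: keys are hash.sha1(payload) digests, values are
// static JSON files under ./demo/ that stand in for the live endpoint.
const DemoAPI = {
  [hash.sha1({ query: 'top-stories', limit: 10 })]: 'top-stories.json'
};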
track(options, inputsABI, gteBlockNum, networkId){
const eventKey = 'logs-' + hash(Object.assign({networkId}, options || {}));
const filterConditions = Object.assign({fromBlock: 0, toBlock: "latest"}, options || {});
this.db.deleteNewestBlocks(eventKey, gteBlockNum);
const eventSummary = this.db.getLastKnownEvent(eventKey);
const sub = new ReplaySubject();
const logObserver = fromEvent(this.events, eventKey)
logObserver.subscribe((e) => {
if (!e) return;
const id = hash({eventName: eventKey, blockNumber: e.blockNumber, transactionIndex: e.transactionIndex, logIndex: e.logIndex});
// TODO: would be nice if this was smart enough to understand the type of returnValues and do the needed conversions
const eventData = {
id, // reuse the id computed above instead of hashing the same fields twice
request(service, method, params, options) {
const that = this;
const credentials = Object.assign({}, that.getCredentials());
// Make sure options is an object (tolerates incorrect calls of request)
const requestOptions = _.isObject(options) ? options : {};
const shouldCache = _.get(requestOptions, 'useCache', false);
const paramsWithRegion = _.merge({}, params, {
region: _.get(options, 'region'),
});
const paramsHash = objectHash.sha1(paramsWithRegion);
const MAX_TRIES = 4;
const persistentRequest = f =>
new BbPromise((resolve, reject) => {
const doCall = numTry => {
f()
// We're emulating if/else logic, hence a single `then` with two callbacks instead of a `then`/`catch` pair
.then(resolve, e => {
if (
numTry < MAX_TRIES &&
e.statusCode !== 403 && // Invalid credentials
((e.providerError && e.providerError.retryable) || e.statusCode === 429)
) {
that.serverless.cli.log(
_.join(
[
`Recoverable error occurred (${e.message}), sleeping for 5 seconds.`,
});
map.set(media, [...mediaRulesArr, ...newRules]);
} else {
// Fresh media rules can be created
map.set(media, mRules.map(mRule => {
const objHash = hash.MD5(mRule);
return {
hash: objHash,
rule: mRule
}
}));
}
} else {
const ruleKey = Rule.generateRuleKey(ruleObj);
const rulesArray = map.get(ruleKey);
const objHash = hash.MD5(ruleObj);
if (rulesArray) {
// If a rule object with this hash already exists under this ruleKey, ignore it; otherwise insert it
if (!rulesArray.some(ruleObj => ruleObj.hash === objHash)) {
rulesArray.push({
hash: objHash,
rule: ruleObj
});
}
} else {
map.set(ruleKey, [
{
hash: objHash,
rule: ruleObj
}
]);
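The branch above boils down to using hash.MD5 of the rule object as an identity check, so structurally identical rules are stored only once per key. A minimal standalone sketch of that pattern (the names are illustrative):

// Dedupe-by-hash sketch: only insert a rule if no entry with the same MD5 exists.
function addRule(map, ruleKey, ruleObj) {
  const objHash = hash.MD5(ruleObj);
  const entries = map.get(ruleKey) || [];
  if (!entries.some(entry => entry.hash === objHash)) {
    entries.push({ hash: objHash, rule: ruleObj });
  }
  map.set(ruleKey, entries);
}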
process: function (req, res) {
var tempObj = req.body;
sails.log.debug("REQUEST ",req.body)
// response_type doesn't affect the computation, so drop it before hashing
delete tempObj.response_type;
var md5 = require('object-hash').MD5(tempObj);
ProcData.findOneByHash(md5).then(function (json) {
var obj_to_process = {};
// A record matching the MD5 hash of this request already exists
// in the database, so there is no need to process it again;
// just send back the previously computed result
if (json) {
// drop the parent key when it is not set
if (!json.parent) {
delete json.parent;
}
json.data = json.value;
delete json.value;
if (req.body.response_type === 'data_id') {
delete json.data;
}
delete json.hash;
function Aggregator (conn) {
// make "new" optional
if ( !(this instanceof Aggregator) ) {
return new Aggregator(conn);
}
conn.on('data', _.bind(onSocketData, this));
conn.on('close', _.bind(onSocketClose, this));
this.connection = conn;
this.polledResources = new HashTable();
this.bodyCache = {};
this.log('init');
}
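Because of the instanceof guard, the constructor behaves the same whether or not the caller uses new; for example (the socket variable here is hypothetical):

// Both forms return a properly constructed Aggregator instance.
const a1 = new Aggregator(socket);
const a2 = Aggregator(socket);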
it('clears the stored state of the query request', () => {
const storedStateOfQuery = new ImmutableMap({
status: 'complete',
data: { a: { b: 'hello' } },
});
const originalState = new ImmutableMap({
endpoints: new ImmutableMap({
[endpointName]: new ImmutableMap({
queries: new ImmutableMap({
[hash(query)]: new ImmutableMap({
typenames: new ImmutableSet(['A']),
variables: new ImmutableMap({
[hash(null)]: storedStateOfQuery,
}),
}),
}),
}),
}),
});
const action = { type, query, endpointName };
const newState = reducer(originalState, action);
expect(newState.getIn(['endpoints', endpointName, 'queries', queryHash, 'variables', variableHash]))
.toEqual(undefined);
});
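The queryHash and variableHash used in the assertion are not defined in this excerpt; presumably they are produced with the same object-hash calls used to key the state above, along the lines of:

// Assumed definitions for the asserted keys, mirroring how the state was built.
const queryHash = hash(query);
const variableHash = hash(null);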
const parseJson = require("json-parse-better-errors");
const hash = require("object-hash");
const fs = require("fs");
const getLogger = require("./dev-logger");
const INITIAL_EVENT = { type: "@@INIT" };
const initialChecksum = hash.MD5(INITIAL_EVENT);
exports.checksumFile = function checksumFile(filePath) {
const logger = getLogger({ service: "queue", verbose: true });
if (!fs.existsSync(filePath)) {
logger.info("creating new data file");
fs.writeFileSync(filePath, `### BEGIN checksum: ${initialChecksum} ###`);
return [initialChecksum, initialChecksum, []];
}
const data = String(fs.readFileSync(filePath));
const rows = data.length ? data.split("\n") : [];
const events = [];
if (!rows[0] || !rows[0].length) {
logger.info("data file is empty");