Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// NOTE(review): fragment — the enclosing function and the declarations of
// `data`, `filter`, `accessible_packages`, `owned_packages`, and `body` are
// outside this view.
// Build one "should" clause per group so packages shared with any of the
// user's groups match. (`.map` is used purely for side effects here;
// `forEach` would state the intent better.)
data.Groups.map(group => {filter.push({match: {"groups": group.GroupName}});});
accessible_packages.query.bool.filter = {
bool: {
should: filter
}
};
}
// msearch body is a flat list of alternating {header, query} pairs;
// an empty header object means "use the default index/type".
body.push({});
body.push(owned_packages);
body.push({});
body.push(accessible_packages);
//-------------------------------------------------------------
// Execute Search
//-------------------------------------------------------------
// Elasticsearch client that signs requests with the supplied AWS
// credentials via the http-aws-es connection class.
// FIX: construct the client with `new` — other call sites in this codebase
// use `new elasticsearch.Client(...)`, and calling a constructor bare relies
// on an internal no-`new` guard. If Client already returns an instance,
// adding `new` is a no-op, so this change is safe either way. The extra
// parentheses are required: `new require('x').Client(...)` would apply
// `new` to `require` itself.
let client = new (require('elasticsearch').Client)({
hosts: `${config.Item.setting.esurl}`,
connectionClass: require('http-aws-es'),
amazonES: {
region: process.env.AWS_REGION,
credentials: creds
}
});
// NOTE(review): fragment — the .then callback is cut off below this view.
// Run both queries in a single round trip; responses come back in the same
// order the header/body pairs were pushed.
client.msearch({
body: body
}).then(function(body) {
// NOTE(review): this `body` parameter shadows the outer msearch `body`
// array — consider renaming to `resp` for clarity.
console.log(body);
// NOTE(review): assumes hits.total is a plain number (ES <= 6.x); in ES 7+
// it is an object ({value, relation}) — confirm cluster version.
let _results = {
owned_packages: body.responses[0].hits.total,
accessible_packages: body.responses[1].hits.total
};
// Hand counts back through the node-style callback.
cb(null, _results);
// NOTE(review): fragment — this function is cut off mid-body below.
// Chooses between a plain localhost Elasticsearch client and an AWS-signed
// one depending on whether ES_HOST is set.
async function connect() {
let esConfig
let client
// use local client
if (!process.env.ES_HOST) {
client = new elasticsearch.Client({ host: 'localhost:9200' })
} else {
// getCredentials is callback-based, so wrap it in a Promise to await it.
await new Promise((resolve, reject) => AWS.config.getCredentials((err) => {
if (err) return reject(err)
return resolve()
}))
// NOTE(review): this replaces the credentials just resolved above with
// ones built from raw env vars — confirm the overwrite is intentional.
AWS.config.update({
credentials: new AWS.Credentials(process.env.AWS_ACCESS_KEY_ID,
process.env.AWS_SECRET_ACCESS_KEY),
region: process.env.AWS_REGION || 'us-east-1'
})
esConfig = {
hosts: [process.env.ES_HOST],
connectionClass: httpAwsEs,
awsConfig: new AWS.Config({ region: process.env.AWS_REGION || 'us-east-1' }),
// Module dependencies: local expression parser, lodash utilities, bluebird
// promises, and an Elasticsearch client bound to a local node.
var aggspretion = require('../parser/function.js');
var _ = require('lodash');
var Promise = require('bluebird');
var elasticsearch = require('elasticsearch');
var client = new elasticsearch.Client({
host: 'localhost:9200',
});
// Holds the most recently parsed sheet (assigned elsewhere in this module).
var sheet;
// NOTE(review): fragment — the date_histogram options and the rest of the
// function body are cut off below this view.
// Builds an Elasticsearch search body: a query_string full-text query plus
// a date_histogram aggregation named "series".
function getRequest (config) {
var body = {
query: {
query_string: {
query: config.query
}
},
aggs: {
series: {
date_histogram: {
const fs = require('fs')
// Elasticsearch client for the endpoint configured via ES_URL.
// FIX: the original `new require('elasticsearch').Client({...})` parses as
// `(new require('elasticsearch')).Client({...})` — member access binds
// tighter than the call, so `require` is invoked as a constructor and
// `Client` is then called WITHOUT `new`. It only worked because `require`
// returns an object. Parenthesize so `new` applies to Client itself.
var es = new (require('elasticsearch').Client)({
host: process.env.ES_URL,
log: false,
requestTimeout: 3000, // ms
})
// NOTE(review): fragment — the function body continues past this view.
// Full-text search. Field boosts heavily favor artist and title matches;
// the trailing ngram fields provide fuzzy matching.
var search = function(query, size, sort, filters, isApp, from, req, callback) {
var fields = ["artist.artist^15", "artist.folded^15", "title^11", "title.folded^5", "description^3", "text.*^2", "accession_number", "_all", "artist.ngram^2", "title.ngram"]
// Quoted-phrase query: drop the two ngram fields, which would dilute exact matching.
if(query.match(/".*"/)) fields = fields.slice(0, -2)
if(filters) query += ' '+filters
// NOTE(review): loose `!=` comparison of the token against an env secret,
// and not constant-time — consider `!==` plus crypto.timingSafeEqual.
var limitToPublicAccess = req.query.token != process.env.PRIVATE_ACCESS_TOKEN
// Hide non-public records unless the caller explicitly queried for
// deaccessioned objects (the two indexOf results sum to -2 only when
// neither spelling appears in query or filters).
if(limitToPublicAccess && [query, filters].indexOf('deaccessioned:true') + [query, filters].indexOf('deaccessioned:"true"') === -2) query += ' public_access:1'
// if(isApp) query += ' room:G*' // restrict searches from the journeys app to only on view objects
// Requests from more.artsmia.org (by Origin header or ?tag=more) get the
// same on-view boost as the app.
var isMoreArtsmia = req.headers.origin && req.headers.origin.match('//more.artsmia.org')
|| req.query.tag && req.query.tag == "more"
var boostOnViewArtworks = isApp || isMoreArtsmia
// NOTE(review): fragment — this mocha-style before() hook is cut off below.
// Rebuilds the test index from the movie fixtures before the suite runs.
before(async function() {
// Search wrapper under test, pointed at the test index.
elasticitems = ElasticItems({
host: HOST,
index: INDEX,
type: INDEX,
}, search_config);
elastic = new elasticsearch.Client({
host: HOST,
// NOTE(review): Promise.defer() is a long-deprecated bluebird API —
// confirm the Promise in scope here still provides it.
defer: function () {
return Promise.defer();
}
});
// Drop the index; failure (index does not exist yet) is deliberately ignored.
await elastic.indices.delete({
index: INDEX
})
.catch(v => {
})
// Bulk-load the fixtures using the schema from the search config.
await elasticbulk.import(movies, {
index: INDEX,
host: HOST
}, search_config.schema)
// ES-module import; client talks to a local Elasticsearch node.
import elasticsearch from 'elasticsearch';
// NOTE(review): log level 'trace' dumps every request/response to the
// console — intended for debugging only, noisy in production.
var client = new elasticsearch.Client({
host: 'localhost:9200',
log: 'trace'
});
let ela = {};
// NOTE(review): fragment — the query body and function close are cut off below.
// Paginated exact-phrase search of `key` against the article body field.
ela.searchArt = function (key, page) {
return client.search({
index: 'article',
type: 'article',
body: {
size: page.pageSize,
// Convert 1-based currentPage to a zero-based result offset.
from: page.pageSize * (page.currentPage - 1),
query: {
match_phrase: {
body: key
// Mongo -> Elasticsearch voter-sync script: dependencies and CLI knobs.
const MongoClient = require('mongodb').MongoClient;
const elasticsearch = require('elasticsearch');
const esClient = new elasticsearch.Client({ host: 'localhost:9200', apiVersion: '2.4' });
const fs = require('fs');
const f = require('util').format;
const BSON = require('bson')
const bson = new BSON();
// Connection targets; MONGO_RS overrides the default standalone instance.
let url = process.env.MONGO_RS || 'mongodb://localhost:27017';
const dbName = 'vrs';
const collectionName = 'voter';
// Change-stream resume token, captured later while tailing the collection.
let resumeToken;
// CLI: node <script> <skip> <limit> [-d]   (defaults: skip=0, limit=100000;
// note `Number(x) || d` also falls back on NaN and on an explicit 0).
const ARG_SKIP = Number(process.argv[2]) || 0;
const ARG_LIMIT = Number(process.argv[3]) || 100000;
// FIX: `cond ? true : false` is redundant — the === comparison is already a boolean.
const ARG_DUMP = process.argv[4] === '-d';
// Documents per bulk request sent to Elasticsearch.
const BULK_SIZE = 1000;
let bulkOpsDone = 0;
const testDoc = require('./testDoc');
constructor() {
this.index = config.store.recipeIndex;
this.client = new elasticsearch.Client({
host: config.store.uri
});
};
// NOTE(review): fragment — the startup callback is cut off below this view.
// On boot, open Elasticsearch and Mongo connections used by the sync layer.
Meteor.startup(() => {
if (ELASTIC_URL) {
const esClient = new elasticsearch.Client({
host: ELASTIC_URL
});
const MongoClient = MongoDriver.MongoClient;
const esSync = {};
// NOTE(review): `err` is ignored and `esSync.db` is assigned
// asynchronously — consumers must not touch esSync.db before this
// callback fires, and a failed connect leaves it undefined.
MongoClient.connect(MONGO_URL, function(err, db) {
esSync.db = db;
});
// NOTE(review): fragment — the try block and handler close are cut off below.
// Consume jobs from the 'eventsqueue' queue; each job's params are
// normalized by processParams and acked back via the callback.
const worker = client.worker(['eventsqueue']);
worker.register({
elastic: function (params, callback) {
try {
processParams(params);
callback(null, params);