// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
Logger.debug(JSON.stringify(item.index.error, null, 2), loggerCtx);
}
if (item.update) {
Logger.debug(JSON.stringify(item.update.error, null, 2), loggerCtx);
}
if (item.delete) {
Logger.debug(JSON.stringify(item.delete.error, null, 2), loggerCtx);
}
});
} else {
Logger.verbose(`Executed ${body.items.length} bulk operations on index [${fullIndexName}]`);
}
return body;
} catch (e) {
Logger.error(`Error when attempting to run bulk operations [${e.toString()}]`, loggerCtx);
Logger.error('Error details: ' + JSON.stringify(e.body.error, null, 2), loggerCtx);
}
}
async onVendureBootstrap(): Promise {
const { host, port } = ElasticsearchPlugin.options;
try {
const pingResult = await this.elasticsearchService.checkConnection();
} catch (e) {
Logger.error(`Could not connect to Elasticsearch instance at "${host}:${port}"`, loggerCtx);
Logger.error(JSON.stringify(e), loggerCtx);
return;
}
Logger.info(`Sucessfully connected to Elasticsearch instance at "${host}:${port}"`, loggerCtx);
await this.elasticsearchService.createIndicesIfNotExists();
this.eventBus.ofType(CatalogModificationEvent).subscribe(event => {
if (event.entity instanceof Product || event.entity instanceof ProductVariant) {
return this.elasticsearchIndexService.updateProductOrVariant(event.ctx, event.entity).start();
}
});
const collectionModification$ = this.eventBus.ofType(CollectionModificationEvent);
const closingNotifier$ = collectionModification$.pipe(debounceTime(50));
collectionModification$
.pipe(
}
const result = await handler.handle(event as any, EmailPlugin.options.globalTemplateVars);
if (!result) {
return;
}
const bodySource = await this.templateLoader.loadTemplate(type, result.templateFile);
const generated = await this.generator.generate(
result.from,
result.subject,
bodySource,
result.templateVars,
);
const emailDetails = { ...generated, recipient: result.recipient };
await this.emailSender.send(emailDetails, this.transport);
} catch (e) {
Logger.error(e.message, 'EmailPlugin', e.stack);
}
}
}
private async deleteIndices(prefix: string) {
    // Deletes both the variant and the product index for the given prefix.
    // Each deletion gets its own try/catch so that a failure on one index
    // (e.g. it does not exist) does not prevent attempting the other.
    for (const suffix of [VARIANT_INDEX_NAME, PRODUCT_INDEX_NAME]) {
        const index = prefix + suffix;
        try {
            await this.client.indices.delete({ index });
            Logger.verbose(`Deleted index "${index}"`, loggerCtx);
        } catch (e) {
            // Serialize the error payload so the full Elasticsearch response
            // body is visible in the log, consistent with createIndices().
            Logger.error(JSON.stringify(e, null, 2), loggerCtx);
        }
    }
}
if (item.index) {
Logger.debug(JSON.stringify(item.index.error, null, 2), loggerCtx);
}
if (item.update) {
Logger.debug(JSON.stringify(item.update.error, null, 2), loggerCtx);
}
if (item.delete) {
Logger.debug(JSON.stringify(item.delete.error, null, 2), loggerCtx);
}
});
} else {
Logger.verbose(`Executed ${body.items.length} bulk operations on index [${fullIndexName}]`);
}
return body;
} catch (e) {
Logger.error(`Error when attempting to run bulk operations [${e.toString()}]`, loggerCtx);
Logger.error('Error details: ' + JSON.stringify(e.body.error, null, 2), loggerCtx);
}
}
private async createIndices(prefix: string) {
    // Creates the variant index followed by the product index for the given
    // prefix. Creation attempts are independent: an error on one index is
    // logged (with the full serialized error payload) and does not stop the
    // other from being created.
    const indexNames = [prefix + VARIANT_INDEX_NAME, prefix + PRODUCT_INDEX_NAME];
    for (const index of indexNames) {
        try {
            await this.client.indices.create({ index });
            Logger.verbose(`Created index "${index}"`, loggerCtx);
        } catch (e) {
            Logger.error(JSON.stringify(e, null, 2), loggerCtx);
        }
    }
}
error: err => {
Logger.error(err);
reporter.complete(false);
},
});