'use strict';

/** Imports */
import Parser, { plugins } from 'babylon/lib/parser';
import { TokenType, types as tt } from 'babylon/lib/tokenizer/types';

/** Constants */
const CHAR_CODES = '|>'.split('').map((c) => c.charCodeAt(0));
const PLUGIN_NAME = 'pipeline';
const beforeExpr = true;

/** Types */
// Register `|>` as a binary operator token (`binop` sets its precedence).
tt.pipeline = new TokenType('|>', { beforeExpr, binop: 12 });

/** Parser */
const pp = Parser.prototype;

// Consume both characters of `|>` and emit a single pipeline token.
pp.readToken_pipeline = function readToken_pipeline(code) { // eslint-disable-line camelcase
  return this.finishOp(tt.pipeline, 2);
};

/** Plugin */
function plugin(instance) {
  instance.extend('readToken', (inner) => function readToken(code) {
    const next = this.input.charCodeAt(this.state.pos + 1);

    // Anything other than the exact `|>` sequence falls through to the
    // default tokenizer.
    if (!(code === CHAR_CODES[0] && next === CHAR_CODES[1])) {
      return inner.call(this, code);
    }

    return this.readToken_pipeline(code);
  });
}

// Register under babylon's plugin registry (imported above) so the name
// can be enabled via parser options.
plugins[PLUGIN_NAME] = plugin;
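// A minimal usage sketch (assumptions: babylon 6.x, where the `plugins`
// registry mutated above backs `options.plugins` lookup; the module path
// below is hypothetical):
//
//   import './pipeline-plugin';
//   import { parse } from 'babylon';
//
//   const ast = parse('x |> f', { plugins: ['pipeline'] });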
import { types as tt, TokenType } from "babylon/lib/tokenizer/types";
import forOwn from "lodash/object/forOwn";
import isString from "lodash/lang/isString";

// TODO: create and export actual token types for these.
export const tokenTypes = {
  newline: new TokenType("newline"),
  whitespace: new TokenType("whitespace"),
  blockCommentStart: new TokenType("/*"),
  blockCommentBody: new TokenType("blockCommentBody"),
  blockCommentEnd: new TokenType("*/"),
  lineCommentStart: new TokenType("//"),
  lineCommentBody: new TokenType("lineCommentBody"),
};

// Flag each CST token type as whitespace.
forOwn(tokenTypes, function(tokenType) {
  tokenType.whitespace = true;
});

const ttCst = tokenTypes;

// Reverse lookup: TokenType instance -> exported name.
export const tokenToName = new Map();
for (let name in tt) { tokenToName.set(tt[name], name); }
for (let name in ttCst) { tokenToName.set(ttCst[name], name); }

export function getTokenName(tokenType) {
  if (isString(tokenType)) return tokenType;
  return tokenToName.get(tokenType);
}
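// Usage sketch: getTokenName accepts either a TokenType instance or a
// plain string (assuming babylon's tokenizer exports types such as
// `tt.comma`, as its `types` module does):
//
//   getTokenName(tt.comma);            // => "comma"
//   getTokenName(tokenTypes.newline);  // => "newline"
//   getTokenName("newline");           // => "newline" (strings pass through)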