How to use the lookahead_1 method in Cucumber-gherkin

Best JavaScript code snippet using cucumber-gherkin

chevrotain.js

Source: chevrotain.js (GitHub)
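In the snippet below — the UMD bundle of chevrotain v1.0.1, a JavaScript parser-building library — lookahead_1 is simply the local variable that webpack assigns to the bundled lookahead module when the parser module (module 6) requires it:

    var lookahead_1 = __webpack_require__(14);

Later in the bundle, DEFAULT_PARSER_CONFIG sets maxLookahead: 4, which bounds how many tokens of lookahead the parser may use. Note that the listing keeps the original file's line numbers inline and is truncated inside the Parser constructor.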


1/*! chevrotain - v1.0.1 */2(function webpackUniversalModuleDefinition(root, factory) {3 if(typeof exports === 'object' && typeof module === 'object')4 module.exports = factory();5 else if(typeof define === 'function' && define.amd)6 define("chevrotain", [], factory);7 else if(typeof exports === 'object')8 exports["chevrotain"] = factory();9 else10 root["chevrotain"] = factory();11})(typeof self !== 'undefined' ? self : this, function() {12return /******/ (function(modules) { // webpackBootstrap13/******/ // The module cache14/******/ var installedModules = {};15/******/16/******/ // The require function17/******/ function __webpack_require__(moduleId) {18/******/19/******/ // Check if module is in cache20/******/ if(installedModules[moduleId]) {21/******/ return installedModules[moduleId].exports;22/******/ }23/******/ // Create a new module (and put it into the cache)24/******/ var module = installedModules[moduleId] = {25/******/ i: moduleId,26/******/ l: false,27/******/ exports: {}28/******/ };29/******/30/******/ // Execute the module function31/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);32/******/33/******/ // Flag the module as loaded34/******/ module.l = true;35/******/36/******/ // Return the exports of the module37/******/ return module.exports;38/******/ }39/******/40/******/41/******/ // expose the modules object (__webpack_modules__)42/******/ __webpack_require__.m = modules;43/******/44/******/ // expose the module cache45/******/ __webpack_require__.c = installedModules;46/******/47/******/ // define getter function for harmony exports48/******/ __webpack_require__.d = function(exports, name, getter) {49/******/ if(!__webpack_require__.o(exports, name)) {50/******/ Object.defineProperty(exports, name, {51/******/ configurable: false,52/******/ enumerable: true,53/******/ get: getter54/******/ });55/******/ }56/******/ };57/******/58/******/ // getDefaultExport function for compatibility with non-harmony modules59/******/ __webpack_require__.n = function(module) {60/******/ var getter = module && module.__esModule ?61/******/ function getDefault() { return module['default']; } :62/******/ function getModuleExports() { return module; };63/******/ __webpack_require__.d(getter, 'a', getter);64/******/ return getter;65/******/ };66/******/67/******/ // Object.prototype.hasOwnProperty.call68/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };69/******/70/******/ // __webpack_public_path__71/******/ __webpack_require__.p = "";72/******/73/******/ // Load entry module and return exports74/******/ return __webpack_require__(__webpack_require__.s = 20);75/******/ })76/************************************************************************/77/******/ ([78/* 0 */79/***/ (function(module, exports, __webpack_require__) {80"use strict";81Object.defineProperty(exports, "__esModule", { value: true });82/*83 Utils using lodash style API. (not necessarily 100% compliant) for functional and other utils.84 These utils should replace usage of lodash in the production code base. 
not because they are any better...85 but for the purpose of being a dependency free library.86 The hotspots in the code are already written in imperative style for performance reasons.87 so writing several dozen utils which may be slower than the original lodash, does not matter as much88 considering they will not be invoked in hotspots...89 */90function isEmpty(arr) {91 return arr && arr.length === 0;92}93exports.isEmpty = isEmpty;94function keys(obj) {95 if (obj === undefined || obj === null) {96 return [];97 }98 return Object.keys(obj);99}100exports.keys = keys;101function values(obj) {102 var vals = [];103 var keys = Object.keys(obj);104 for (var i = 0; i < keys.length; i++) {105 vals.push(obj[keys[i]]);106 }107 return vals;108}109exports.values = values;110function mapValues(obj, callback) {111 var result = [];112 var objKeys = keys(obj);113 for (var idx = 0; idx < objKeys.length; idx++) {114 var currKey = objKeys[idx];115 result.push(callback.call(null, obj[currKey], currKey));116 }117 return result;118}119exports.mapValues = mapValues;120function map(arr, callback) {121 var result = [];122 for (var idx = 0; idx < arr.length; idx++) {123 result.push(callback.call(null, arr[idx], idx));124 }125 return result;126}127exports.map = map;128function flatten(arr) {129 var result = [];130 for (var idx = 0; idx < arr.length; idx++) {131 var currItem = arr[idx];132 if (Array.isArray(currItem)) {133 result = result.concat(flatten(currItem));134 }135 else {136 result.push(currItem);137 }138 }139 return result;140}141exports.flatten = flatten;142function first(arr) {143 return isEmpty(arr) ? undefined : arr[0];144}145exports.first = first;146function last(arr) {147 var len = arr && arr.length;148 return len ? arr[len - 1] : undefined;149}150exports.last = last;151function forEach(collection, iteratorCallback) {152 if (Array.isArray(collection)) {153 for (var i = 0; i < collection.length; i++) {154 iteratorCallback.call(null, collection[i], i);155 }156 }157 else if (isObject(collection)) {158 var colKeys = keys(collection);159 for (var i = 0; i < colKeys.length; i++) {160 var key = colKeys[i];161 var value = collection[key];162 iteratorCallback.call(null, value, key);163 }164 }165 else {166 /* istanbul ignore next */167 throw Error("non exhaustive match");168 }169}170exports.forEach = forEach;171function isString(item) {172 return typeof item === "string";173}174exports.isString = isString;175function isUndefined(item) {176 return item === undefined;177}178exports.isUndefined = isUndefined;179function isFunction(item) {180 return item instanceof Function;181}182exports.isFunction = isFunction;183function drop(arr, howMuch) {184 if (howMuch === void 0) { howMuch = 1; }185 return arr.slice(howMuch, arr.length);186}187exports.drop = drop;188function dropRight(arr, howMuch) {189 if (howMuch === void 0) { howMuch = 1; }190 return arr.slice(0, arr.length - howMuch);191}192exports.dropRight = dropRight;193function filter(arr, predicate) {194 var result = [];195 if (Array.isArray(arr)) {196 for (var i = 0; i < arr.length; i++) {197 var item = arr[i];198 if (predicate.call(null, item)) {199 result.push(item);200 }201 }202 }203 return result;204}205exports.filter = filter;206function reject(arr, predicate) {207 return filter(arr, function (item) { return !predicate(item); });208}209exports.reject = reject;210function pick(obj, predicate) {211 var keys = Object.keys(obj);212 var result = {};213 for (var i = 0; i < keys.length; i++) {214 var currKey = keys[i];215 var currItem = obj[currKey];216 if 
(predicate(currItem)) {217 result[currKey] = currItem;218 }219 }220 return result;221}222exports.pick = pick;223function has(obj, prop) {224 if (isObject(obj)) {225 return obj.hasOwnProperty(prop);226 }227 return false;228}229exports.has = has;230function contains(arr, item) {231 return find(arr, function (currItem) { return currItem === item; }) !== undefined ? true : false;232}233exports.contains = contains;234/**235 * shallow clone236 */237function cloneArr(arr) {238 var newArr = [];239 for (var i = 0; i < arr.length; i++) {240 newArr.push(arr[i]);241 }242 return newArr;243}244exports.cloneArr = cloneArr;245/**246 * shallow clone247 */248function cloneObj(obj) {249 var clonedObj = {};250 for (var key in obj) {251 /* istanbul ignore else */252 if (Object.prototype.hasOwnProperty.call(obj, key)) {253 clonedObj[key] = obj[key];254 }255 }256 return clonedObj;257}258exports.cloneObj = cloneObj;259function find(arr, predicate) {260 for (var i = 0; i < arr.length; i++) {261 var item = arr[i];262 if (predicate.call(null, item)) {263 return item;264 }265 }266 return undefined;267}268exports.find = find;269function findAll(arr, predicate) {270 var found = [];271 for (var i = 0; i < arr.length; i++) {272 var item = arr[i];273 if (predicate.call(null, item)) {274 found.push(item);275 }276 }277 return found;278}279exports.findAll = findAll;280function reduce(arrOrObj, iterator, initial) {281 var vals = Array.isArray(arrOrObj)282 ? arrOrObj283 : values(arrOrObj);284 var accumulator = initial;285 for (var i = 0; i < vals.length; i++) {286 accumulator = iterator.call(null, accumulator, vals[i], i);287 }288 return accumulator;289}290exports.reduce = reduce;291function compact(arr) {292 return reject(arr, function (item) { return item === null || item === undefined; });293}294exports.compact = compact;295function uniq(arr, identity) {296 if (identity === void 0) { identity = function (item) { return item; }; }297 var identities = [];298 return reduce(arr, function (result, currItem) {299 var currIdentity = identity(currItem);300 if (contains(identities, currIdentity)) {301 return result;302 }303 else {304 identities.push(currIdentity);305 return result.concat(currItem);306 }307 }, []);308}309exports.uniq = uniq;310function partial(func) {311 var restArgs = [];312 for (var _i = 1; _i < arguments.length; _i++) {313 restArgs[_i - 1] = arguments[_i];314 }315 var firstArg = [null];316 var allArgs = firstArg.concat(restArgs);317 return Function.bind.apply(func, allArgs);318}319exports.partial = partial;320function isArray(obj) {321 return Array.isArray(obj);322}323exports.isArray = isArray;324function isRegExp(obj) {325 return obj instanceof RegExp;326}327exports.isRegExp = isRegExp;328function isObject(obj) {329 return obj instanceof Object;330}331exports.isObject = isObject;332function every(arr, predicate) {333 for (var i = 0; i < arr.length; i++) {334 if (!predicate(arr[i], i)) {335 return false;336 }337 }338 return true;339}340exports.every = every;341function difference(arr, values) {342 return reject(arr, function (item) { return contains(values, item); });343}344exports.difference = difference;345function some(arr, predicate) {346 for (var i = 0; i < arr.length; i++) {347 if (predicate(arr[i])) {348 return true;349 }350 }351 return false;352}353exports.some = some;354function indexOf(arr, value) {355 for (var i = 0; i < arr.length; i++) {356 if (arr[i] === value) {357 return i;358 }359 }360 return -1;361}362exports.indexOf = indexOf;363function sortBy(arr, orderFunc) {364 var result = cloneArr(arr);365 
result.sort(function (a, b) { return orderFunc(a) - orderFunc(b); });366 return result;367}368exports.sortBy = sortBy;369function zipObject(keys, values) {370 if (keys.length !== values.length) {371 throw Error("can't zipObject with different number of keys and values!");372 }373 var result = {};374 for (var i = 0; i < keys.length; i++) {375 result[keys[i]] = values[i];376 }377 return result;378}379exports.zipObject = zipObject;380/**381 * mutates! (and returns) target382 */383function assign(target) {384 var sources = [];385 for (var _i = 1; _i < arguments.length; _i++) {386 sources[_i - 1] = arguments[_i];387 }388 for (var i = 0; i < sources.length; i++) {389 var curSource = sources[i];390 var currSourceKeys = keys(curSource);391 for (var j = 0; j < currSourceKeys.length; j++) {392 var currKey = currSourceKeys[j];393 target[currKey] = curSource[currKey];394 }395 }396 return target;397}398exports.assign = assign;399/**400 * mutates! (and returns) target401 */402function assignNoOverwrite(target) {403 var sources = [];404 for (var _i = 1; _i < arguments.length; _i++) {405 sources[_i - 1] = arguments[_i];406 }407 for (var i = 0; i < sources.length; i++) {408 var curSource = sources[i];409 if (isUndefined(curSource)) {410 continue;411 }412 var currSourceKeys = keys(curSource);413 for (var j = 0; j < currSourceKeys.length; j++) {414 var currKey = currSourceKeys[j];415 if (!has(target, currKey)) {416 target[currKey] = curSource[currKey];417 }418 }419 }420 return target;421}422exports.assignNoOverwrite = assignNoOverwrite;423function defaults() {424 var sources = [];425 for (var _i = 0; _i < arguments.length; _i++) {426 sources[_i] = arguments[_i];427 }428 return assignNoOverwrite.apply(null, [{}].concat(sources));429}430exports.defaults = defaults;431function groupBy(arr, groupKeyFunc) {432 var result = {};433 forEach(arr, function (item) {434 var currGroupKey = groupKeyFunc(item);435 var currGroupArr = result[currGroupKey];436 if (currGroupArr) {437 currGroupArr.push(item);438 }439 else {440 result[currGroupKey] = [item];441 }442 });443 return result;444}445exports.groupBy = groupBy;446/**447 * Merge obj2 into obj1.448 * Will overwrite existing properties with the same name449 */450function merge(obj1, obj2) {451 var result = cloneObj(obj1);452 var keys2 = keys(obj2);453 for (var i = 0; i < keys2.length; i++) {454 var key = keys2[i];455 var value = obj2[key];456 result[key] = value;457 }458 return result;459}460exports.merge = merge;461function NOOP() { }462exports.NOOP = NOOP;463function IDENTITY(item) {464 return item;465}466exports.IDENTITY = IDENTITY;467//# sourceMappingURL=utils.js.map468/***/ }),469/* 1 */470/***/ (function(module, exports, __webpack_require__) {471"use strict";472var __extends = (this && this.__extends) || (function () {473 var extendStatics = Object.setPrototypeOf ||474 ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||475 function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };476 return function (d, b) {477 extendStatics(d, b);478 function __() { this.constructor = d; }479 d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());480 };481})();482Object.defineProperty(exports, "__esModule", { value: true });483var utils_1 = __webpack_require__(0);484var tokens_public_1 = __webpack_require__(2);485var gast;486(function (gast) {487 var AbstractProduction = /** @class */ (function () {488 function AbstractProduction(definition) {489 this.definition = definition;490 }491 AbstractProduction.prototype.accept = function (visitor) {492 visitor.visit(this);493 utils_1.forEach(this.definition, function (prod) {494 prod.accept(visitor);495 });496 };497 return AbstractProduction;498 }());499 gast.AbstractProduction = AbstractProduction;500 var NonTerminal = /** @class */ (function (_super) {501 __extends(NonTerminal, _super);502 function NonTerminal(nonTerminalName, referencedRule, occurrenceInParent, implicitOccurrenceIndex) {503 if (referencedRule === void 0) { referencedRule = undefined; }504 if (occurrenceInParent === void 0) { occurrenceInParent = 1; }505 if (implicitOccurrenceIndex === void 0) { implicitOccurrenceIndex = false; }506 var _this = _super.call(this, []) || this;507 _this.nonTerminalName = nonTerminalName;508 _this.referencedRule = referencedRule;509 _this.occurrenceInParent = occurrenceInParent;510 _this.implicitOccurrenceIndex = implicitOccurrenceIndex;511 return _this;512 }513 Object.defineProperty(NonTerminal.prototype, "definition", {514 get: function () {515 if (this.referencedRule !== undefined) {516 return this.referencedRule.definition;517 }518 return [];519 },520 set: function (definition) {521 // immutable522 },523 enumerable: true,524 configurable: true525 });526 NonTerminal.prototype.accept = function (visitor) {527 visitor.visit(this);528 // don't visit children of a reference, we will get cyclic infinite loops if we do so529 };530 return NonTerminal;531 }(AbstractProduction));532 gast.NonTerminal = NonTerminal;533 var Rule = /** @class */ (function (_super) {534 __extends(Rule, _super);535 function Rule(name, definition, orgText) {536 if (orgText === void 0) { orgText = ""; }537 var _this = _super.call(this, definition) || this;538 _this.name = name;539 _this.orgText = orgText;540 return _this;541 }542 return Rule;543 }(AbstractProduction));544 gast.Rule = Rule;545 var Flat = /** @class */ (function (_super) {546 __extends(Flat, _super);547 // A named Flat production is used to indicate a Nested Rule in an alternation548 function Flat(definition, name) {549 var _this = _super.call(this, definition) || this;550 _this.name = name;551 return _this;552 }553 return Flat;554 }(AbstractProduction));555 gast.Flat = Flat;556 var Option = /** @class */ (function (_super) {557 __extends(Option, _super);558 function Option(definition, occurrenceInParent, name, implicitOccurrenceIndex) {559 if (occurrenceInParent === void 0) { occurrenceInParent = 1; }560 if (implicitOccurrenceIndex === void 0) { implicitOccurrenceIndex = false; }561 var _this = _super.call(this, definition) || this;562 _this.occurrenceInParent = occurrenceInParent;563 _this.name = name;564 _this.implicitOccurrenceIndex = implicitOccurrenceIndex;565 return _this;566 }567 return Option;568 }(AbstractProduction));569 gast.Option = Option;570 var RepetitionMandatory = /** @class */ (function (_super) {571 __extends(RepetitionMandatory, _super);572 function RepetitionMandatory(definition, occurrenceInParent, name, implicitOccurrenceIndex) {573 if (occurrenceInParent === void 0) { occurrenceInParent = 1; }574 if (implicitOccurrenceIndex === void 0) { implicitOccurrenceIndex = false; }575 
var _this = _super.call(this, definition) || this;576 _this.occurrenceInParent = occurrenceInParent;577 _this.name = name;578 _this.implicitOccurrenceIndex = implicitOccurrenceIndex;579 return _this;580 }581 return RepetitionMandatory;582 }(AbstractProduction));583 gast.RepetitionMandatory = RepetitionMandatory;584 var RepetitionMandatoryWithSeparator = /** @class */ (function (_super) {585 __extends(RepetitionMandatoryWithSeparator, _super);586 function RepetitionMandatoryWithSeparator(definition, separator, occurrenceInParent, name, implicitOccurrenceIndex) {587 if (occurrenceInParent === void 0) { occurrenceInParent = 1; }588 if (implicitOccurrenceIndex === void 0) { implicitOccurrenceIndex = false; }589 var _this = _super.call(this, definition) || this;590 _this.separator = separator;591 _this.occurrenceInParent = occurrenceInParent;592 _this.name = name;593 _this.implicitOccurrenceIndex = implicitOccurrenceIndex;594 return _this;595 }596 return RepetitionMandatoryWithSeparator;597 }(AbstractProduction));598 gast.RepetitionMandatoryWithSeparator = RepetitionMandatoryWithSeparator;599 var Repetition = /** @class */ (function (_super) {600 __extends(Repetition, _super);601 function Repetition(definition, occurrenceInParent, name, implicitOccurrenceIndex) {602 if (occurrenceInParent === void 0) { occurrenceInParent = 1; }603 if (implicitOccurrenceIndex === void 0) { implicitOccurrenceIndex = false; }604 var _this = _super.call(this, definition) || this;605 _this.occurrenceInParent = occurrenceInParent;606 _this.name = name;607 _this.implicitOccurrenceIndex = implicitOccurrenceIndex;608 return _this;609 }610 return Repetition;611 }(AbstractProduction));612 gast.Repetition = Repetition;613 var RepetitionWithSeparator = /** @class */ (function (_super) {614 __extends(RepetitionWithSeparator, _super);615 function RepetitionWithSeparator(definition, separator, occurrenceInParent, name, implicitOccurrenceIndex) {616 if (occurrenceInParent === void 0) { occurrenceInParent = 1; }617 if (implicitOccurrenceIndex === void 0) { implicitOccurrenceIndex = false; }618 var _this = _super.call(this, definition) || this;619 _this.separator = separator;620 _this.occurrenceInParent = occurrenceInParent;621 _this.name = name;622 _this.implicitOccurrenceIndex = implicitOccurrenceIndex;623 return _this;624 }625 return RepetitionWithSeparator;626 }(AbstractProduction));627 gast.RepetitionWithSeparator = RepetitionWithSeparator;628 var Alternation = /** @class */ (function (_super) {629 __extends(Alternation, _super);630 function Alternation(definition, occurrenceInParent, name, implicitOccurrenceIndex) {631 if (occurrenceInParent === void 0) { occurrenceInParent = 1; }632 if (implicitOccurrenceIndex === void 0) { implicitOccurrenceIndex = false; }633 var _this = _super.call(this, definition) || this;634 _this.occurrenceInParent = occurrenceInParent;635 _this.name = name;636 _this.implicitOccurrenceIndex = implicitOccurrenceIndex;637 return _this;638 }639 return Alternation;640 }(AbstractProduction));641 gast.Alternation = Alternation;642 var Terminal = /** @class */ (function () {643 function Terminal(terminalType, occurrenceInParent, implicitOccurrenceIndex) {644 if (occurrenceInParent === void 0) { occurrenceInParent = 1; }645 if (implicitOccurrenceIndex === void 0) { implicitOccurrenceIndex = false; }646 this.terminalType = terminalType;647 this.occurrenceInParent = occurrenceInParent;648 this.implicitOccurrenceIndex = implicitOccurrenceIndex;649 }650 Terminal.prototype.accept = function (visitor) {651 
visitor.visit(this);652 };653 return Terminal;654 }());655 gast.Terminal = Terminal;656 var GAstVisitor = /** @class */ (function () {657 function GAstVisitor() {658 }659 GAstVisitor.prototype.visit = function (node) {660 if (node instanceof NonTerminal) {661 return this.visitNonTerminal(node);662 }663 else if (node instanceof Flat) {664 return this.visitFlat(node);665 }666 else if (node instanceof Option) {667 return this.visitOption(node);668 }669 else if (node instanceof RepetitionMandatory) {670 return this.visitRepetitionMandatory(node);671 }672 else if (node instanceof RepetitionMandatoryWithSeparator) {673 return this.visitRepetitionMandatoryWithSeparator(node);674 }675 else if (node instanceof RepetitionWithSeparator) {676 return this.visitRepetitionWithSeparator(node);677 }678 else if (node instanceof Repetition) {679 return this.visitRepetition(node);680 }681 else if (node instanceof Alternation) {682 return this.visitAlternation(node);683 }684 else if (node instanceof Terminal) {685 return this.visitTerminal(node);686 }687 else if (node instanceof Rule) {688 return this.visitRule(node);689 }690 else {691 /* istanbul ignore next */692 throw Error("non exhaustive match");693 }694 };695 GAstVisitor.prototype.visitNonTerminal = function (node) { };696 GAstVisitor.prototype.visitFlat = function (node) { };697 GAstVisitor.prototype.visitOption = function (node) { };698 GAstVisitor.prototype.visitRepetition = function (node) { };699 GAstVisitor.prototype.visitRepetitionMandatory = function (node) { };700 GAstVisitor.prototype.visitRepetitionMandatoryWithSeparator = function (node) { };701 GAstVisitor.prototype.visitRepetitionWithSeparator = function (node) { };702 GAstVisitor.prototype.visitAlternation = function (node) { };703 GAstVisitor.prototype.visitTerminal = function (node) { };704 GAstVisitor.prototype.visitRule = function (node) { };705 return GAstVisitor;706 }());707 gast.GAstVisitor = GAstVisitor;708 function serializeGrammar(topRules) {709 return utils_1.map(topRules, serializeProduction);710 }711 gast.serializeGrammar = serializeGrammar;712 function serializeProduction(node) {713 function convertDefinition(definition) {714 return utils_1.map(definition, serializeProduction);715 }716 if (node instanceof NonTerminal) {717 return {718 type: "NonTerminal",719 name: node.nonTerminalName,720 occurrenceInParent: node.occurrenceInParent721 };722 }723 else if (node instanceof Flat) {724 return {725 type: "Flat",726 definition: convertDefinition(node.definition)727 };728 }729 else if (node instanceof Option) {730 return {731 type: "Option",732 definition: convertDefinition(node.definition)733 };734 }735 else if (node instanceof RepetitionMandatory) {736 return {737 type: "RepetitionMandatory",738 definition: convertDefinition(node.definition)739 };740 }741 else if (node instanceof RepetitionMandatoryWithSeparator) {742 return {743 type: "RepetitionMandatoryWithSeparator",744 separator: serializeProduction(new Terminal(node.separator)),745 definition: convertDefinition(node.definition)746 };747 }748 else if (node instanceof RepetitionWithSeparator) {749 return {750 type: "RepetitionWithSeparator",751 separator: serializeProduction(new Terminal(node.separator)),752 definition: convertDefinition(node.definition)753 };754 }755 else if (node instanceof Repetition) {756 return {757 type: "Repetition",758 definition: convertDefinition(node.definition)759 };760 }761 else if (node instanceof Alternation) {762 return {763 type: "Alternation",764 definition: 
convertDefinition(node.definition)765 };766 }767 else if (node instanceof Terminal) {768 var serializedTerminal = {769 type: "Terminal",770 name: tokens_public_1.tokenName(node.terminalType),771 label: tokens_public_1.tokenLabel(node.terminalType),772 occurrenceInParent: node.occurrenceInParent773 };774 var pattern = node.terminalType.PATTERN;775 if (node.terminalType.PATTERN) {776 serializedTerminal.pattern = utils_1.isRegExp(pattern)777 ? pattern.source778 : pattern;779 }780 return serializedTerminal;781 }782 else if (node instanceof Rule) {783 // IGNORE ABOVE ELSE784 return {785 type: "Rule",786 name: node.name,787 definition: convertDefinition(node.definition)788 };789 }790 else {791 /* istanbul ignore next */792 throw Error("non exhaustive match");793 }794 }795 gast.serializeProduction = serializeProduction;796})(gast = exports.gast || (exports.gast = {}));797//# sourceMappingURL=gast_public.js.map798/***/ }),799/* 2 */800/***/ (function(module, exports, __webpack_require__) {801"use strict";802Object.defineProperty(exports, "__esModule", { value: true });803var utils_1 = __webpack_require__(0);804var lang_extensions_1 = __webpack_require__(3);805var lexer_public_1 = __webpack_require__(8);806var tokens_1 = __webpack_require__(4);807/**808 * This can be used to improve the quality/readability of error messages or syntax diagrams.809 *810 * @param {TokenType} clazz - A constructor for a Token subclass811 * @returns {string} - The Human readable label for a Token if it exists.812 */813function tokenLabel(clazz) {814 if (hasTokenLabel(clazz)) {815 return clazz.LABEL;816 }817 else {818 return tokenName(clazz);819 }820}821exports.tokenLabel = tokenLabel;822function hasTokenLabel(obj) {823 return utils_1.isString(obj.LABEL) && obj.LABEL !== "";824}825exports.hasTokenLabel = hasTokenLabel;826function tokenName(obj) {827 // The tokenName property is needed under some old versions of node.js (0.10/0.12)828 // where the Function.prototype.name property is not defined as a 'configurable' property829 // enable producing readable error messages.830 /* istanbul ignore if -> will only run in old versions of node.js */831 if (utils_1.isObject(obj) &&832 obj.hasOwnProperty("tokenName") &&833 utils_1.isString(obj.tokenName)) {834 return obj.tokenName;835 }836 else {837 return lang_extensions_1.functionName(obj);838 }839}840exports.tokenName = tokenName;841var PARENT = "parent";842var CATEGORIES = "categories";843var LABEL = "label";844var GROUP = "group";845var PUSH_MODE = "push_mode";846var POP_MODE = "pop_mode";847var LONGER_ALT = "longer_alt";848var LINE_BREAKS = "line_breaks";849/**850 * @param {ITokenConfig} config - The configuration for851 * @returns {TokenType} - A constructor for the new Token subclass852 */853function createToken(config) {854 return createTokenInternal(config);855}856exports.createToken = createToken;857function createTokenInternal(config) {858 var tokenName = config.name;859 var pattern = config.pattern;860 var tokenType = {};861 // can be overwritten according to:862 // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/863 // name?redirectlocale=en-US&redirectslug=JavaScript%2FReference%2FGlobal_Objects%2FFunction%2Fname864 /* istanbul ignore if -> will only run in old versions of node.js */865 if (!lang_extensions_1.defineNameProp(tokenType, tokenName)) {866 // hack to save the tokenName in situations where the constructor's name property cannot be reconfigured867 tokenType.tokenName = tokenName;868 }869 if 
(!utils_1.isUndefined(pattern)) {870 tokenType.PATTERN = pattern;871 }872 if (utils_1.has(config, PARENT)) {873 throw "The parent property is no longer supported.\n" +874 "See: [TODO-add link] for details.";875 }876 if (utils_1.has(config, CATEGORIES)) {877 tokenType.CATEGORIES = config[CATEGORIES];878 }879 tokens_1.augmentTokenTypes([tokenType]);880 if (utils_1.has(config, LABEL)) {881 tokenType.LABEL = config[LABEL];882 }883 if (utils_1.has(config, GROUP)) {884 tokenType.GROUP = config[GROUP];885 }886 if (utils_1.has(config, POP_MODE)) {887 tokenType.POP_MODE = config[POP_MODE];888 }889 if (utils_1.has(config, PUSH_MODE)) {890 tokenType.PUSH_MODE = config[PUSH_MODE];891 }892 if (utils_1.has(config, LONGER_ALT)) {893 tokenType.LONGER_ALT = config[LONGER_ALT];894 }895 if (utils_1.has(config, LINE_BREAKS)) {896 tokenType.LINE_BREAKS = config[LINE_BREAKS];897 }898 return tokenType;899}900exports.EOF = createToken({ name: "EOF", pattern: lexer_public_1.Lexer.NA });901tokens_1.augmentTokenTypes([exports.EOF]);902/**903 * Utility to create Chevrotain Token "instances"904 * Note that Chevrotain tokens are not real instances, and thus the instanceOf cannot be used.905 *906 * @param tokType907 * @param image908 * @param startOffset909 * @param endOffset910 * @param startLine911 * @param endLine912 * @param startColumn913 * @param endColumn914 * @returns {{image: string,915 * startOffset: number,916 * endOffset: number,917 * startLine: number,918 * endLine: number,919 * startColumn: number,920 * endColumn: number,921 * tokenType}}922 */923function createTokenInstance(tokType, image, startOffset, endOffset, startLine, endLine, startColumn, endColumn) {924 return {925 image: image,926 startOffset: startOffset,927 endOffset: endOffset,928 startLine: startLine,929 endLine: endLine,930 startColumn: startColumn,931 endColumn: endColumn,932 tokenTypeIdx: tokType.tokenTypeIdx,933 tokenType: tokType934 };935}936exports.createTokenInstance = createTokenInstance;937/**938 * A Utility method to check if a token is of the type of the argument Token class.939 * This utility is needed because Chevrotain tokens support "categories" which means940 * A TokenType may have multiple categories, so a TokenType for the "true" literal in JavaScript941 * May be both a Keyword Token and a Literal Token.942 *943 * @param token {IToken}944 * @param tokType {TokenType}945 * @returns {boolean}946 */947function tokenMatcher(token, tokType) {948 return tokens_1.tokenStructuredMatcher(token, tokType);949}950exports.tokenMatcher = tokenMatcher;951//# sourceMappingURL=tokens_public.js.map952/***/ }),953/* 3 */954/***/ (function(module, exports, __webpack_require__) {955"use strict";956Object.defineProperty(exports, "__esModule", { value: true });957var utils = __webpack_require__(0);958var utils_1 = __webpack_require__(0);959function classNameFromInstance(instance) {960 return functionName(instance.constructor);961}962exports.classNameFromInstance = classNameFromInstance;963var FUNC_NAME_REGEXP = /^\s*function\s*(\S*)\s*\(/;964var NAME = "name";965/* istanbul ignore next too many hacks for IE/old versions of node.js here*/966function functionName(func) {967 // Engines that support Function.prototype.name OR the nth (n>1) time after968 // the name has been computed in the following else block.969 var existingNameProp = func.name;970 if (existingNameProp) {971 return existingNameProp;972 }973 // hack for IE and engines that do not support Object.defineProperty on function.name (Node.js 0.10 && 0.12)974 var computedName = 
func.toString().match(FUNC_NAME_REGEXP)[1];975 return computedName;976}977exports.functionName = functionName;978/**979 * @returns {boolean} - has the property been successfully defined980 */981function defineNameProp(obj, nameValue) {982 var namePropDescriptor = Object.getOwnPropertyDescriptor(obj, NAME);983 /* istanbul ignore else -> will only run in old versions of node.js */984 if (utils_1.isUndefined(namePropDescriptor) || namePropDescriptor.configurable) {985 Object.defineProperty(obj, NAME, {986 enumerable: false,987 configurable: true,988 writable: false,989 value: nameValue990 });991 return true;992 }993 /* istanbul ignore next -> will only run in old versions of node.js */994 return false;995}996exports.defineNameProp = defineNameProp;997/**998 * simple Hashtable between a string and some generic value999 * this should be removed once typescript supports ES6 style Hashtable1000 */1001var HashTable = /** @class */ (function () {1002 function HashTable() {1003 this._state = {};1004 }1005 HashTable.prototype.keys = function () {1006 return utils.keys(this._state);1007 };1008 HashTable.prototype.values = function () {1009 return utils.values(this._state);1010 };1011 HashTable.prototype.put = function (key, value) {1012 this._state[key] = value;1013 };1014 HashTable.prototype.putAll = function (other) {1015 this._state = utils.assign(this._state, other._state);1016 };1017 HashTable.prototype.get = function (key) {1018 // To avoid edge case with a key called "hasOwnProperty" we need to perform the commented out check below1019 // -> if (Object.prototype.hasOwnProperty.call(this._state, key)) { ... } <-1020 // however this costs nearly 25% of the parser's runtime.1021 // if someone decides to name their Parser class "hasOwnProperty" they deserve what they will get :)1022 return this._state[key];1023 };1024 HashTable.prototype.containsKey = function (key) {1025 return utils.has(this._state, key);1026 };1027 HashTable.prototype.clear = function () {1028 this._state = {};1029 };1030 return HashTable;1031}());1032exports.HashTable = HashTable;1033//# sourceMappingURL=lang_extensions.js.map1034/***/ }),1035/* 4 */1036/***/ (function(module, exports, __webpack_require__) {1037"use strict";1038Object.defineProperty(exports, "__esModule", { value: true });1039var utils_1 = __webpack_require__(0);1040var lang_extensions_1 = __webpack_require__(3);1041var tokens_public_1 = __webpack_require__(2);1042function tokenStructuredMatcher(tokInstance, tokConstructor) {1043 var instanceType = tokInstance.tokenTypeIdx;1044 if (instanceType === tokConstructor.tokenTypeIdx) {1045 return true;1046 }1047 else {1048 return (tokConstructor.isParent === true &&1049 tokConstructor.categoryMatchesMap[instanceType] === true);1050 }1051}1052exports.tokenStructuredMatcher = tokenStructuredMatcher;1053// Optimized tokenMatcher in case our grammar does not use token categories1054// Being so tiny it is much more likely to be in-lined and this avoid the function call overhead1055function tokenStructuredMatcherNoCategories(token, tokType) {1056 return token.tokenTypeIdx === tokType.tokenTypeIdx;1057}1058exports.tokenStructuredMatcherNoCategories = tokenStructuredMatcherNoCategories;1059exports.tokenShortNameIdx = 1;1060exports.tokenIdxToClass = new lang_extensions_1.HashTable();1061function augmentTokenTypes(tokenTypes) {1062 // collect the parent Token Types as well.1063 var tokenTypesAndParents = expandCategories(tokenTypes);1064 // add required tokenType and categoryMatches properties1065 
assignTokenDefaultProps(tokenTypesAndParents);1066 // fill up the categoryMatches1067 assignCategoriesMapProp(tokenTypesAndParents);1068 assignCategoriesTokensProp(tokenTypesAndParents);1069 utils_1.forEach(tokenTypesAndParents, function (tokType) {1070 tokType.isParent = tokType.categoryMatches.length > 0;1071 });1072}1073exports.augmentTokenTypes = augmentTokenTypes;1074function expandCategories(tokenTypes) {1075 var result = utils_1.cloneArr(tokenTypes);1076 var categories = tokenTypes;1077 var searching = true;1078 while (searching) {1079 categories = utils_1.compact(utils_1.flatten(utils_1.map(categories, function (currTokType) { return currTokType.CATEGORIES; })));1080 var newCategories = utils_1.difference(categories, result);1081 result = result.concat(newCategories);1082 if (utils_1.isEmpty(newCategories)) {1083 searching = false;1084 }1085 else {1086 categories = newCategories;1087 }1088 }1089 return result;1090}1091exports.expandCategories = expandCategories;1092function assignTokenDefaultProps(tokenTypes) {1093 utils_1.forEach(tokenTypes, function (currTokType) {1094 if (!hasShortKeyProperty(currTokType)) {1095 exports.tokenIdxToClass.put(exports.tokenShortNameIdx, currTokType);1096 currTokType.tokenTypeIdx = exports.tokenShortNameIdx++;1097 }1098 // CATEGORIES? : TokenType | TokenType[]1099 if (hasCategoriesProperty(currTokType) &&1100 !utils_1.isArray(currTokType.CATEGORIES)) {1101 currTokType.CATEGORIES = [currTokType.CATEGORIES];1102 }1103 if (!hasCategoriesProperty(currTokType)) {1104 currTokType.CATEGORIES = [];1105 }1106 if (!hasExtendingTokensTypesProperty(currTokType)) {1107 currTokType.categoryMatches = [];1108 }1109 if (!hasExtendingTokensTypesMapProperty(currTokType)) {1110 currTokType.categoryMatchesMap = {};1111 }1112 if (!hasTokenNameProperty(currTokType)) {1113 // saved for fast access during CST building.1114 currTokType.tokenName = tokens_public_1.tokenName(currTokType);1115 }1116 });1117}1118exports.assignTokenDefaultProps = assignTokenDefaultProps;1119function assignCategoriesTokensProp(tokenTypes) {1120 utils_1.forEach(tokenTypes, function (currTokType) {1121 // avoid duplications1122 currTokType.categoryMatches = [];1123 utils_1.forEach(currTokType.categoryMatchesMap, function (val, key) {1124 currTokType.categoryMatches.push(exports.tokenIdxToClass.get(key).tokenTypeIdx);1125 });1126 });1127}1128exports.assignCategoriesTokensProp = assignCategoriesTokensProp;1129function assignCategoriesMapProp(tokenTypes) {1130 utils_1.forEach(tokenTypes, function (currTokType) {1131 singleAssignCategoriesToksMap([], currTokType);1132 });1133}1134exports.assignCategoriesMapProp = assignCategoriesMapProp;1135function singleAssignCategoriesToksMap(path, nextNode) {1136 utils_1.forEach(path, function (pathNode) {1137 nextNode.categoryMatchesMap[pathNode.tokenTypeIdx] = true;1138 });1139 utils_1.forEach(nextNode.CATEGORIES, function (nextCategory) {1140 var newPath = path.concat(nextNode);1141 if (!utils_1.contains(newPath, nextCategory)) {1142 singleAssignCategoriesToksMap(newPath, nextCategory);1143 }1144 });1145}1146function hasShortKeyProperty(tokType) {1147 return utils_1.has(tokType, "tokenTypeIdx");1148}1149exports.hasShortKeyProperty = hasShortKeyProperty;1150function hasCategoriesProperty(tokType) {1151 return utils_1.has(tokType, "CATEGORIES");1152}1153exports.hasCategoriesProperty = hasCategoriesProperty;1154function hasExtendingTokensTypesProperty(tokType) {1155 return utils_1.has(tokType, "categoryMatches");1156}1157exports.hasExtendingTokensTypesProperty = 
hasExtendingTokensTypesProperty;1158function hasExtendingTokensTypesMapProperty(tokType) {1159 return utils_1.has(tokType, "categoryMatchesMap");1160}1161exports.hasExtendingTokensTypesMapProperty = hasExtendingTokensTypesMapProperty;1162function hasTokenNameProperty(tokType) {1163 return utils_1.has(tokType, "tokenName");1164}1165exports.hasTokenNameProperty = hasTokenNameProperty;1166function isTokenType(tokType) {1167 return utils_1.has(tokType, "tokenTypeIdx");1168}1169exports.isTokenType = isTokenType;1170//# sourceMappingURL=tokens.js.map1171/***/ }),1172/* 5 */1173/***/ (function(module, exports, __webpack_require__) {1174"use strict";1175var __extends = (this && this.__extends) || (function () {1176 var extendStatics = Object.setPrototypeOf ||1177 ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||1178 function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };1179 return function (d, b) {1180 extendStatics(d, b);1181 function __() { this.constructor = d; }1182 d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());1183 };1184})();1185Object.defineProperty(exports, "__esModule", { value: true });1186/* tslint:disable:no-use-before-declare */1187var rest_1 = __webpack_require__(10);1188var gast_public_1 = __webpack_require__(1);1189var utils_1 = __webpack_require__(0);1190var tokens_public_1 = __webpack_require__(2);1191var first_1 = __webpack_require__(15);1192/* tslint:enable:no-use-before-declare */1193var AbstractNextPossibleTokensWalker = /** @class */ (function (_super) {1194 __extends(AbstractNextPossibleTokensWalker, _super);1195 function AbstractNextPossibleTokensWalker(topProd, path) {1196 var _this = _super.call(this) || this;1197 _this.topProd = topProd;1198 _this.path = path;1199 _this.possibleTokTypes = [];1200 _this.nextProductionName = "";1201 _this.nextProductionOccurrence = 0;1202 _this.found = false;1203 _this.isAtEndOfPath = false;1204 return _this;1205 }1206 AbstractNextPossibleTokensWalker.prototype.startWalking = function () {1207 this.found = false;1208 if (this.path.ruleStack[0] !== this.topProd.name) {1209 throw Error("The path does not start with the walker's top Rule!");1210 }1211 // immutable for the win1212 this.ruleStack = utils_1.cloneArr(this.path.ruleStack).reverse(); // intelij bug requires assertion1213 this.occurrenceStack = utils_1.cloneArr(this.path.occurrenceStack).reverse(); // intelij bug requires assertion1214 // already verified that the first production is valid, we now seek the 2nd production1215 this.ruleStack.pop();1216 this.occurrenceStack.pop();1217 this.updateExpectedNext();1218 this.walk(this.topProd);1219 return this.possibleTokTypes;1220 };1221 AbstractNextPossibleTokensWalker.prototype.walk = function (prod, prevRest) {1222 if (prevRest === void 0) { prevRest = []; }1223 // stop scanning once we found the path1224 if (!this.found) {1225 _super.prototype.walk.call(this, prod, prevRest);1226 }1227 };1228 AbstractNextPossibleTokensWalker.prototype.walkProdRef = function (refProd, currRest, prevRest) {1229 // found the next production, need to keep walking in it1230 if (refProd.referencedRule.name === this.nextProductionName &&1231 refProd.occurrenceInParent === this.nextProductionOccurrence) {1232 var fullRest = currRest.concat(prevRest);1233 this.updateExpectedNext();1234 this.walk(refProd.referencedRule, fullRest);1235 }1236 };1237 AbstractNextPossibleTokensWalker.prototype.updateExpectedNext = function () {1238 // need to consume the Terminal1239 if 
(utils_1.isEmpty(this.ruleStack)) {1240 // must reset nextProductionXXX to avoid walking down another Top Level production while what we are1241 // really seeking is the last Terminal...1242 this.nextProductionName = "";1243 this.nextProductionOccurrence = 0;1244 this.isAtEndOfPath = true;1245 }1246 else {1247 this.nextProductionName = this.ruleStack.pop();1248 this.nextProductionOccurrence = this.occurrenceStack.pop();1249 }1250 };1251 return AbstractNextPossibleTokensWalker;1252}(rest_1.RestWalker));1253exports.AbstractNextPossibleTokensWalker = AbstractNextPossibleTokensWalker;1254var NextAfterTokenWalker = /** @class */ (function (_super) {1255 __extends(NextAfterTokenWalker, _super);1256 function NextAfterTokenWalker(topProd, path) {1257 var _this = _super.call(this, topProd, path) || this;1258 _this.path = path;1259 _this.nextTerminalName = "";1260 _this.nextTerminalOccurrence = 0;1261 _this.nextTerminalName = tokens_public_1.tokenName(_this.path.lastTok);1262 _this.nextTerminalOccurrence = _this.path.lastTokOccurrence;1263 return _this;1264 }1265 NextAfterTokenWalker.prototype.walkTerminal = function (terminal, currRest, prevRest) {1266 if (this.isAtEndOfPath &&1267 tokens_public_1.tokenName(terminal.terminalType) === this.nextTerminalName &&1268 terminal.occurrenceInParent === this.nextTerminalOccurrence &&1269 !this.found) {1270 var fullRest = currRest.concat(prevRest);1271 var restProd = new gast_public_1.gast.Flat(fullRest);1272 this.possibleTokTypes = first_1.first(restProd);1273 this.found = true;1274 }1275 };1276 return NextAfterTokenWalker;1277}(AbstractNextPossibleTokensWalker));1278exports.NextAfterTokenWalker = NextAfterTokenWalker;1279/**1280 * This walker only "walks" a single "TOP" level in the Grammar Ast, this means1281 * it never "follows" production refs1282 */1283var AbstractNextTerminalAfterProductionWalker = /** @class */ (function (_super) {1284 __extends(AbstractNextTerminalAfterProductionWalker, _super);1285 function AbstractNextTerminalAfterProductionWalker(topRule, occurrence) {1286 var _this = _super.call(this) || this;1287 _this.topRule = topRule;1288 _this.occurrence = occurrence;1289 _this.result = {1290 token: undefined,1291 occurrence: undefined,1292 isEndOfRule: undefined1293 };1294 return _this;1295 }1296 AbstractNextTerminalAfterProductionWalker.prototype.startWalking = function () {1297 this.walk(this.topRule);1298 return this.result;1299 };1300 return AbstractNextTerminalAfterProductionWalker;1301}(rest_1.RestWalker));1302exports.AbstractNextTerminalAfterProductionWalker = AbstractNextTerminalAfterProductionWalker;1303var NextTerminalAfterManyWalker = /** @class */ (function (_super) {1304 __extends(NextTerminalAfterManyWalker, _super);1305 function NextTerminalAfterManyWalker() {1306 return _super !== null && _super.apply(this, arguments) || this;1307 }1308 NextTerminalAfterManyWalker.prototype.walkMany = function (manyProd, currRest, prevRest) {1309 if (manyProd.occurrenceInParent === this.occurrence) {1310 var firstAfterMany = utils_1.first(currRest.concat(prevRest));1311 this.result.isEndOfRule = firstAfterMany === undefined;1312 if (firstAfterMany instanceof gast_public_1.gast.Terminal) {1313 this.result.token = firstAfterMany.terminalType;1314 this.result.occurrence = firstAfterMany.occurrenceInParent;1315 }1316 }1317 else {1318 _super.prototype.walkMany.call(this, manyProd, currRest, prevRest);1319 }1320 };1321 return NextTerminalAfterManyWalker;1322}(AbstractNextTerminalAfterProductionWalker));1323exports.NextTerminalAfterManyWalker = 
NextTerminalAfterManyWalker;1324var NextTerminalAfterManySepWalker = /** @class */ (function (_super) {1325 __extends(NextTerminalAfterManySepWalker, _super);1326 function NextTerminalAfterManySepWalker() {1327 return _super !== null && _super.apply(this, arguments) || this;1328 }1329 NextTerminalAfterManySepWalker.prototype.walkManySep = function (manySepProd, currRest, prevRest) {1330 if (manySepProd.occurrenceInParent === this.occurrence) {1331 var firstAfterManySep = utils_1.first(currRest.concat(prevRest));1332 this.result.isEndOfRule = firstAfterManySep === undefined;1333 if (firstAfterManySep instanceof gast_public_1.gast.Terminal) {1334 this.result.token = firstAfterManySep.terminalType;1335 this.result.occurrence = firstAfterManySep.occurrenceInParent;1336 }1337 }1338 else {1339 _super.prototype.walkManySep.call(this, manySepProd, currRest, prevRest);1340 }1341 };1342 return NextTerminalAfterManySepWalker;1343}(AbstractNextTerminalAfterProductionWalker));1344exports.NextTerminalAfterManySepWalker = NextTerminalAfterManySepWalker;1345var NextTerminalAfterAtLeastOneWalker = /** @class */ (function (_super) {1346 __extends(NextTerminalAfterAtLeastOneWalker, _super);1347 function NextTerminalAfterAtLeastOneWalker() {1348 return _super !== null && _super.apply(this, arguments) || this;1349 }1350 NextTerminalAfterAtLeastOneWalker.prototype.walkAtLeastOne = function (atLeastOneProd, currRest, prevRest) {1351 if (atLeastOneProd.occurrenceInParent === this.occurrence) {1352 var firstAfterAtLeastOne = utils_1.first(currRest.concat(prevRest));1353 this.result.isEndOfRule = firstAfterAtLeastOne === undefined;1354 if (firstAfterAtLeastOne instanceof gast_public_1.gast.Terminal) {1355 this.result.token = firstAfterAtLeastOne.terminalType;1356 this.result.occurrence = firstAfterAtLeastOne.occurrenceInParent;1357 }1358 }1359 else {1360 _super.prototype.walkAtLeastOne.call(this, atLeastOneProd, currRest, prevRest);1361 }1362 };1363 return NextTerminalAfterAtLeastOneWalker;1364}(AbstractNextTerminalAfterProductionWalker));1365exports.NextTerminalAfterAtLeastOneWalker = NextTerminalAfterAtLeastOneWalker;1366// TODO: reduce code duplication in the AfterWalkers1367var NextTerminalAfterAtLeastOneSepWalker = /** @class */ (function (_super) {1368 __extends(NextTerminalAfterAtLeastOneSepWalker, _super);1369 function NextTerminalAfterAtLeastOneSepWalker() {1370 return _super !== null && _super.apply(this, arguments) || this;1371 }1372 NextTerminalAfterAtLeastOneSepWalker.prototype.walkAtLeastOneSep = function (atleastOneSepProd, currRest, prevRest) {1373 if (atleastOneSepProd.occurrenceInParent === this.occurrence) {1374 var firstAfterfirstAfterAtLeastOneSep = utils_1.first(currRest.concat(prevRest));1375 this.result.isEndOfRule =1376 firstAfterfirstAfterAtLeastOneSep === undefined;1377 if (firstAfterfirstAfterAtLeastOneSep instanceof gast_public_1.gast.Terminal) {1378 this.result.token =1379 firstAfterfirstAfterAtLeastOneSep.terminalType;1380 this.result.occurrence =1381 firstAfterfirstAfterAtLeastOneSep.occurrenceInParent;1382 }1383 }1384 else {1385 _super.prototype.walkAtLeastOneSep.call(this, atleastOneSepProd, currRest, prevRest);1386 }1387 };1388 return NextTerminalAfterAtLeastOneSepWalker;1389}(AbstractNextTerminalAfterProductionWalker));1390exports.NextTerminalAfterAtLeastOneSepWalker = NextTerminalAfterAtLeastOneSepWalker;1391function possiblePathsFrom(targetDef, maxLength, currPath) {1392 if (currPath === void 0) { currPath = []; }1393 // avoid side effects1394 currPath = 
utils_1.cloneArr(currPath);1395 var result = [];1396 var i = 0;1397 function remainingPathWith(nextDef) {1398 return nextDef.concat(utils_1.drop(targetDef, i + 1));1399 }1400 function getAlternativesForProd(definition) {1401 var alternatives = possiblePathsFrom(remainingPathWith(definition), maxLength, currPath);1402 return result.concat(alternatives);1403 }1404 /**1405 * Mandatory productions will halt the loop as the paths computed from their recursive calls will already contain the1406 * following (rest) of the targetDef.1407 *1408 * For optional productions (Option/Repetition/...) the loop will continue to represent the paths that do not include the1409 * the optional production.1410 */1411 while (currPath.length < maxLength && i < targetDef.length) {1412 var prod = targetDef[i];1413 if (prod instanceof gast_public_1.gast.Flat) {1414 return getAlternativesForProd(prod.definition);1415 }1416 else if (prod instanceof gast_public_1.gast.NonTerminal) {1417 return getAlternativesForProd(prod.definition);1418 }1419 else if (prod instanceof gast_public_1.gast.Option) {1420 result = getAlternativesForProd(prod.definition);1421 }1422 else if (prod instanceof gast_public_1.gast.RepetitionMandatory) {1423 return getAlternativesForProd(prod.definition);1424 }1425 else if (prod instanceof gast_public_1.gast.RepetitionMandatoryWithSeparator) {1426 var newDef = [1427 new gast_public_1.gast.Flat(prod.definition),1428 new gast_public_1.gast.Repetition([new gast_public_1.gast.Terminal(prod.separator)].concat(prod.definition))1429 ];1430 return getAlternativesForProd(newDef);1431 }1432 else if (prod instanceof gast_public_1.gast.RepetitionWithSeparator) {1433 var newDef = prod.definition.concat([1434 new gast_public_1.gast.Repetition([new gast_public_1.gast.Terminal(prod.separator)].concat(prod.definition))1435 ]);1436 result = getAlternativesForProd(newDef);1437 }1438 else if (prod instanceof gast_public_1.gast.Repetition) {1439 result = getAlternativesForProd(prod.definition);1440 }1441 else if (prod instanceof gast_public_1.gast.Alternation) {1442 utils_1.forEach(prod.definition, function (currAlt) {1443 result = getAlternativesForProd(currAlt.definition);1444 });1445 return result;1446 }1447 else if (prod instanceof gast_public_1.gast.Terminal) {1448 currPath.push(prod.terminalType);1449 }1450 else {1451 /* istanbul ignore next */1452 throw Error("non exhaustive match");1453 }1454 i++;1455 }1456 result.push({1457 partialPath: currPath,1458 suffixDef: utils_1.drop(targetDef, i)1459 });1460 return result;1461}1462exports.possiblePathsFrom = possiblePathsFrom;1463function nextPossibleTokensAfter(initialDef, tokenVector, tokMatcher, maxLookAhead) {1464 var EXIT_NON_TERMINAL = "EXIT_NONE_TERMINAL";1465 // to avoid creating a new Array each time.1466 var EXIT_NON_TERMINAL_ARR = [EXIT_NON_TERMINAL];1467 var EXIT_ALTERNATIVE = "EXIT_ALTERNATIVE";1468 var foundCompletePath = false;1469 var tokenVectorLength = tokenVector.length;1470 var minimalAlternativesIndex = tokenVectorLength - maxLookAhead - 1;1471 var result = [];1472 var possiblePaths = [];1473 possiblePaths.push({1474 idx: -1,1475 def: initialDef,1476 ruleStack: [],1477 occurrenceStack: []1478 });1479 while (!utils_1.isEmpty(possiblePaths)) {1480 var currPath = possiblePaths.pop();1481 // skip alternatives if no more results can be found (assuming deterministic grammar with fixed lookahead)1482 if (currPath === EXIT_ALTERNATIVE) {1483 if (foundCompletePath &&1484 utils_1.last(possiblePaths).idx <= minimalAlternativesIndex) {1485 // remove irrelevant 
alternative1486 possiblePaths.pop();1487 }1488 continue;1489 }1490 var currDef = currPath.def;1491 var currIdx = currPath.idx;1492 var currRuleStack = currPath.ruleStack;1493 var currOccurrenceStack = currPath.occurrenceStack;1494 // For Example: an empty path could exist in a valid grammar in the case of an EMPTY_ALT1495 if (utils_1.isEmpty(currDef)) {1496 continue;1497 }1498 var prod = currDef[0];1499 if (prod === EXIT_NON_TERMINAL) {1500 var nextPath = {1501 idx: currIdx,1502 def: utils_1.drop(currDef),1503 ruleStack: utils_1.dropRight(currRuleStack),1504 occurrenceStack: utils_1.dropRight(currOccurrenceStack)1505 };1506 possiblePaths.push(nextPath);1507 }1508 else if (prod instanceof gast_public_1.gast.Terminal) {1509 if (currIdx < tokenVectorLength - 1) {1510 var nextIdx = currIdx + 1;1511 var actualToken = tokenVector[nextIdx];1512 if (tokMatcher(actualToken, prod.terminalType)) {1513 var nextPath = {1514 idx: nextIdx,1515 def: utils_1.drop(currDef),1516 ruleStack: currRuleStack,1517 occurrenceStack: currOccurrenceStack1518 };1519 possiblePaths.push(nextPath);1520 }1521 // end of the line1522 }1523 else if (currIdx === tokenVectorLength - 1) {1524 // IGNORE ABOVE ELSE1525 result.push({1526 nextTokenType: prod.terminalType,1527 nextTokenOccurrence: prod.occurrenceInParent,1528 ruleStack: currRuleStack,1529 occurrenceStack: currOccurrenceStack1530 });1531 foundCompletePath = true;1532 }1533 else {1534 /* istanbul ignore next */1535 throw Error("non exhaustive match");1536 }1537 }1538 else if (prod instanceof gast_public_1.gast.NonTerminal) {1539 var newRuleStack = utils_1.cloneArr(currRuleStack);1540 newRuleStack.push(prod.nonTerminalName);1541 var newOccurrenceStack = utils_1.cloneArr(currOccurrenceStack);1542 newOccurrenceStack.push(prod.occurrenceInParent);1543 var nextPath = {1544 idx: currIdx,1545 def: prod.definition.concat(EXIT_NON_TERMINAL_ARR, utils_1.drop(currDef)),1546 ruleStack: newRuleStack,1547 occurrenceStack: newOccurrenceStack1548 };1549 possiblePaths.push(nextPath);1550 }1551 else if (prod instanceof gast_public_1.gast.Option) {1552 // the order of alternatives is meaningful, FILO (Last path will be traversed first).1553 var nextPathWithout = {1554 idx: currIdx,1555 def: utils_1.drop(currDef),1556 ruleStack: currRuleStack,1557 occurrenceStack: currOccurrenceStack1558 };1559 possiblePaths.push(nextPathWithout);1560 // required marker to avoid backtracking paths whose higher priority alternatives already matched1561 possiblePaths.push(EXIT_ALTERNATIVE);1562 var nextPathWith = {1563 idx: currIdx,1564 def: prod.definition.concat(utils_1.drop(currDef)),1565 ruleStack: currRuleStack,1566 occurrenceStack: currOccurrenceStack1567 };1568 possiblePaths.push(nextPathWith);1569 }1570 else if (prod instanceof gast_public_1.gast.RepetitionMandatory) {1571 // TODO:(THE NEW operators here take a while...) (convert once?)1572 var secondIteration = new gast_public_1.gast.Repetition(prod.definition, prod.occurrenceInParent);1573 var nextDef = prod.definition.concat([secondIteration], utils_1.drop(currDef));1574 var nextPath = {1575 idx: currIdx,1576 def: nextDef,1577 ruleStack: currRuleStack,1578 occurrenceStack: currOccurrenceStack1579 };1580 possiblePaths.push(nextPath);1581 }1582 else if (prod instanceof gast_public_1.gast.RepetitionMandatoryWithSeparator) {1583 // TODO:(THE NEW operators here take a while...) 
(convert once?)1584 var separatorGast = new gast_public_1.gast.Terminal(prod.separator);1585 var secondIteration = new gast_public_1.gast.Repetition([separatorGast].concat(prod.definition), prod.occurrenceInParent);1586 var nextDef = prod.definition.concat([secondIteration], utils_1.drop(currDef));1587 var nextPath = {1588 idx: currIdx,1589 def: nextDef,1590 ruleStack: currRuleStack,1591 occurrenceStack: currOccurrenceStack1592 };1593 possiblePaths.push(nextPath);1594 }1595 else if (prod instanceof gast_public_1.gast.RepetitionWithSeparator) {1596 // the order of alternatives is meaningful, FILO (Last path will be traversed first).1597 var nextPathWithout = {1598 idx: currIdx,1599 def: utils_1.drop(currDef),1600 ruleStack: currRuleStack,1601 occurrenceStack: currOccurrenceStack1602 };1603 possiblePaths.push(nextPathWithout);1604 // required marker to avoid backtracking paths whose higher priority alternatives already matched1605 possiblePaths.push(EXIT_ALTERNATIVE);1606 var separatorGast = new gast_public_1.gast.Terminal(prod.separator);1607 var nthRepetition = new gast_public_1.gast.Repetition([separatorGast].concat(prod.definition), prod.occurrenceInParent);1608 var nextDef = prod.definition.concat([nthRepetition], utils_1.drop(currDef));1609 var nextPathWith = {1610 idx: currIdx,1611 def: nextDef,1612 ruleStack: currRuleStack,1613 occurrenceStack: currOccurrenceStack1614 };1615 possiblePaths.push(nextPathWith);1616 }1617 else if (prod instanceof gast_public_1.gast.Repetition) {1618 // the order of alternatives is meaningful, FILO (Last path will be traversed first).1619 var nextPathWithout = {1620 idx: currIdx,1621 def: utils_1.drop(currDef),1622 ruleStack: currRuleStack,1623 occurrenceStack: currOccurrenceStack1624 };1625 possiblePaths.push(nextPathWithout);1626 // required marker to avoid backtracking paths whose higher priority alternatives already matched1627 possiblePaths.push(EXIT_ALTERNATIVE);1628 // TODO: an empty repetition will cause infinite loops here, will the parser detect this in selfAnalysis?1629 var nthRepetition = new gast_public_1.gast.Repetition(prod.definition, prod.occurrenceInParent);1630 var nextDef = prod.definition.concat([nthRepetition], utils_1.drop(currDef));1631 var nextPathWith = {1632 idx: currIdx,1633 def: nextDef,1634 ruleStack: currRuleStack,1635 occurrenceStack: currOccurrenceStack1636 };1637 possiblePaths.push(nextPathWith);1638 }1639 else if (prod instanceof gast_public_1.gast.Alternation) {1640 // the order of alternatives is meaningful, FILO (Last path will be traversed first).1641 for (var i = prod.definition.length - 1; i >= 0; i--) {1642 var currAlt = prod.definition[i];1643 var currAltPath = {1644 idx: currIdx,1645 def: currAlt.definition.concat(utils_1.drop(currDef)),1646 ruleStack: currRuleStack,1647 occurrenceStack: currOccurrenceStack1648 };1649 possiblePaths.push(currAltPath);1650 possiblePaths.push(EXIT_ALTERNATIVE);1651 }1652 }1653 else if (prod instanceof gast_public_1.gast.Flat) {1654 possiblePaths.push({1655 idx: currIdx,1656 def: prod.definition.concat(utils_1.drop(currDef)),1657 ruleStack: currRuleStack,1658 occurrenceStack: currOccurrenceStack1659 });1660 }1661 else if (prod instanceof gast_public_1.gast.Rule) {1662 // last because we should only encounter at most a single one of these per invocation.1663 possiblePaths.push(expandTopLevelRule(prod, currIdx, currRuleStack, currOccurrenceStack));1664 }1665 else {1666 /* istanbul ignore next */1667 throw Error("non exhaustive match");1668 }1669 }1670 return 
result;1671}1672exports.nextPossibleTokensAfter = nextPossibleTokensAfter;1673function expandTopLevelRule(topRule, currIdx, currRuleStack, currOccurrenceStack) {1674 var newRuleStack = utils_1.cloneArr(currRuleStack);1675 newRuleStack.push(topRule.name);1676 var newCurrOccurrenceStack = utils_1.cloneArr(currOccurrenceStack);1677 // top rule is always assumed to have been called with occurrence index 11678 newCurrOccurrenceStack.push(1);1679 return {1680 idx: currIdx,1681 def: topRule.definition,1682 ruleStack: newRuleStack,1683 occurrenceStack: newCurrOccurrenceStack1684 };1685}1686//# sourceMappingURL=interpreter.js.map1687/***/ }),1688/* 6 */1689/***/ (function(module, exports, __webpack_require__) {1690"use strict";1691Object.defineProperty(exports, "__esModule", { value: true });1692var cache = __webpack_require__(7);1693var cache_1 = __webpack_require__(7);1694var exceptions_public_1 = __webpack_require__(12);1695var lang_extensions_1 = __webpack_require__(3);1696var resolver_1 = __webpack_require__(21);1697var checks_1 = __webpack_require__(13);1698var utils_1 = __webpack_require__(0);1699var follow_1 = __webpack_require__(23);1700var tokens_public_1 = __webpack_require__(2);1701var lookahead_1 = __webpack_require__(14);1702var gast_builder_1 = __webpack_require__(24);1703var interpreter_1 = __webpack_require__(5);1704var constants_1 = __webpack_require__(18);1705var gast_public_1 = __webpack_require__(1);1706var gast_1 = __webpack_require__(9);1707var tokens_1 = __webpack_require__(4);1708var cst_1 = __webpack_require__(16);1709var keys_1 = __webpack_require__(17);1710var cst_visitor_1 = __webpack_require__(26);1711var errors_public_1 = __webpack_require__(19);1712var serializeGrammar = gast_public_1.gast.serializeGrammar;1713var ParserDefinitionErrorType;1714(function (ParserDefinitionErrorType) {1715 ParserDefinitionErrorType[ParserDefinitionErrorType["INVALID_RULE_NAME"] = 0] = "INVALID_RULE_NAME";1716 ParserDefinitionErrorType[ParserDefinitionErrorType["DUPLICATE_RULE_NAME"] = 1] = "DUPLICATE_RULE_NAME";1717 ParserDefinitionErrorType[ParserDefinitionErrorType["INVALID_RULE_OVERRIDE"] = 2] = "INVALID_RULE_OVERRIDE";1718 ParserDefinitionErrorType[ParserDefinitionErrorType["DUPLICATE_PRODUCTIONS"] = 3] = "DUPLICATE_PRODUCTIONS";1719 ParserDefinitionErrorType[ParserDefinitionErrorType["UNRESOLVED_SUBRULE_REF"] = 4] = "UNRESOLVED_SUBRULE_REF";1720 ParserDefinitionErrorType[ParserDefinitionErrorType["LEFT_RECURSION"] = 5] = "LEFT_RECURSION";1721 ParserDefinitionErrorType[ParserDefinitionErrorType["NONE_LAST_EMPTY_ALT"] = 6] = "NONE_LAST_EMPTY_ALT";1722 ParserDefinitionErrorType[ParserDefinitionErrorType["AMBIGUOUS_ALTS"] = 7] = "AMBIGUOUS_ALTS";1723 ParserDefinitionErrorType[ParserDefinitionErrorType["CONFLICT_TOKENS_RULES_NAMESPACE"] = 8] = "CONFLICT_TOKENS_RULES_NAMESPACE";1724 ParserDefinitionErrorType[ParserDefinitionErrorType["INVALID_TOKEN_NAME"] = 9] = "INVALID_TOKEN_NAME";1725 ParserDefinitionErrorType[ParserDefinitionErrorType["INVALID_NESTED_RULE_NAME"] = 10] = "INVALID_NESTED_RULE_NAME";1726 ParserDefinitionErrorType[ParserDefinitionErrorType["DUPLICATE_NESTED_NAME"] = 11] = "DUPLICATE_NESTED_NAME";1727 ParserDefinitionErrorType[ParserDefinitionErrorType["NO_NON_EMPTY_LOOKAHEAD"] = 12] = "NO_NON_EMPTY_LOOKAHEAD";1728 ParserDefinitionErrorType[ParserDefinitionErrorType["AMBIGUOUS_PREFIX_ALTS"] = 13] = "AMBIGUOUS_PREFIX_ALTS";1729 ParserDefinitionErrorType[ParserDefinitionErrorType["TOO_MANY_ALTS"] = 14] = "TOO_MANY_ALTS";1730})(ParserDefinitionErrorType = 
exports.ParserDefinitionErrorType || (exports.ParserDefinitionErrorType = {}));1731var IN_RULE_RECOVERY_EXCEPTION = "InRuleRecoveryException";1732exports.END_OF_FILE = tokens_public_1.createTokenInstance(tokens_public_1.EOF, "", NaN, NaN, NaN, NaN, NaN, NaN);1733Object.freeze(exports.END_OF_FILE);1734var DEFAULT_PARSER_CONFIG = Object.freeze({1735 recoveryEnabled: false,1736 maxLookahead: 4,1737 ignoredIssues: {},1738 dynamicTokensEnabled: false,1739 // TODO: Document this breaking change, can it be mitigated?1740 // TODO: change to true1741 outputCst: false,1742 errorMessageProvider: errors_public_1.defaultErrorProvider1743});1744var DEFAULT_RULE_CONFIG = Object.freeze({1745 recoveryValueFunc: function () { return undefined; },1746 resyncEnabled: true1747});1748/**1749 * Convenience used to express an empty alternative in an OR (alternation).1750 * can be used to more clearly describe the intent in a case of empty alternation.1751 *1752 * For example:1753 *1754 * 1. without using EMPTY_ALT:1755 *1756 * this.OR([1757 * {ALT: () => {1758 * this.CONSUME1(OneTok)1759 * return "1"1760 * }},1761 * {ALT: () => {1762 * this.CONSUME1(TwoTok)1763 * return "2"1764 * }},1765 * {ALT: () => { // implicitly empty because there are no invoked grammar rules (OR/MANY/CONSUME...) inside this alternative.1766 * return "666"1767 * }},1768 * ])1769 *1770 *1771 * 2. using EMPTY_ALT:1772 *1773 * this.OR([1774 * {ALT: () => {1775 * this.CONSUME1(OneTok)1776 * return "1"1777 * }},1778 * {ALT: () => {1779 * this.CONSUME1(TwoTok)1780 * return "2"1781 * }},1782 * {ALT: EMPTY_ALT("666")}, // explicitly empty, clearer intent1783 * ])1784 *1785 */1786function EMPTY_ALT(value) {1787 if (value === void 0) { value = undefined; }1788 return function () {1789 return value;1790 };1791}1792exports.EMPTY_ALT = EMPTY_ALT;1793var EOF_FOLLOW_KEY = {};1794/**1795 * A Recognizer capable of self analysis to determine it's grammar structure1796 * This is used for more advanced features requiring such information.1797 * For example: Error Recovery, Automatic lookahead calculation.1798 */1799var Parser = /** @class */ (function () {1800 function Parser(input, tokensDictionary, config) {1801 if (config === void 0) { config = DEFAULT_PARSER_CONFIG; }1802 this._errors = [];1803 this.isBackTrackingStack = [];1804 this.RULE_STACK = [];1805 this.RULE_OCCURRENCE_STACK = [];1806 this.CST_STACK = [];1807 this.tokensMap = undefined;1808 this.definedRulesNames = [];1809 this.shortRuleNameToFull = new lang_extensions_1.HashTable();1810 this.fullRuleNameToShort = new lang_extensions_1.HashTable();1811 // The shortName Index must be coded "after" the first 8bits to enable building unique lookahead keys1812 this.ruleShortNameIdx = 256;1813 this.LAST_EXPLICIT_RULE_STACK = [];1814 this.selfAnalysisDone = false;1815 this.currIdx = -1;1816 /**1817 * Only used internally for storing productions as they are built for the first time.1818 * The final productions should be accessed from the static cache.1819 */1820 this._productions = new lang_extensions_1.HashTable();1821 this.input = input;1822 // configuration1823 this.recoveryEnabled = utils_1.has(config, "recoveryEnabled")1824 ? 
config.recoveryEnabled1825 : DEFAULT_PARSER_CONFIG.recoveryEnabled;1826 // performance optimization, NOOP will be inlined which1827 // effectively means that this optional feature does not exist1828 // when not used.1829 if (!this.recoveryEnabled) {1830 this.attemptInRepetitionRecovery = utils_1.NOOP;1831 }1832 this.dynamicTokensEnabled = utils_1.has(config, "dynamicTokensEnabled")1833 ? config.dynamicTokensEnabled1834 : DEFAULT_PARSER_CONFIG.dynamicTokensEnabled;1835 this.maxLookahead = utils_1.has(config, "maxLookahead")1836 ? config.maxLookahead1837 : DEFAULT_PARSER_CONFIG.maxLookahead;1838 this.ignoredIssues = utils_1.has(config, "ignoredIssues")1839 ? config.ignoredIssues1840 : DEFAULT_PARSER_CONFIG.ignoredIssues;1841 this.outputCst = utils_1.has(config, "outputCst")1842 ? config.outputCst1843 : DEFAULT_PARSER_CONFIG.outputCst;1844 this.errorMessageProvider = utils_1.defaults(config.errorMessageProvider, DEFAULT_PARSER_CONFIG.errorMessageProvider);1845 if (!this.outputCst) {1846 this.cstInvocationStateUpdate = utils_1.NOOP;1847 this.cstFinallyStateUpdate = utils_1.NOOP;1848 this.cstPostTerminal = utils_1.NOOP;1849 this.cstPostNonTerminal = utils_1.NOOP;1850 this.getLastExplicitRuleShortName = this.getLastExplicitRuleShortNameNoCst;1851 this.getPreviousExplicitRuleShortName = this.getPreviousExplicitRuleShortNameNoCst;1852 this.getPreviousExplicitRuleOccurenceIndex = this.getPreviousExplicitRuleOccurenceIndexNoCst;1853 this.manyInternal = this.manyInternalNoCst;1854 this.orInternal = this.orInternalNoCst;1855 this.optionInternal = this.optionInternalNoCst;1856 this.atLeastOneInternal = this.atLeastOneInternalNoCst;1857 this.manySepFirstInternal = this.manySepFirstInternalNoCst;1858 this.atLeastOneSepFirstInternal = this.atLeastOneSepFirstInternalNoCst;1859 }1860 this.className = lang_extensions_1.classNameFromInstance(this);1861 this.firstAfterRepMap = cache.getFirstAfterRepForClass(this.className);1862 this.classLAFuncs = cache.getLookaheadFuncsForClass(this.className);1863 this.cstDictDefForRule = cache.getCstDictDefPerRuleForClass(this.className);1864 if (!cache.CLASS_TO_DEFINITION_ERRORS.containsKey(this.className)) {1865 this.definitionErrors = [];1866 cache.CLASS_TO_DEFINITION_ERRORS.put(this.className, this.definitionErrors);1867 }1868 else {1869 this.definitionErrors = cache.CLASS_TO_DEFINITION_ERRORS.get(this.className);1870 }1871 if (utils_1.isArray(tokensDictionary)) {1872 this.tokensMap = utils_1.reduce(tokensDictionary, function (acc, tokenClazz) {1873 acc[tokens_public_1.tokenName(tokenClazz)] = tokenClazz;1874 return acc;1875 }, {});1876 }1877 else if (utils_1.has(tokensDictionary, "modes") &&1878 utils_1.every(utils_1.flatten(utils_1.values(tokensDictionary.modes)), tokens_1.isTokenType)) {1879 var allTokenTypes = utils_1.flatten(utils_1.values(tokensDictionary.modes));1880 var uniqueTokens = utils_1.uniq(allTokenTypes);1881 this.tokensMap = utils_1.reduce(uniqueTokens, function (acc, tokenClazz) {1882 acc[tokens_public_1.tokenName(tokenClazz)] = tokenClazz;1883 return acc;1884 }, {});1885 }1886 else if (utils_1.isObject(tokensDictionary)) {1887 this.tokensMap = utils_1.cloneObj(tokensDictionary);1888 }1889 else {1890 throw new Error("<tokensDictionary> argument must be An Array of Token constructors" +1891 " A dictionary of Token constructors or an IMultiModeLexerDefinition");1892 }1893 var noTokenCategoriesUsed = utils_1.every(utils_1.values(tokensDictionary), function (tokenConstructor) { return utils_1.isEmpty(tokenConstructor.categoryMatches); });1894 
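// --------------------------------------------------------------------------
// Illustrative usage sketch (not part of chevrotain.js). The constructor logic
// above fills any missing config property from DEFAULT_PARSER_CONFIG and accepts
// the tokens dictionary as an Array of token constructors, a name -> constructor
// map, or a multi-mode lexer definition. A minimal consumer of that API might look
// as follows; all token names, the grammar and the SelectParserSketch class are
// hypothetical examples, not chevrotain APIs.
const chevrotain = require("chevrotain");
const { createToken, Lexer, Parser } = chevrotain;

const Select     = createToken({ name: "Select",     pattern: /SELECT/ });
const From       = createToken({ name: "From",       pattern: /FROM/ });
const Comma      = createToken({ name: "Comma",      pattern: /,/ });
const Identifier = createToken({ name: "Identifier", pattern: /[a-zA-Z]\w*/ });
const WhiteSpace = createToken({ name: "WhiteSpace", pattern: /\s+/, group: Lexer.SKIPPED });
const allTokens  = [WhiteSpace, Select, From, Comma, Identifier];
const selectLexer = new Lexer(allTokens);

class SelectParserSketch extends Parser {
    constructor(input) {
        // tokens passed as an Array; the config overrides two of the defaults shown above
        super(input, allTokens, {
            recoveryEnabled: true, // DEFAULT_PARSER_CONFIG.recoveryEnabled is false
            maxLookahead: 2        // DEFAULT_PARSER_CONFIG.maxLookahead is 4
        });
        const $ = this;
        $.RULE("selectStatement", () => {
            $.CONSUME(Select);
            $.SUBRULE($.columnList);
            $.CONSUME(From);
            $.CONSUME(Identifier);
        });
        $.RULE("columnList", () => {
            $.CONSUME(Identifier);
            $.MANY(() => {              // zero or more ", Identifier"
                $.CONSUME(Comma);
                $.CONSUME2(Identifier); // 2nd occurrence of Identifier inside this rule
            });
        });
        Parser.performSelfAnalysis(this); // must run after all RULE definitions
    }
}

const lexResult = selectLexer.tokenize("SELECT a, b FROM tbl");
const parserInstance = new SelectParserSketch(lexResult.tokens);
parserInstance.selectStatement();
console.log(parserInstance.errors); // recognition errors collected during the parse
// --- end of sketch --------------------------------------------------------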
this.tokenMatcher = noTokenCategoriesUsed1895 ? tokens_1.tokenStructuredMatcherNoCategories1896 : tokens_1.tokenStructuredMatcher;1897 // always add EOF to the tokenNames -> constructors map. it is useful to assure all the input has been1898 // parsed with a clear error message ("expecting EOF but found ...")1899 /* tslint:disable */1900 this.tokensMap["EOF"] = tokens_public_1.EOF;1901 /* tslint:enable */1902 // Because ES2015+ syntax should be supported for creating Token classes1903 // We cannot assume that the Token classes were created using the "extendToken" utilities1904 // Therefore we must augment the Token classes both on Lexer initialization and on Parser initialization1905 tokens_1.augmentTokenTypes(utils_1.values(this.tokensMap));1906 }1907 Parser.performSelfAnalysis = function (parserInstance) {1908 var definitionErrors = [];1909 var defErrorsMsgs;1910 parserInstance.selfAnalysisDone = true;1911 var className = lang_extensions_1.classNameFromInstance(parserInstance);1912 // can't test this with nyc tool, instrumentation causes the class name to be not empty.1913 /* istanbul ignore if */1914 if (className === "") {1915 // just a simple "throw Error" without any fancy "definition error" because the logic below relies on a unique parser name to1916 // save/access those definition errors...1917 /* istanbul ignore next */1918 throw Error("A Parser's constructor may not be an anonymous Function, it must be a named function\n" +1919 "The constructor's name is used at runtime for performance (caching) purposes.");1920 }1921 // this information should only be computed once1922 if (!cache.CLASS_TO_SELF_ANALYSIS_DONE.containsKey(className)) {1923 cache.CLASS_TO_SELF_ANALYSIS_DONE.put(className, true);1924 var orgProductions_1 = parserInstance._productions;1925 var clonedProductions_1 = new lang_extensions_1.HashTable();1926 // clone the grammar productions to support grammar inheritance. requirements:1927 // 1. We want to avoid rebuilding the grammar every time so a cache for the productions is used.1928 // 2. We need to collect the production from multiple grammars in an inheritance scenario during constructor invocation1929 // so the myGast variable is used.1930 // 3. 
If a Production has been overridden references to it in the GAST must also be updated.1931 utils_1.forEach(orgProductions_1.keys(), function (key) {1932 var value = orgProductions_1.get(key);1933 clonedProductions_1.put(key, gast_1.cloneProduction(value));1934 });1935 cache.getProductionsForClass(className).putAll(clonedProductions_1);1936 // assumes this cache has been initialized (in the relevant parser's constructor)1937 // TODO: consider making the self analysis a member method to resolve this.1938 // that way it won't be callable before the constructor has been invoked...1939 definitionErrors = cache.CLASS_TO_DEFINITION_ERRORS.get(className);1940 var resolverErrors = resolver_1.resolveGrammar(clonedProductions_1);1941 definitionErrors.push.apply(definitionErrors, resolverErrors); // mutability for the win?1942 // only perform additional grammar validations IFF no resolving errors have occurred.1943 // as unresolved grammar may lead to unhandled runtime exceptions in the follow up validations.1944 if (utils_1.isEmpty(resolverErrors)) {1945 var validationErrors = checks_1.validateGrammar(clonedProductions_1.values(), parserInstance.maxLookahead, utils_1.values(parserInstance.tokensMap), parserInstance.ignoredIssues);1946 definitionErrors.push.apply(definitionErrors, validationErrors); // mutability for the win?1947 }1948 if (!utils_1.isEmpty(definitionErrors) &&1949 !Parser.DEFER_DEFINITION_ERRORS_HANDLING) {1950 defErrorsMsgs = utils_1.map(definitionErrors, function (defError) { return defError.message; });1951 throw new Error("Parser Definition Errors detected\n: " + defErrorsMsgs.join("\n-------------------------------\n"));1952 }1953 if (utils_1.isEmpty(definitionErrors)) {1954 // this analysis may fail if the grammar is not perfectly valid1955 var allFollows = follow_1.computeAllProdsFollows(clonedProductions_1.values());1956 cache.setResyncFollowsForClass(className, allFollows);1957 }1958 var cstAnalysisResult = cst_1.analyzeCst(clonedProductions_1.values(), parserInstance.fullRuleNameToShort);1959 cache1960 .getCstDictDefPerRuleForClass(className)1961 .putAll(cstAnalysisResult.dictDef);1962 cache.CLASS_TO_ALL_RULE_NAMES.put(className, cstAnalysisResult.allRuleNames);1963 }1964 // reThrow the validation errors each time an erroneous parser is instantiated1965 if (!utils_1.isEmpty(cache.CLASS_TO_DEFINITION_ERRORS.get(className)) &&1966 !Parser.DEFER_DEFINITION_ERRORS_HANDLING) {1967 defErrorsMsgs = utils_1.map(cache.CLASS_TO_DEFINITION_ERRORS.get(className), function (defError) { return defError.message; });1968 throw new Error("Parser Definition Errors detected\n: " + defErrorsMsgs.join("\n-------------------------------\n"));1969 }1970 };1971 Object.defineProperty(Parser.prototype, "errors", {1972 get: function () {1973 return utils_1.cloneArr(this._errors);1974 },1975 set: function (newErrors) {1976 this._errors = newErrors;1977 },1978 enumerable: true,1979 configurable: true1980 });1981 /**1982 * Resets the parser state, should be overridden for custom parsers which "carry" additional state.1983 * When overriding, remember to also invoke the super implementation!1984 */1985 Parser.prototype.reset = function () {1986 this.resetLexerState();1987 this.isBackTrackingStack = [];1988 this.errors = [];1989 this.RULE_STACK = [];1990 this.LAST_EXPLICIT_RULE_STACK = [];1991 this.CST_STACK = [];1992 this.RULE_OCCURRENCE_STACK = [];1993 };1994 Parser.prototype.isAtEndOfInput = function () {1995 return this.tokenMatcher(this.LA(1), tokens_public_1.EOF);1996 };1997 
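// --------------------------------------------------------------------------
// Illustrative sketch (not part of chevrotain.js). performSelfAnalysis above
// requires a *named* parser class, runs grammar resolution/validation only once
// per class name, and throws an aggregated Error when definition problems
// (left recursion, ambiguous alternatives, ...) are detected. BrokenParserSketch
// below is a hypothetical parser class whose grammar contains such a problem.
try {
    new BrokenParserSketch([]); // its constructor ends with Parser.performSelfAnalysis(this)
} catch (e) {
    // the message starts with "Parser Definition Errors detected" (built above)
    console.error(e.message);
}
// Setting the static flag checked above defers the throw, so the collected
// problems can instead be inspected programmatically on the instance:
Parser.DEFER_DEFINITION_ERRORS_HANDLING = true;
const broken = new BrokenParserSketch([]);
console.log(broken.definitionErrors.map(defError => defError.message));
// --- end of sketch --------------------------------------------------------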
Parser.prototype.getBaseCstVisitorConstructor = function () {1998 var cachedConstructor = cache_1.CLASS_TO_BASE_CST_VISITOR.get(this.className);1999 if (utils_1.isUndefined(cachedConstructor)) {2000 var allRuleNames = cache_1.CLASS_TO_ALL_RULE_NAMES.get(this.className);2001 cachedConstructor = cst_visitor_1.createBaseSemanticVisitorConstructor(this.className, allRuleNames);2002 cache_1.CLASS_TO_BASE_CST_VISITOR.put(this.className, cachedConstructor);2003 }2004 return cachedConstructor;2005 };2006 Parser.prototype.getBaseCstVisitorConstructorWithDefaults = function () {2007 var cachedConstructor = cache_1.CLASS_TO_BASE_CST_VISITOR_WITH_DEFAULTS.get(this.className);2008 if (utils_1.isUndefined(cachedConstructor)) {2009 var allRuleNames = cache_1.CLASS_TO_ALL_RULE_NAMES.get(this.className);2010 var baseConstructor = this.getBaseCstVisitorConstructor();2011 cachedConstructor = cst_visitor_1.createBaseVisitorConstructorWithDefaults(this.className, allRuleNames, baseConstructor);2012 cache_1.CLASS_TO_BASE_CST_VISITOR_WITH_DEFAULTS.put(this.className, cachedConstructor);2013 }2014 return cachedConstructor;2015 };2016 Parser.prototype.getGAstProductions = function () {2017 return cache.getProductionsForClass(this.className);2018 };2019 // This is more than a convenience method.2020 // It is mostly used to draw the diagrams and having this method present on the parser instance2021 // can avoid certain situations in which the serialization logic would fail due to multiple versions of chevrotain2022 // bundled (due to multiple prototype chains and "instanceof" usage).2023 Parser.prototype.getSerializedGastProductions = function () {2024 return serializeGrammar(cache.getProductionsForClass(this.className).values());2025 };2026 /**2027 * @param startRuleName {string}2028 * @param precedingInput {IToken[]} - The token vector up to (not including) the content assist point2029 * @returns {ISyntacticContentAssistPath[]}2030 */2031 Parser.prototype.computeContentAssist = function (startRuleName, precedingInput) {2032 var startRuleGast = cache2033 .getProductionsForClass(this.className)2034 .get(startRuleName);2035 if (utils_1.isUndefined(startRuleGast)) {2036 throw Error("Rule ->" + startRuleName + "<- does not exist in this grammar.");2037 }2038 return interpreter_1.nextPossibleTokensAfter([startRuleGast], precedingInput, this.tokenMatcher, this.maxLookahead);2039 };2040 Parser.prototype.isBackTracking = function () {2041 return !utils_1.isEmpty(this.isBackTrackingStack);2042 };2043 Parser.prototype.getCurrRuleFullName = function () {2044 var shortName = this.getLastExplicitRuleShortName();2045 return this.shortRuleNameToFull.get(shortName);2046 };2047 Parser.prototype.shortRuleNameToFullName = function (shortName) {2048 return this.shortRuleNameToFull.get(shortName);2049 };2050 Parser.prototype.getHumanReadableRuleStack = function () {2051 var _this = this;2052 if (!utils_1.isEmpty(this.LAST_EXPLICIT_RULE_STACK)) {2053 return utils_1.map(this.LAST_EXPLICIT_RULE_STACK, function (currIdx) {2054 return _this.shortRuleNameToFullName(_this.RULE_STACK[currIdx]);2055 });2056 }2057 else {2058 return utils_1.map(this.RULE_STACK, function (currShortName) {2059 return _this.shortRuleNameToFullName(currShortName);2060 });2061 }2062 };2063 Parser.prototype.SAVE_ERROR = function (error) {2064 if (exceptions_public_1.exceptions.isRecognitionException(error)) {2065 error.context = {2066 ruleStack: this.getHumanReadableRuleStack(),2067 ruleOccurrenceStack: utils_1.cloneArr(this.RULE_OCCURRENCE_STACK)2068 };2069 
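// --------------------------------------------------------------------------
// Illustrative sketch (not part of chevrotain.js). computeContentAssist above looks
// up the start rule's GAST production and delegates to nextPossibleTokensAfter
// (module 5 earlier in this bundle). Reusing the hypothetical SelectParserSketch and
// selectLexer from the earlier sketch, a "what may come next?" query for partially
// typed input could look like this:
const assistParser  = new SelectParserSketch([]);
const partialTokens = selectLexer.tokenize("SELECT a, b ").tokens;
const assistPaths   = assistParser.computeContentAssist("selectStatement", partialTokens);
assistPaths.forEach(path => {
    // each path carries the token type that may legally follow, plus the rule and
    // occurrence stacks assembled by nextPossibleTokensAfter (see the result objects above)
    console.log("next:", chevrotain.tokenName(path.nextTokenType),
                "via", path.ruleStack.join(" > "));
});
// --- end of sketch --------------------------------------------------------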
this._errors.push(error);2070 return error;2071 }2072 else {2073 throw Error("Trying to save an Error which is not a RecognitionException");2074 }2075 };2076 /**2077 * @param grammarRule - The rule to try and parse in backtracking mode.2078 * @param args - argumens to be passed to the grammar rule execution2079 *2080 * @return {TokenType():boolean} a lookahead function that will try to parse the given grammarRule and will return true if succeed.2081 */2082 Parser.prototype.BACKTRACK = function (grammarRule, args) {2083 return function () {2084 // save org state2085 this.isBackTrackingStack.push(1);2086 var orgState = this.saveRecogState();2087 try {2088 grammarRule.apply(this, args);2089 // if no exception was thrown we have succeed parsing the rule.2090 return true;2091 }2092 catch (e) {2093 if (exceptions_public_1.exceptions.isRecognitionException(e)) {2094 return false;2095 }2096 else {2097 throw e;2098 }2099 }2100 finally {2101 this.reloadRecogState(orgState);2102 this.isBackTrackingStack.pop();2103 }2104 };2105 };2106 // Parsing DSL2107 /**2108 * Convenience method equivalent to CONSUME1.2109 * @see CONSUME12110 */2111 Parser.prototype.CONSUME = function (tokType, options) {2112 return this.CONSUME1(tokType, options);2113 };2114 /**2115 *2116 * A Parsing DSL method use to consume a single terminal Token.2117 * a Token will be consumed, IFF the next token in the token vector matches <tokType>.2118 * otherwise the parser will attempt to perform error recovery.2119 *2120 * The index in the method name indicates the unique occurrence of a terminal consumption2121 * inside a the top level rule. What this means is that if a terminal appears2122 * more than once in a single rule, each appearance must have a difference index.2123 *2124 * for example:2125 *2126 * function parseQualifiedName() {2127 * this.CONSUME1(Identifier);2128 * this.MANY(()=> {2129 * this.CONSUME1(Dot);2130 * this.CONSUME2(Identifier); // <-- here we use CONSUME2 because the terminal2131 * }); // 'Identifier' has already appeared previously in the2132 * // the rule 'parseQualifiedName'2133 * }2134 *2135 * @param {TokenType} tokType - The Type of the token to be consumed.2136 * @param options - optional properties to modify the behavior of CONSUME.2137 */2138 Parser.prototype.CONSUME1 = function (tokType, options) {2139 return this.consumeInternal(tokType, 1, options);2140 };2141 /**2142 * @see CONSUME12143 */2144 Parser.prototype.CONSUME2 = function (tokType, options) {2145 return this.consumeInternal(tokType, 2, options);2146 };2147 /**2148 * @see CONSUME12149 */2150 Parser.prototype.CONSUME3 = function (tokType, options) {2151 return this.consumeInternal(tokType, 3, options);2152 };2153 /**2154 * @see CONSUME12155 */2156 Parser.prototype.CONSUME4 = function (tokType, options) {2157 return this.consumeInternal(tokType, 4, options);2158 };2159 /**2160 * @see CONSUME12161 */2162 Parser.prototype.CONSUME5 = function (tokType, options) {2163 return this.consumeInternal(tokType, 5, options);2164 };2165 /**2166 * Convenience method equivalent to SUBRULE12167 * @see SUBRULE12168 */2169 Parser.prototype.SUBRULE = function (ruleToCall, args) {2170 if (args === void 0) { args = undefined; }2171 return this.subruleInternal(ruleToCall, 1, args);2172 };2173 /**2174 * The Parsing DSL Method is used by one rule to call another.2175 *2176 * This may seem redundant as it does not actually do much.2177 * However using it is mandatory for all sub rule invocations.2178 * calling another rule without wrapping in SUBRULE(...)2179 * will cause 
errors/mistakes in the Recognizer's self analysis,2180 * which will lead to errors in error recovery/automatic lookahead calculation2181 * and any other functionality relying on the Recognizer's self analysis2182 * output.2183 *2184 * As in CONSUME the index in the method name indicates the occurrence2185 * of the sub rule invocation in its rule.2186 *2187 * @param {TokenType} ruleToCall - The rule to invoke.2188 * @param {*[]} args - The arguments to pass to the invoked subrule.2189 * @returns {*} - The result of invoking ruleToCall.2190 */2191 Parser.prototype.SUBRULE1 = function (ruleToCall, args) {2192 if (args === void 0) { args = undefined; }2193 return this.subruleInternal(ruleToCall, 1, args);2194 };2195 /**2196 * @see SUBRULE12197 */2198 Parser.prototype.SUBRULE2 = function (ruleToCall, args) {2199 if (args === void 0) { args = undefined; }2200 return this.subruleInternal(ruleToCall, 2, args);2201 };2202 /**2203 * @see SUBRULE12204 */2205 Parser.prototype.SUBRULE3 = function (ruleToCall, args) {2206 if (args === void 0) { args = undefined; }2207 return this.subruleInternal(ruleToCall, 3, args);2208 };2209 /**2210 * @see SUBRULE12211 */2212 Parser.prototype.SUBRULE4 = function (ruleToCall, args) {2213 if (args === void 0) { args = undefined; }2214 return this.subruleInternal(ruleToCall, 4, args);2215 };2216 /**2217 * @see SUBRULE12218 */2219 Parser.prototype.SUBRULE5 = function (ruleToCall, args) {2220 if (args === void 0) { args = undefined; }2221 return this.subruleInternal(ruleToCall, 5, args);2222 };2223 /**2224 * Convenience method equivalent to OPTION1.2225 * @see OPTION12226 */2227 Parser.prototype.OPTION = function (actionORMethodDef) {2228 return this.OPTION1(actionORMethodDef);2229 };2230 /**2231 * Parsing DSL Method that Indicates an Optional production2232 * in EBNF notation: [...].2233 *2234 * Note that there are two syntax forms:2235 * - Passing the grammar action directly:2236 * this.OPTION(()=> {2237 * this.CONSUME(Digit)}2238 * );2239 *2240 * - using an "options" object:2241 * this.OPTION({2242 * GATE:predicateFunc,2243 * DEF: ()=>{2244 * this.CONSUME(Digit)2245 * }});2246 *2247 * The optional 'GATE' property in "options" object form can be used to add constraints2248 * to invoking the grammar action.2249 *2250 * As in CONSUME the index in the method name indicates the occurrence2251 * of the optional production in it's top rule.2252 *2253 * @param actionORMethodDef - The grammar action to optionally invoke once2254 * or an "OPTIONS" object describing the grammar action and optional properties.2255 *2256 * @returns {OUT}2257 */2258 Parser.prototype.OPTION1 = function (actionORMethodDef) {2259 return this.optionInternal(actionORMethodDef, 1);2260 };2261 /**2262 * @see OPTION12263 */2264 Parser.prototype.OPTION2 = function (actionORMethodDef) {2265 return this.optionInternal(actionORMethodDef, 2);2266 };2267 /**2268 * @see OPTION12269 */2270 Parser.prototype.OPTION3 = function (actionORMethodDef) {2271 return this.optionInternal(actionORMethodDef, 3);2272 };2273 /**2274 * @see OPTION12275 */2276 Parser.prototype.OPTION4 = function (actionORMethodDef) {2277 return this.optionInternal(actionORMethodDef, 4);2278 };2279 /**2280 * @see OPTION12281 */2282 Parser.prototype.OPTION5 = function (actionORMethodDef) {2283 return this.optionInternal(actionORMethodDef, 5);2284 };2285 /**2286 * Convenience method equivalent to OR1.2287 * @see OR12288 */2289 Parser.prototype.OR = function (altsOrOpts) {2290 return this.OR1(altsOrOpts);2291 };2292 /**2293 * Parsing DSL method that 
indicates a choice between a set of alternatives must be made.2294 * This is equivalent to EBNF alternation (A | B | C | D ...)2295 *2296 * There are a couple of syntax forms for the inner alternatives array.2297 *2298 * Passing alternatives array directly:2299 * this.OR([2300 * {ALT:()=>{this.CONSUME(One)}},2301 * {ALT:()=>{this.CONSUME(Two)}},2302 * {ALT:()=>{this.CONSUME(Three)}}2303 * ])2304 *2305 * Passing alternative array directly with predicates (GATE).2306 * this.OR([2307 * {GATE: predicateFunc1, ALT:()=>{this.CONSUME(One)}},2308 * {GATE: predicateFuncX, ALT:()=>{this.CONSUME(Two)}},2309 * {GATE: predicateFuncX, ALT:()=>{this.CONSUME(Three)}}2310 * ])2311 *2312 * These syntax forms can also be mixed:2313 * this.OR([2314 * {GATE: predicateFunc1, ALT:()=>{this.CONSUME(One)}},2315 * {ALT:()=>{this.CONSUME(Two)}},2316 * {ALT:()=>{this.CONSUME(Three)}}2317 * ])2318 *2319 * Additionally an "options" object may be used:2320 * this.OR({2321 * DEF:[2322 * {ALT:()=>{this.CONSUME(One)}},2323 * {ALT:()=>{this.CONSUME(Two)}},2324 * {ALT:()=>{this.CONSUME(Three)}}2325 * ],2326 * // OPTIONAL property2327 * ERR_MSG: "A Number"2328 * })2329 *2330 * The 'predicateFuncX' in the long form can be used to add constraints to choosing the alternative.2331 *2332 * As in CONSUME the index in the method name indicates the occurrence2333 * of the alternation production in it's top rule.2334 *2335 * @param altsOrOpts - A set of alternatives or an "OPTIONS" object describing the alternatives and optional properties.2336 *2337 * @returns {*} - The result of invoking the chosen alternative.2338 */2339 Parser.prototype.OR1 = function (altsOrOpts) {2340 return this.orInternal(altsOrOpts, 1);2341 };2342 /**2343 * @see OR12344 */2345 Parser.prototype.OR2 = function (altsOrOpts) {2346 return this.orInternal(altsOrOpts, 2);2347 };2348 /**2349 * @see OR12350 */2351 Parser.prototype.OR3 = function (altsOrOpts) {2352 return this.orInternal(altsOrOpts, 3);2353 };2354 /**2355 * @see OR12356 */2357 Parser.prototype.OR4 = function (altsOrOpts) {2358 return this.orInternal(altsOrOpts, 4);2359 };2360 /**2361 * @see OR12362 */2363 Parser.prototype.OR5 = function (altsOrOpts) {2364 return this.orInternal(altsOrOpts, 5);2365 };2366 /**2367 * Convenience method equivalent to MANY1.2368 * @see MANY12369 */2370 Parser.prototype.MANY = function (actionORMethodDef) {2371 return this.MANY1(actionORMethodDef);2372 };2373 /**2374 * Parsing DSL method, that indicates a repetition of zero or more.2375 * This is equivalent to EBNF repetition {...}.2376 *2377 * Note that there are two syntax forms:2378 * - Passing the grammar action directly:2379 * this.MANY(()=>{2380 * this.CONSUME(Comma)2381 * this.CONSUME(Digit)2382 * })2383 *2384 * - using an "options" object:2385 * this.MANY({2386 * GATE: predicateFunc,2387 * DEF: () => {2388 * this.CONSUME(Comma)2389 * this.CONSUME(Digit)2390 * }2391 * });2392 *2393 * The optional 'GATE' property in "options" object form can be used to add constraints2394 * to invoking the grammar action.2395 *2396 * As in CONSUME the index in the method name indicates the occurrence2397 * of the repetition production in it's top rule.2398 *2399 * @param {TokenType} actionORMethodDef - The grammar action to optionally invoke multiple times2400 * or an "OPTIONS" object describing the grammar action and optional properties.2401 *2402 * @returns {OUT[]}2403 */2404 Parser.prototype.MANY1 = function (actionORMethodDef) {2405 return this.manyInternal(1, actionORMethodDef, []);2406 };2407 /**2408 * @see MANY12409 */2410 
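// --------------------------------------------------------------------------
// Illustrative sketch (not part of chevrotain.js). A fragment combining the OR
// forms documented above: the "options" object with an ERR_MSG, a GATE predicate
// on one alternative, and an explicitly empty alternative via EMPTY_ALT (documented
// near the top of this module). The rule, the Minus/Plus/Integer tokens and the
// allowSigned flag are hypothetical; assume this fragment sits inside a parser
// constructor that uses the usual `const $ = this` alias.
$.RULE("signedNumber", () => {
    const sign = $.OR({
        DEF: [
            { GATE: () => $.allowSigned === true, ALT: () => { $.CONSUME(Minus); return -1; } },
            { ALT: () => { $.CONSUME(Plus); return 1; } },
            { ALT: chevrotain.EMPTY_ALT(1) } // no sign token present -> default to +1
        ],
        ERR_MSG: "a sign or a number" // used in the error message when no alternative matches
    });
    const digitsTok = $.CONSUME(Integer);
    return sign * parseInt(digitsTok.image, 10);
});
// --- end of sketch --------------------------------------------------------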
Parser.prototype.MANY2 = function (actionORMethodDef) {2411 return this.manyInternal(2, actionORMethodDef, []);2412 };2413 /**2414 * @see MANY12415 */2416 Parser.prototype.MANY3 = function (actionORMethodDef) {2417 return this.manyInternal(3, actionORMethodDef, []);2418 };2419 /**2420 * @see MANY12421 */2422 Parser.prototype.MANY4 = function (actionORMethodDef) {2423 return this.manyInternal(4, actionORMethodDef, []);2424 };2425 /**2426 * @see MANY12427 */2428 Parser.prototype.MANY5 = function (actionORMethodDef) {2429 return this.manyInternal(5, actionORMethodDef, []);2430 };2431 /**2432 * Convenience method equivalent to MANY_SEP1.2433 * @see MANY_SEP12434 */2435 Parser.prototype.MANY_SEP = function (options) {2436 return this.MANY_SEP1(options);2437 };2438 /**2439 * Parsing DSL method, that indicates a repetition of zero or more with a separator2440 * Token between the repetitions.2441 *2442 * Example:2443 *2444 * this.MANY_SEP({2445 * SEP:Comma,2446 * DEF: () => {2447 * this.CONSUME(Number};2448 * ...2449 * );2450 * })2451 *2452 * Note that because this DSL method always requires more than one argument the options object is always required2453 * and it is not possible to use a shorter form like in the MANY DSL method.2454 *2455 * Note that for the purposes of deciding on whether or not another iteration exists2456 * Only a single Token is examined (The separator). Therefore if the grammar being implemented is2457 * so "crazy" to require multiple tokens to identify an item separator please use the more basic DSL methods2458 * to implement it.2459 *2460 * As in CONSUME the index in the method name indicates the occurrence2461 * of the repetition production in it's top rule.2462 *2463 * Note that due to current limitations in the implementation the "SEP" property must appear BEFORE the "DEF" property.2464 *2465 * @param options - An object defining the grammar of each iteration and the separator between iterations2466 *2467 * @return {ISeparatedIterationResult<OUT>}2468 */2469 Parser.prototype.MANY_SEP1 = function (options) {2470 return this.manySepFirstInternal(1, options, {2471 values: [],2472 separators: []2473 });2474 };2475 /**2476 * @see MANY_SEP12477 */2478 Parser.prototype.MANY_SEP2 = function (options) {2479 return this.manySepFirstInternal(2, options, {2480 values: [],2481 separators: []2482 });2483 };2484 /**2485 * @see MANY_SEP12486 */2487 Parser.prototype.MANY_SEP3 = function (options) {2488 return this.manySepFirstInternal(3, options, {2489 values: [],2490 separators: []2491 });2492 };2493 /**2494 * @see MANY_SEP12495 */2496 Parser.prototype.MANY_SEP4 = function (options) {2497 return this.manySepFirstInternal(4, options, {2498 values: [],2499 separators: []2500 });2501 };2502 /**2503 * @see MANY_SEP12504 */2505 Parser.prototype.MANY_SEP5 = function (options) {2506 return this.manySepFirstInternal(5, options, {2507 values: [],2508 separators: []2509 });2510 };2511 /**2512 * Convenience method equivalent to AT_LEAST_ONE1.2513 * @see AT_LEAST_ONE12514 */2515 Parser.prototype.AT_LEAST_ONE = function (actionORMethodDef) {2516 return this.AT_LEAST_ONE1(actionORMethodDef);2517 };2518 /**2519 * Convenience method, same as MANY but the repetition is of one or more.2520 * failing to match at least one repetition will result in a parsing error and2521 * cause a parsing error.2522 *2523 * @see MANY12524 *2525 * @param actionORMethodDef - The grammar action to optionally invoke multiple times2526 * or an "OPTIONS" object describing the grammar action and optional properties.2527 *2528 * 
@return {OUT[]}2529 */2530 Parser.prototype.AT_LEAST_ONE1 = function (actionORMethodDef) {2531 return this.atLeastOneInternal(1, actionORMethodDef, []);2532 };2533 /**2534 * @see AT_LEAST_ONE12535 */2536 Parser.prototype.AT_LEAST_ONE2 = function (actionORMethodDef) {2537 return this.atLeastOneInternal(2, actionORMethodDef, []);2538 };2539 /**2540 * @see AT_LEAST_ONE12541 */2542 Parser.prototype.AT_LEAST_ONE3 = function (actionORMethodDef) {2543 return this.atLeastOneInternal(3, actionORMethodDef, []);2544 };2545 /**2546 * @see AT_LEAST_ONE12547 */2548 Parser.prototype.AT_LEAST_ONE4 = function (actionORMethodDef) {2549 return this.atLeastOneInternal(4, actionORMethodDef, []);2550 };2551 /**2552 * @see AT_LEAST_ONE12553 */2554 Parser.prototype.AT_LEAST_ONE5 = function (actionORMethodDef) {2555 return this.atLeastOneInternal(5, actionORMethodDef, []);2556 };2557 /**2558 * Convenience method equivalent to AT_LEAST_ONE_SEP1.2559 * @see AT_LEAST_ONE12560 */2561 Parser.prototype.AT_LEAST_ONE_SEP = function (options) {2562 return this.AT_LEAST_ONE_SEP1(options);2563 };2564 /**2565 * Convenience method, same as MANY_SEP but the repetition is of one or more.2566 * failing to match at least one repetition will result in a parsing error and2567 * cause the parser to attempt error recovery.2568 *2569 * Note that an additional optional property ERR_MSG can be used to provide custom error messages.2570 *2571 * @see MANY_SEP12572 *2573 * @param options - An object defining the grammar of each iteration and the separator between iterations2574 *2575 * @return {ISeparatedIterationResult<OUT>}2576 */2577 Parser.prototype.AT_LEAST_ONE_SEP1 = function (options) {2578 return this.atLeastOneSepFirstInternal(1, options, {2579 values: [],2580 separators: []2581 });2582 };2583 /**2584 * @see AT_LEAST_ONE_SEP12585 */2586 Parser.prototype.AT_LEAST_ONE_SEP2 = function (options) {2587 return this.atLeastOneSepFirstInternal(2, options, {2588 values: [],2589 separators: []2590 });2591 };2592 /**2593 * @see AT_LEAST_ONE_SEP12594 */2595 Parser.prototype.AT_LEAST_ONE_SEP3 = function (options) {2596 return this.atLeastOneSepFirstInternal(3, options, {2597 values: [],2598 separators: []2599 });2600 };2601 /**2602 * @see AT_LEAST_ONE_SEP12603 */2604 Parser.prototype.AT_LEAST_ONE_SEP4 = function (options) {2605 return this.atLeastOneSepFirstInternal(4, options, {2606 values: [],2607 separators: []2608 });2609 };2610 /**2611 * @see AT_LEAST_ONE_SEP12612 */2613 Parser.prototype.AT_LEAST_ONE_SEP5 = function (options) {2614 return this.atLeastOneSepFirstInternal(5, options, {2615 values: [],2616 separators: []2617 });2618 };2619 /**2620 *2621 * @param {string} name - The name of the rule.2622 * @param {TokenType} implementation - The implementation of the rule.2623 * @param {IRuleConfig} [config] - The rule's optional configuration.2624 *2625 * @returns {TokenType} - The parsing rule which is the production implementation wrapped with the parsing logic that handles2626 * Parser state / error recovery&reporting/ ...2627 */2628 Parser.prototype.RULE = function (name, implementation, 2629 // TODO: how to describe the optional return type of CSTNode? T|CstNode is not good because it is not backward2630 // compatible, T|any is very general...2631 config) {2632 // TODO: how to describe the optional return type of CSTNode? 
T|CstNode is not good because it is not backward2633 // compatible, T|any is very general...2634 if (config === void 0) { config = DEFAULT_RULE_CONFIG; }2635 var ruleErrors = checks_1.validateRuleName(name);2636 ruleErrors = ruleErrors.concat(checks_1.validateRuleDoesNotAlreadyExist(name, this.definedRulesNames, this.className));2637 this.definedRulesNames.push(name);2638 this.definitionErrors.push.apply(this.definitionErrors, ruleErrors); // mutability for the win2639 // only build the gast representation once.2640 if (!this._productions.containsKey(name)) {2641 var gastProduction = gast_builder_1.buildTopProduction(implementation.toString(), name, this.tokensMap);2642 this._productions.put(name, gastProduction);2643 }2644 else {2645 var parserClassProductions = cache.getProductionsForClass(this.className);2646 var cachedProduction = parserClassProductions.get(name);2647 // in case of duplicate rules the cache will not be filled at this point.2648 if (!utils_1.isUndefined(cachedProduction)) {2649 // filling up the _productions is always needed to inheriting grammars can access it (as an instance member)2650 // otherwise they will be unaware of productions defined in super grammars.2651 this._productions.put(name, cachedProduction);2652 }2653 }2654 var ruleImplementation = this.defineRule(name, implementation, config);2655 this[name] = ruleImplementation;2656 return ruleImplementation;2657 };2658 /**2659 * @See RULE2660 * Same as RULE, but should only be used in "extending" grammars to override rules/productions2661 * from the super grammar.2662 */2663 Parser.prototype.OVERRIDE_RULE = function (name, impl, config) {2664 if (config === void 0) { config = DEFAULT_RULE_CONFIG; }2665 var ruleErrors = checks_1.validateRuleName(name);2666 ruleErrors = ruleErrors.concat(checks_1.validateRuleIsOverridden(name, this.definedRulesNames, this.className));2667 this.definitionErrors.push.apply(this.definitionErrors, ruleErrors); // mutability for the win2668 var alreadyOverridden = cache.getProductionOverriddenForClass(this.className);2669 // only build the GAST of an overridden rule once.2670 if (!alreadyOverridden.containsKey(name)) {2671 alreadyOverridden.put(name, true);2672 var gastProduction = gast_builder_1.buildTopProduction(impl.toString(), name, this.tokensMap);2673 this._productions.put(name, gastProduction);2674 }2675 else {2676 var parserClassProductions = cache.getProductionsForClass(this.className);2677 // filling up the _productions is always needed to inheriting grammars can access it (as an instance member)2678 // otherwise they will be unaware of productions defined in super grammars.2679 this._productions.put(name, parserClassProductions.get(name));2680 }2681 return this.defineRule(name, impl, config);2682 };2683 Parser.prototype.ruleInvocationStateUpdate = function (shortName, fullName, idxInCallingRule) {2684 this.RULE_OCCURRENCE_STACK.push(idxInCallingRule);2685 this.RULE_STACK.push(shortName);2686 // NOOP when cst is disabled2687 this.cstInvocationStateUpdate(fullName, shortName);2688 };2689 Parser.prototype.ruleFinallyStateUpdate = function () {2690 this.RULE_STACK.pop();2691 this.RULE_OCCURRENCE_STACK.pop();2692 // NOOP when cst is disabled2693 this.cstFinallyStateUpdate();2694 if (this.RULE_STACK.length === 0 && !this.isAtEndOfInput()) {2695 var firstRedundantTok = this.LA(1);2696 var errMsg = this.errorMessageProvider.buildNotAllInputParsedMessage({2697 firstRedundant: firstRedundantTok,2698 ruleName: this.getCurrRuleFullName()2699 });2700 this.SAVE_ERROR(new 
exceptions_public_1.exceptions.NotAllInputParsedException(errMsg, firstRedundantTok));2701 }2702 };2703 Parser.prototype.nestedRuleInvocationStateUpdate = function (nestedRuleName, shortNameKey) {2704 this.RULE_OCCURRENCE_STACK.push(1);2705 this.RULE_STACK.push(shortNameKey);2706 this.cstNestedInvocationStateUpdate(nestedRuleName, shortNameKey);2707 };2708 Parser.prototype.nestedRuleFinallyStateUpdate = function () {2709 this.RULE_STACK.pop();2710 this.RULE_OCCURRENCE_STACK.pop();2711 // NOOP when cst is disabled2712 this.cstNestedFinallyStateUpdate();2713 };2714 /**2715 * Returns an "imaginary" Token to insert when Single Token Insertion is done2716 * Override this if you require special behavior in your grammar.2717 * For example if an IntegerToken is required provide one with the image '0' so it would be valid syntactically.2718 */2719 Parser.prototype.getTokenToInsert = function (tokType) {2720 var tokToInsert = tokens_public_1.createTokenInstance(tokType, "", NaN, NaN, NaN, NaN, NaN, NaN);2721 tokToInsert.isInsertedInRecovery = true;2722 return tokToInsert;2723 };2724 /**2725 * By default all tokens type may be inserted. This behavior may be overridden in inheriting Recognizers2726 * for example: One may decide that only punctuation tokens may be inserted automatically as they have no additional2727 * semantic value. (A mandatory semicolon has no additional semantic meaning, but an Integer may have additional meaning2728 * depending on its int value and context (Inserting an integer 0 in cardinality: "[1..]" will cause semantic issues2729 * as the max of the cardinality will be greater than the min value (and this is a false error!).2730 */2731 Parser.prototype.canTokenTypeBeInsertedInRecovery = function (tokType) {2732 return true;2733 };2734 Parser.prototype.getCurrentGrammarPath = function (tokType, tokIdxInRule) {2735 var pathRuleStack = this.getHumanReadableRuleStack();2736 var pathOccurrenceStack = utils_1.cloneArr(this.RULE_OCCURRENCE_STACK);2737 var grammarPath = {2738 ruleStack: pathRuleStack,2739 occurrenceStack: pathOccurrenceStack,2740 lastTok: tokType,2741 lastTokOccurrence: tokIdxInRule2742 };2743 return grammarPath;2744 };2745 // TODO: should this be a member method or a utility? 
it does not have any state or usage of 'this'...2746 // TODO: should this be more explicitly part of the public API?2747 Parser.prototype.getNextPossibleTokenTypes = function (grammarPath) {2748 var topRuleName = utils_1.first(grammarPath.ruleStack);2749 var gastProductions = this.getGAstProductions();2750 var topProduction = gastProductions.get(topRuleName);2751 var nextPossibleTokenTypes = new interpreter_1.NextAfterTokenWalker(topProduction, grammarPath).startWalking();2752 return nextPossibleTokenTypes;2753 };2754 Parser.prototype.subruleInternal = function (ruleToCall, idx, args) {2755 var ruleResult = ruleToCall.call(this, idx, args);2756 this.cstPostNonTerminal(ruleResult, ruleToCall.ruleName);2757 return ruleResult;2758 };2759 /**2760 * @param tokType - The Type of Token we wish to consume (Reference to its constructor function).2761 * @param idx - Occurrence index of consumed token in the invoking parser rule text2762 * for example:2763 * IDENT (DOT IDENT)*2764 * the first ident will have idx 1 and the second one idx 22765 * * note that for the second ident the idx is always 2 even if its invoked 30 times in the same rule2766 * the idx is about the position in grammar (source code) and has nothing to do with a specific invocation2767 * details.2768 * @param options -2769 *2770 * @returns {Token} - The consumed Token.2771 */2772 Parser.prototype.consumeInternal = function (tokType, idx, options) {2773 var consumedToken;2774 try {2775 var nextToken = this.LA(1);2776 if (this.tokenMatcher(nextToken, tokType) === true) {2777 this.consumeToken();2778 consumedToken = nextToken;2779 }2780 else {2781 var msg = void 0;2782 if (options !== undefined && options.ERR_MSG) {2783 msg = options.ERR_MSG;2784 }2785 else {2786 msg = this.errorMessageProvider.buildMismatchTokenMessage({2787 expected: tokType,2788 actual: nextToken,2789 ruleName: this.getCurrRuleFullName()2790 });2791 }2792 throw this.SAVE_ERROR(new exceptions_public_1.exceptions.MismatchedTokenException(msg, nextToken));2793 }2794 }2795 catch (eFromConsumption) {2796 // no recovery allowed during backtracking, otherwise backtracking may recover invalid syntax and accept it2797 // but the original syntax could have been parsed successfully without any backtracking + recovery2798 if (this.recoveryEnabled &&2799 // TODO: more robust checking of the exception type. 
Perhaps Typescript extending expressions?2800 eFromConsumption.name === "MismatchedTokenException" &&2801 !this.isBackTracking()) {2802 var follows = this.getFollowsForInRuleRecovery(tokType, idx);2803 try {2804 consumedToken = this.tryInRuleRecovery(tokType, follows);2805 }2806 catch (eFromInRuleRecovery) {2807 if (eFromInRuleRecovery.name === IN_RULE_RECOVERY_EXCEPTION) {2808 // failed in RuleRecovery.2809 // throw the original error in order to trigger reSync error recovery2810 throw eFromConsumption;2811 }2812 else {2813 throw eFromInRuleRecovery;2814 }2815 }2816 }2817 else {2818 throw eFromConsumption;2819 }2820 }2821 this.cstPostTerminal(tokType, consumedToken);2822 return consumedToken;2823 };2824 // other functionality2825 Parser.prototype.saveRecogState = function () {2826 // errors is a getter which will clone the errors array2827 var savedErrors = this.errors;2828 var savedRuleStack = utils_1.cloneArr(this.RULE_STACK);2829 return {2830 errors: savedErrors,2831 lexerState: this.exportLexerState(),2832 RULE_STACK: savedRuleStack,2833 CST_STACK: this.CST_STACK,2834 LAST_EXPLICIT_RULE_STACK: this.LAST_EXPLICIT_RULE_STACK2835 };2836 };2837 Parser.prototype.reloadRecogState = function (newState) {2838 this.errors = newState.errors;2839 this.importLexerState(newState.lexerState);2840 this.RULE_STACK = newState.RULE_STACK;2841 };2842 Parser.prototype.defineRule = function (ruleName, impl, config) {2843 if (this.selfAnalysisDone) {2844 throw Error("Grammar rule <" + ruleName + "> may not be defined after the 'performSelfAnalysis' method has been called'\n" +2845 "Make sure that all grammar rule definitions are done before 'performSelfAnalysis' is called.");2846 }2847 var resyncEnabled = utils_1.has(config, "resyncEnabled")2848 ? config.resyncEnabled2849 : DEFAULT_RULE_CONFIG.resyncEnabled;2850 var recoveryValueFunc = utils_1.has(config, "recoveryValueFunc")2851 ? 
config.recoveryValueFunc2852 : DEFAULT_RULE_CONFIG.recoveryValueFunc;2853 // performance optimization: Use small integers as keys for the longer human readable "full" rule names.2854 // this greatly improves Map access time (as much as 8% for some performance benchmarks).2855 /* tslint:disable */2856 var shortName = this.ruleShortNameIdx <<2857 (keys_1.BITS_FOR_METHOD_IDX + keys_1.BITS_FOR_OCCURRENCE_IDX);2858 /* tslint:enable */2859 this.ruleShortNameIdx++;2860 this.shortRuleNameToFull.put(shortName, ruleName);2861 this.fullRuleNameToShort.put(ruleName, shortName);2862 function invokeRuleWithTry(args) {2863 try {2864 // TODO: dynamically get rid of this?2865 if (this.outputCst === true) {2866 impl.apply(this, args);2867 return this.CST_STACK[this.CST_STACK.length - 1];2868 }2869 else {2870 return impl.apply(this, args);2871 }2872 }2873 catch (e) {2874 var isFirstInvokedRule = this.RULE_STACK.length === 1;2875 // note the reSync is always enabled for the first rule invocation, because we must always be able to2876 // reSync with EOF and just output some INVALID ParseTree2877 // during backtracking reSync recovery is disabled, otherwise we can't be certain the backtracking2878 // path is really the most valid one2879 var reSyncEnabled = resyncEnabled &&2880 !this.isBackTracking() &&2881 this.recoveryEnabled;2882 if (exceptions_public_1.exceptions.isRecognitionException(e)) {2883 if (reSyncEnabled) {2884 var reSyncTokType = this.findReSyncTokenType();2885 if (this.isInCurrentRuleReSyncSet(reSyncTokType)) {2886 e.resyncedTokens = this.reSyncTo(reSyncTokType);2887 if (this.outputCst) {2888 var partialCstResult = this.CST_STACK[this.CST_STACK.length - 1];2889 partialCstResult.recoveredNode = true;2890 return partialCstResult;2891 }2892 else {2893 return recoveryValueFunc();2894 }2895 }2896 else {2897 if (this.outputCst) {2898 // recovery is only for "real" non nested rules2899 var prevRuleShortName = this.getLastExplicitRuleShortNameNoCst();2900 var preRuleFullName = this.shortRuleNameToFull.get(prevRuleShortName);2901 var partialCstResult = this.CST_STACK[this.CST_STACK.length - 1];2902 partialCstResult.recoveredNode = true;2903 this.cstPostNonTerminalRecovery(partialCstResult, preRuleFullName);2904 }2905 // to be handled farther up the call stack2906 throw e;2907 }2908 }2909 else if (isFirstInvokedRule) {2910 // otherwise a Redundant input error will be created as well and we cannot guarantee that this is indeed the case2911 this.moveToTerminatedState();2912 // the parser should never throw one of its own errors outside its flow.2913 // even if error recovery is disabled2914 return recoveryValueFunc();2915 }2916 else {2917 // to be handled farther up the call stack2918 throw e;2919 }2920 }2921 else {2922 // some other Error type which we don't know how to handle (for example a built in JavaScript Error)2923 throw e;2924 }2925 }2926 finally {2927 this.ruleFinallyStateUpdate();2928 }2929 }2930 var wrappedGrammarRule;2931 wrappedGrammarRule = function (idxInCallingRule, args) {2932 if (idxInCallingRule === void 0) { idxInCallingRule = 1; }2933 this.ruleInvocationStateUpdate(shortName, ruleName, idxInCallingRule);2934 return invokeRuleWithTry.call(this, args);2935 };2936 var ruleNamePropName = "ruleName";2937 wrappedGrammarRule[ruleNamePropName] = ruleName;2938 return wrappedGrammarRule;2939 };2940 Parser.prototype.tryInRepetitionRecovery = function (grammarRule, grammarRuleArgs, lookAheadFunc, expectedTokType) {2941 var _this = this;2942 // TODO: can the resyncTokenType be cached?2943 var 
reSyncTokType = this.findReSyncTokenType();2944 var savedLexerState = this.exportLexerState();2945 var resyncedTokens = [];2946 var passedResyncPoint = false;2947 var nextTokenWithoutResync = this.LA(1);2948 var currToken = this.LA(1);2949 var generateErrorMessage = function () {2950 // we are preemptively re-syncing before an error has been detected, therefor we must reproduce2951 // the error that would have been thrown2952 var msg = _this.errorMessageProvider.buildMismatchTokenMessage({2953 expected: expectedTokType,2954 actual: nextTokenWithoutResync,2955 ruleName: _this.getCurrRuleFullName()2956 });2957 var error = new exceptions_public_1.exceptions.MismatchedTokenException(msg, nextTokenWithoutResync);2958 // the first token here will be the original cause of the error, this is not part of the resyncedTokens property.2959 error.resyncedTokens = utils_1.dropRight(resyncedTokens);2960 _this.SAVE_ERROR(error);2961 };2962 while (!passedResyncPoint) {2963 // re-synced to a point where we can safely exit the repetition/2964 if (this.tokenMatcher(currToken, expectedTokType)) {2965 generateErrorMessage();2966 return; // must return here to avoid reverting the inputIdx2967 }2968 else if (lookAheadFunc.call(this)) {2969 // we skipped enough tokens so we can resync right back into another iteration of the repetition grammar rule2970 generateErrorMessage();2971 // recursive invocation in other to support multiple re-syncs in the same top level repetition grammar rule2972 grammarRule.apply(this, grammarRuleArgs);2973 return; // must return here to avoid reverting the inputIdx2974 }2975 else if (this.tokenMatcher(currToken, reSyncTokType)) {2976 passedResyncPoint = true;2977 }2978 else {2979 currToken = this.SKIP_TOKEN();2980 this.addToResyncTokens(currToken, resyncedTokens);2981 }2982 }2983 // we were unable to find a CLOSER point to resync inside the Repetition, reset the state.2984 // The parsing exception we were trying to prevent will happen in the NEXT parsing step. 
it may be handled by2985 // "between rules" resync recovery later in the flow.2986 this.importLexerState(savedLexerState);2987 };2988 Parser.prototype.shouldInRepetitionRecoveryBeTried = function (expectTokAfterLastMatch, nextTokIdx) {2989 // arguments to try and perform resync into the next iteration of the many are missing2990 if (expectTokAfterLastMatch === undefined || nextTokIdx === undefined) {2991 return false;2992 }2993 // no need to recover, next token is what we expect...2994 if (this.tokenMatcher(this.LA(1), expectTokAfterLastMatch)) {2995 return false;2996 }2997 // error recovery is disabled during backtracking as it can make the parser ignore a valid grammar path2998 // and prefer some backtracking path that includes recovered errors.2999 if (this.isBackTracking()) {3000 return false;3001 }3002 // if we can perform inRule recovery (single token insertion or deletion) we always prefer that recovery algorithm3003 // because if it works, it makes the least amount of changes to the input stream (greedy algorithm)3004 //noinspection RedundantIfStatementJS3005 if (this.canPerformInRuleRecovery(expectTokAfterLastMatch, this.getFollowsForInRuleRecovery(expectTokAfterLastMatch, nextTokIdx))) {3006 return false;3007 }3008 return true;3009 };3010 // Error Recovery functionality3011 Parser.prototype.getFollowsForInRuleRecovery = function (tokType, tokIdxInRule) {3012 var grammarPath = this.getCurrentGrammarPath(tokType, tokIdxInRule);3013 var follows = this.getNextPossibleTokenTypes(grammarPath);3014 return follows;3015 };3016 Parser.prototype.tryInRuleRecovery = function (expectedTokType, follows) {3017 if (this.canRecoverWithSingleTokenInsertion(expectedTokType, follows)) {3018 var tokToInsert = this.getTokenToInsert(expectedTokType);3019 return tokToInsert;3020 }3021 if (this.canRecoverWithSingleTokenDeletion(expectedTokType)) {3022 var nextTok = this.SKIP_TOKEN();3023 this.consumeToken();3024 return nextTok;3025 }3026 throw new InRuleRecoveryException("sad sad panda");3027 };3028 Parser.prototype.canPerformInRuleRecovery = function (expectedToken, follows) {3029 return (this.canRecoverWithSingleTokenInsertion(expectedToken, follows) ||3030 this.canRecoverWithSingleTokenDeletion(expectedToken));3031 };3032 Parser.prototype.canRecoverWithSingleTokenInsertion = function (expectedTokType, follows) {3033 var _this = this;3034 if (!this.canTokenTypeBeInsertedInRecovery(expectedTokType)) {3035 return false;3036 }3037 // must know the possible following tokens to perform single token insertion3038 if (utils_1.isEmpty(follows)) {3039 return false;3040 }3041 var mismatchedTok = this.LA(1);3042 var isMisMatchedTokInFollows = utils_1.find(follows, function (possibleFollowsTokType) {3043 return _this.tokenMatcher(mismatchedTok, possibleFollowsTokType);3044 }) !== undefined;3045 return isMisMatchedTokInFollows;3046 };3047 Parser.prototype.canRecoverWithSingleTokenDeletion = function (expectedTokType) {3048 var isNextTokenWhatIsExpected = this.tokenMatcher(this.LA(2), expectedTokType);3049 return isNextTokenWhatIsExpected;3050 };3051 Parser.prototype.isInCurrentRuleReSyncSet = function (tokenTypeIdx) {3052 var followKey = this.getCurrFollowKey();3053 var currentRuleReSyncSet = this.getFollowSetFromFollowKey(followKey);3054 return utils_1.contains(currentRuleReSyncSet, tokenTypeIdx);3055 };3056 Parser.prototype.findReSyncTokenType = function () {3057 var allPossibleReSyncTokTypes = this.flattenFollowSet();3058 // this loop will always terminate as EOF is always in the follow stack and also always 
(virtually) in the input3059 var nextToken = this.LA(1);3060 var k = 2;3061 while (true) {3062 var nextTokenType = nextToken.tokenType;3063 if (utils_1.contains(allPossibleReSyncTokTypes, nextTokenType)) {3064 return nextTokenType;3065 }3066 nextToken = this.LA(k);3067 k++;3068 }3069 };3070 Parser.prototype.getCurrFollowKey = function () {3071 // the length is at least one as we always add the ruleName to the stack before invoking the rule.3072 if (this.RULE_STACK.length === 1) {3073 return EOF_FOLLOW_KEY;3074 }3075 var currRuleShortName = this.getLastExplicitRuleShortName();3076 var prevRuleShortName = this.getPreviousExplicitRuleShortName();3077 var prevRuleIdx = this.getPreviousExplicitRuleOccurenceIndex();3078 return {3079 ruleName: this.shortRuleNameToFullName(currRuleShortName),3080 idxInCallingRule: prevRuleIdx,3081 inRule: this.shortRuleNameToFullName(prevRuleShortName)3082 };3083 };3084 Parser.prototype.buildFullFollowKeyStack = function () {3085 var _this = this;3086 var explicitRuleStack = this.RULE_STACK;3087 var explicitOccurrenceStack = this.RULE_OCCURRENCE_STACK;3088 if (!utils_1.isEmpty(this.LAST_EXPLICIT_RULE_STACK)) {3089 explicitRuleStack = utils_1.map(this.LAST_EXPLICIT_RULE_STACK, function (idx) { return _this.RULE_STACK[idx]; });3090 explicitOccurrenceStack = utils_1.map(this.LAST_EXPLICIT_RULE_STACK, function (idx) { return _this.RULE_OCCURRENCE_STACK[idx]; });3091 }3092 // TODO: only iterate over explicit rules here3093 return utils_1.map(explicitRuleStack, function (ruleName, idx) {3094 if (idx === 0) {3095 return EOF_FOLLOW_KEY;3096 }3097 return {3098 ruleName: _this.shortRuleNameToFullName(ruleName),3099 idxInCallingRule: explicitOccurrenceStack[idx],3100 inRule: _this.shortRuleNameToFullName(explicitRuleStack[idx - 1])3101 };3102 });3103 };3104 Parser.prototype.flattenFollowSet = function () {3105 var _this = this;3106 var followStack = utils_1.map(this.buildFullFollowKeyStack(), function (currKey) {3107 return _this.getFollowSetFromFollowKey(currKey);3108 });3109 return utils_1.flatten(followStack);3110 };3111 Parser.prototype.getFollowSetFromFollowKey = function (followKey) {3112 if (followKey === EOF_FOLLOW_KEY) {3113 return [tokens_public_1.EOF];3114 }3115 var followName = followKey.ruleName +3116 followKey.idxInCallingRule +3117 constants_1.IN +3118 followKey.inRule;3119 return cache.getResyncFollowsForClass(this.className).get(followName);3120 };3121 // It does not make any sense to include a virtual EOF token in the list of resynced tokens3122 // as EOF does not really exist and thus does not contain any useful information (line/column numbers)3123 Parser.prototype.addToResyncTokens = function (token, resyncTokens) {3124 if (!this.tokenMatcher(token, tokens_public_1.EOF)) {3125 resyncTokens.push(token);3126 }3127 return resyncTokens;3128 };3129 Parser.prototype.reSyncTo = function (tokType) {3130 var resyncedTokens = [];3131 var nextTok = this.LA(1);3132 while (this.tokenMatcher(nextTok, tokType) === false) {3133 nextTok = this.SKIP_TOKEN();3134 this.addToResyncTokens(nextTok, resyncedTokens);3135 }3136 // the last token is not part of the error.3137 return utils_1.dropRight(resyncedTokens);3138 };3139 Parser.prototype.attemptInRepetitionRecovery = function (prodFunc, args, lookaheadFunc, dslMethodIdx, prodOccurrence, nextToksWalker) {3140 var key = this.getKeyForAutomaticLookahead(dslMethodIdx, prodOccurrence);3141 var firstAfterRepInfo = this.firstAfterRepMap.get(key);3142 if (firstAfterRepInfo === undefined) {3143 var currRuleName = 
this.getCurrRuleFullName();3144 var ruleGrammar = this.getGAstProductions().get(currRuleName);3145 var walker = new nextToksWalker(ruleGrammar, prodOccurrence);3146 firstAfterRepInfo = walker.startWalking();3147 this.firstAfterRepMap.put(key, firstAfterRepInfo);3148 }3149 var expectTokAfterLastMatch = firstAfterRepInfo.token;3150 var nextTokIdx = firstAfterRepInfo.occurrence;3151 var isEndOfRule = firstAfterRepInfo.isEndOfRule;3152 // special edge case of a TOP most repetition after which the input should END.3153 // this will force an attempt for inRule recovery in that scenario.3154 if (this.RULE_STACK.length === 1 &&3155 isEndOfRule &&3156 expectTokAfterLastMatch === undefined) {3157 expectTokAfterLastMatch = tokens_public_1.EOF;3158 nextTokIdx = 1;3159 }3160 if (this.shouldInRepetitionRecoveryBeTried(expectTokAfterLastMatch, nextTokIdx)) {3161 // TODO: performance optimization: instead of passing the original args here, we modify3162 // the args param (or create a new one) and make sure the lookahead func is explicitly provided3163 // to avoid searching the cache for it once more.3164 this.tryInRepetitionRecovery(prodFunc, args, lookaheadFunc, expectTokAfterLastMatch);3165 }3166 };3167 Parser.prototype.cstNestedInvocationStateUpdate = function (nestedName, shortName) {3168 var initDef = this.cstDictDefForRule.get(shortName);3169 this.CST_STACK.push({3170 name: nestedName,3171 fullName: this.shortRuleNameToFull.get(this.getLastExplicitRuleShortName()) + nestedName,3172 children: initDef()3173 });3174 };3175 Parser.prototype.cstInvocationStateUpdate = function (fullRuleName, shortName) {3176 this.LAST_EXPLICIT_RULE_STACK.push(this.RULE_STACK.length - 1);3177 var initDef = this.cstDictDefForRule.get(shortName);3178 this.CST_STACK.push({3179 name: fullRuleName,3180 children: initDef()3181 });3182 };3183 Parser.prototype.cstFinallyStateUpdate = function () {3184 this.LAST_EXPLICIT_RULE_STACK.pop();3185 this.CST_STACK.pop();3186 };3187 Parser.prototype.cstNestedFinallyStateUpdate = function () {3188 this.CST_STACK.pop();3189 };3190 // Implementation of parsing DSL3191 Parser.prototype.optionInternal = function (actionORMethodDef, occurrence) {3192 var key = this.getKeyForAutomaticLookahead(keys_1.OPTION_IDX, occurrence);3193 var nestedName = this.nestedRuleBeforeClause(actionORMethodDef, key);3194 try {3195 return this.optionInternalLogic(actionORMethodDef, occurrence, key);3196 }3197 finally {3198 if (nestedName !== undefined) {3199 this.nestedRuleFinallyClause(key, nestedName);3200 }3201 }3202 };3203 Parser.prototype.optionInternalNoCst = function (actionORMethodDef, occurrence) {3204 var key = this.getKeyForAutomaticLookahead(keys_1.OPTION_IDX, occurrence);3205 return this.optionInternalLogic(actionORMethodDef, occurrence, key);3206 };3207 Parser.prototype.optionInternalLogic = function (actionORMethodDef, occurrence, key) {3208 var _this = this;3209 var lookAheadFunc = this.getLookaheadFuncForOption(key, occurrence);3210 var action;3211 var predicate;3212 if (actionORMethodDef.DEF !== undefined) {3213 action = actionORMethodDef.DEF;3214 predicate = actionORMethodDef.GATE;3215 // predicate present3216 if (predicate !== undefined) {3217 var orgLookaheadFunction_1 = lookAheadFunc;3218 lookAheadFunc = function () {3219 return (predicate.call(_this) && orgLookaheadFunction_1.call(_this));3220 };3221 }3222 }3223 else {3224 action = actionORMethodDef;3225 }3226 if (lookAheadFunc.call(this) === true) {3227 return action.call(this);3228 }3229 return undefined;3230 };3231 
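// Sketch of the grammar-author side of optionInternalLogic above: OPTION accepts
// either a plain callback or a { GATE, DEF } object whose GATE predicate is
// AND-ed with the automatically computed lookahead function. This is an
// illustrative fragment -- ReturnTok, SemicolonTok, this.expression and the
// insideConstructorBody flag are assumed to be defined elsewhere in the parser:
this.returnStatement = this.RULE("returnStatement", () => {
    this.CONSUME(ReturnTok);
    this.OPTION({
        // semantic gate: only attempt the optional expression when context allows it
        GATE: () => this.insideConstructorBody === false,
        DEF: () => this.SUBRULE(this.expression)
    });
    this.CONSUME(SemicolonTok);
});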
Parser.prototype.atLeastOneInternal = function (prodOccurrence, actionORMethodDef, result) {3232 var laKey = this.getKeyForAutomaticLookahead(keys_1.AT_LEAST_ONE_IDX, prodOccurrence);3233 var nestedName = this.nestedRuleBeforeClause(actionORMethodDef, laKey);3234 try {3235 return this.atLeastOneInternalLogic(prodOccurrence, actionORMethodDef, result, laKey);3236 }3237 finally {3238 if (nestedName !== undefined) {3239 this.nestedRuleFinallyClause(laKey, nestedName);3240 }3241 }3242 };3243 Parser.prototype.atLeastOneInternalNoCst = function (prodOccurrence, actionORMethodDef, result) {3244 var key = this.getKeyForAutomaticLookahead(keys_1.AT_LEAST_ONE_IDX, prodOccurrence);3245 return this.atLeastOneInternalLogic(prodOccurrence, actionORMethodDef, result, key);3246 };3247 Parser.prototype.atLeastOneInternalLogic = function (prodOccurrence, actionORMethodDef, result, key) {3248 var _this = this;3249 var lookAheadFunc = this.getLookaheadFuncForAtLeastOne(key, prodOccurrence);3250 var action;3251 var predicate;3252 if (actionORMethodDef.DEF !== undefined) {3253 action = actionORMethodDef.DEF;3254 predicate = actionORMethodDef.GATE;3255 // predicate present3256 if (predicate !== undefined) {3257 var orgLookaheadFunction_2 = lookAheadFunc;3258 lookAheadFunc = function () {3259 return (predicate.call(_this) && orgLookaheadFunction_2.call(_this));3260 };3261 }3262 }3263 else {3264 action = actionORMethodDef;3265 }3266 if (lookAheadFunc.call(this) === true) {3267 result.push(action.call(this));3268 while (lookAheadFunc.call(this) === true) {3269 result.push(action.call(this));3270 }3271 }3272 else {3273 throw this.raiseEarlyExitException(prodOccurrence, lookahead_1.PROD_TYPE.REPETITION_MANDATORY, actionORMethodDef.ERR_MSG);3274 }3275 // note that while it may seem that this can cause an error because by using a recursive call to3276 // AT_LEAST_ONE we change the grammar to AT_LEAST_TWO, AT_LEAST_THREE ... , the possible recursive call3277 // from the tryInRepetitionRecovery(...) will only happen IFF there really are TWO/THREE/.... 
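// User-facing counterpart of atLeastOneInternalLogic above: AT_LEAST_ONE must
// match at least one iteration, otherwise an EarlyExitException is raised and the
// optional ERR_MSG is included in that error. Illustrative fragment, assuming
// DigitTok and CommaTok token types are created elsewhere:
this.digits = this.RULE("digits", () => {
    this.AT_LEAST_ONE({
        DEF: () => this.CONSUME(DigitTok),
        ERR_MSG: "at least one digit is required"
    });
});

// The separated variant pairs each additional iteration with a separator token
// and also collects the separators (see atLeastOneSepFirstInternalLogic just below):
this.digitList = this.RULE("digitList", () => {
    this.AT_LEAST_ONE_SEP({
        SEP: CommaTok,
        DEF: () => this.CONSUME(DigitTok)
    });
});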
items.3278 // Performance optimization: "attemptInRepetitionRecovery" will be defined as NOOP unless recovery is enabled3279 this.attemptInRepetitionRecovery(this.atLeastOneInternal, [prodOccurrence, actionORMethodDef, result], lookAheadFunc, keys_1.AT_LEAST_ONE_IDX, prodOccurrence, interpreter_1.NextTerminalAfterAtLeastOneWalker);3280 return result;3281 };3282 Parser.prototype.atLeastOneSepFirstInternal = function (prodOccurrence, options, result) {3283 var laKey = this.getKeyForAutomaticLookahead(keys_1.AT_LEAST_ONE_SEP_IDX, prodOccurrence);3284 var nestedName = this.nestedRuleBeforeClause(options, laKey);3285 try {3286 return this.atLeastOneSepFirstInternalLogic(prodOccurrence, options, result, laKey);3287 }3288 finally {3289 if (nestedName !== undefined) {3290 this.nestedRuleFinallyClause(laKey, nestedName);3291 }3292 }3293 };3294 Parser.prototype.atLeastOneSepFirstInternalNoCst = function (prodOccurrence, options, result) {3295 var laKey = this.getKeyForAutomaticLookahead(keys_1.AT_LEAST_ONE_SEP_IDX, prodOccurrence);3296 return this.atLeastOneSepFirstInternalLogic(prodOccurrence, options, result, laKey);3297 };3298 Parser.prototype.atLeastOneSepFirstInternalLogic = function (prodOccurrence, options, result, key) {3299 var _this = this;3300 var action = options.DEF;3301 var separator = options.SEP;3302 var firstIterationLookaheadFunc = this.getLookaheadFuncForAtLeastOneSep(key, prodOccurrence);3303 var values = result.values;3304 var separators = result.separators;3305 // 1st iteration3306 if (firstIterationLookaheadFunc.call(this) === true) {3307 values.push(action.call(this));3308 var separatorLookAheadFunc = function () {3309 return _this.tokenMatcher(_this.LA(1), separator);3310 };3311 // 2nd..nth iterations3312 while (this.tokenMatcher(this.LA(1), separator) === true) {3313 // note that this CONSUME will never enter recovery because3314 // the separatorLookAheadFunc checks that the separator really does exist.3315 separators.push(this.CONSUME(separator));3316 values.push(action.call(this));3317 }3318 // Performance optimization: "attemptInRepetitionRecovery" will be defined as NOOP unless recovery is enabled3319 this.attemptInRepetitionRecovery(this.repetitionSepSecondInternal, [3320 prodOccurrence,3321 separator,3322 separatorLookAheadFunc,3323 action,3324 interpreter_1.NextTerminalAfterAtLeastOneSepWalker,3325 result3326 ], separatorLookAheadFunc, keys_1.AT_LEAST_ONE_SEP_IDX, prodOccurrence, interpreter_1.NextTerminalAfterAtLeastOneSepWalker);3327 }3328 else {3329 throw this.raiseEarlyExitException(prodOccurrence, lookahead_1.PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR, options.ERR_MSG);3330 }3331 return result;3332 };3333 Parser.prototype.manyInternal = function (prodOccurrence, actionORMethodDef, result) {3334 var laKey = this.getKeyForAutomaticLookahead(keys_1.MANY_IDX, prodOccurrence);3335 var nestedName = this.nestedRuleBeforeClause(actionORMethodDef, laKey);3336 try {3337 return this.manyInternalLogic(prodOccurrence, actionORMethodDef, result, laKey);3338 }3339 finally {3340 if (nestedName !== undefined) {3341 this.nestedRuleFinallyClause(laKey, nestedName);3342 }3343 }3344 };3345 Parser.prototype.manyInternalNoCst = function (prodOccurrence, actionORMethodDef, result) {3346 var laKey = this.getKeyForAutomaticLookahead(keys_1.MANY_IDX, prodOccurrence);3347 return this.manyInternalLogic(prodOccurrence, actionORMethodDef, result, laKey);3348 };3349 Parser.prototype.manyInternalLogic = function (prodOccurrence, actionORMethodDef, result, key) {3350 var _this = this;3351 var 
lookaheadFunction = this.getLookaheadFuncForMany(key, prodOccurrence);3352 var action;3353 var predicate;3354 if (actionORMethodDef.DEF !== undefined) {3355 action = actionORMethodDef.DEF;3356 predicate = actionORMethodDef.GATE;3357 // predicate present3358 if (predicate !== undefined) {3359 var orgLookaheadFunction_3 = lookaheadFunction;3360 lookaheadFunction = function () {3361 return (predicate.call(_this) && orgLookaheadFunction_3.call(_this));3362 };3363 }3364 }3365 else {3366 action = actionORMethodDef;3367 }3368 while (lookaheadFunction.call(this)) {3369 result.push(action.call(this));3370 }3371 // Performance optimization: "attemptInRepetitionRecovery" will be defined as NOOP unless recovery is enabled3372 this.attemptInRepetitionRecovery(this.manyInternal, [prodOccurrence, actionORMethodDef, result], lookaheadFunction, keys_1.MANY_IDX, prodOccurrence, interpreter_1.NextTerminalAfterManyWalker);3373 return result;3374 };3375 Parser.prototype.manySepFirstInternal = function (prodOccurrence, options, result) {3376 var laKey = this.getKeyForAutomaticLookahead(keys_1.MANY_SEP_IDX, prodOccurrence);3377 var nestedName = this.nestedRuleBeforeClause(options, laKey);3378 try {3379 return this.manySepFirstInternalLogic(prodOccurrence, options, result, laKey);3380 }3381 finally {3382 if (nestedName !== undefined) {3383 this.nestedRuleFinallyClause(laKey, nestedName);3384 }3385 }3386 };3387 Parser.prototype.manySepFirstInternalNoCst = function (prodOccurrence, options, result) {3388 var laKey = this.getKeyForAutomaticLookahead(keys_1.MANY_SEP_IDX, prodOccurrence);3389 return this.manySepFirstInternalLogic(prodOccurrence, options, result, laKey);3390 };3391 Parser.prototype.manySepFirstInternalLogic = function (prodOccurrence, options, result, key) {3392 var _this = this;3393 var action = options.DEF;3394 var separator = options.SEP;3395 var firstIterationLaFunc = this.getLookaheadFuncForManySep(key, prodOccurrence);3396 var values = result.values;3397 var separators = result.separators;3398 // 1st iteration3399 if (firstIterationLaFunc.call(this) === true) {3400 values.push(action.call(this));3401 var separatorLookAheadFunc = function () {3402 return _this.tokenMatcher(_this.LA(1), separator);3403 };3404 // 2nd..nth iterations3405 while (this.tokenMatcher(this.LA(1), separator) === true) {3406 // note that this CONSUME will never enter recovery because3407 // the separatorLookAheadFunc checks that the separator really does exist.3408 separators.push(this.CONSUME(separator));3409 values.push(action.call(this));3410 }3411 // Performance optimization: "attemptInRepetitionRecovery" will be defined as NOOP unless recovery is enabled3412 this.attemptInRepetitionRecovery(this.repetitionSepSecondInternal, [3413 prodOccurrence,3414 separator,3415 separatorLookAheadFunc,3416 action,3417 interpreter_1.NextTerminalAfterManySepWalker,3418 result3419 ], separatorLookAheadFunc, keys_1.MANY_SEP_IDX, prodOccurrence, interpreter_1.NextTerminalAfterManySepWalker);3420 }3421 return result;3422 };3423 Parser.prototype.repetitionSepSecondInternal = function (prodOccurrence, separator, separatorLookAheadFunc, action, nextTerminalAfterWalker, result) {3424 while (separatorLookAheadFunc()) {3425 // note that this CONSUME will never enter recovery because3426 // the separatorLookAheadFunc checks that the separator really does exist.3427 result.separators.push(this.CONSUME(separator));3428 result.values.push(action.call(this));3429 }3430 // we can only arrive to this function after an error3431 // has occurred (hence 
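// Grammar-author view of manyInternalLogic / manySepFirstInternalLogic above:
// MANY repeats zero or more times, MANY_SEP additionally consumes a separator
// between iterations. Illustrative fragment; LBracketTok, RBracketTok, CommaTok
// and the this.value / this.statement subrules are assumed to exist elsewhere:
this.arrayLiteral = this.RULE("arrayLiteral", () => {
    this.CONSUME(LBracketTok);
    this.MANY_SEP({
        SEP: CommaTok,
        DEF: () => this.SUBRULE(this.value)
    });
    this.CONSUME(RBracketTok);
});

// Equivalent zero-or-more loop without separators:
this.statements = this.RULE("statements", () => {
    this.MANY(() => this.SUBRULE(this.statement));
});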
the name 'second') so the following3432 // IF will always be entered, its possible to remove it...3433 // however it is kept to avoid confusion and be consistent.3434 // Performance optimization: "attemptInRepetitionRecovery" will be defined as NOOP unless recovery is enabled3435 /* istanbul ignore else */3436 this.attemptInRepetitionRecovery(this.repetitionSepSecondInternal, [3437 prodOccurrence,3438 separator,3439 separatorLookAheadFunc,3440 action,3441 nextTerminalAfterWalker,3442 result3443 ], separatorLookAheadFunc, keys_1.AT_LEAST_ONE_SEP_IDX, prodOccurrence, nextTerminalAfterWalker);3444 };3445 Parser.prototype.orInternalNoCst = function (altsOrOpts, occurrence) {3446 var alts = utils_1.isArray(altsOrOpts)3447 ? altsOrOpts3448 : altsOrOpts.DEF;3449 var laFunc = this.getLookaheadFuncForOr(occurrence, alts);3450 var altIdxToTake = laFunc.call(this, alts);3451 if (altIdxToTake !== undefined) {3452 var chosenAlternative = alts[altIdxToTake];3453 return chosenAlternative.ALT.call(this);3454 }3455 this.raiseNoAltException(occurrence, altsOrOpts.ERR_MSG);3456 };3457 Parser.prototype.orInternal = function (altsOrOpts, occurrence) {3458 var laKey = this.getKeyForAutomaticLookahead(keys_1.OR_IDX, occurrence);3459 var nestedName = this.nestedRuleBeforeClause(altsOrOpts, laKey);3460 try {3461 var alts = utils_1.isArray(altsOrOpts)3462 ? altsOrOpts3463 : altsOrOpts.DEF;3464 var laFunc = this.getLookaheadFuncForOr(occurrence, alts);3465 var altIdxToTake = laFunc.call(this, alts);3466 if (altIdxToTake !== undefined) {3467 var chosenAlternative = alts[altIdxToTake];3468 var nestedAltBeforeClauseResult = this.nestedAltBeforeClause(chosenAlternative, occurrence, keys_1.OR_IDX, altIdxToTake);3469 try {3470 return chosenAlternative.ALT.call(this);3471 }3472 finally {3473 if (nestedAltBeforeClauseResult !== undefined) {3474 this.nestedRuleFinallyClause(nestedAltBeforeClauseResult.shortName, nestedAltBeforeClauseResult.nestedName);3475 }3476 }3477 }3478 this.raiseNoAltException(occurrence, altsOrOpts.ERR_MSG);3479 }3480 finally {3481 if (nestedName !== undefined) {3482 this.nestedRuleFinallyClause(laKey, nestedName);3483 }3484 }3485 };3486 // this actually returns a number, but it is always used as a string (object prop key)3487 Parser.prototype.getKeyForAutomaticLookahead = function (dslMethodIdx, occurrence) {3488 var currRuleShortName = this.getLastExplicitRuleShortName();3489 /* tslint:disable */3490 return keys_1.getKeyForAutomaticLookahead(currRuleShortName, dslMethodIdx, occurrence);3491 /* tslint:enable */3492 };3493 Parser.prototype.getLookaheadFuncForOr = function (occurrence, alts) {3494 var key = this.getKeyForAutomaticLookahead(keys_1.OR_IDX, occurrence);3495 var laFunc = this.classLAFuncs.get(key);3496 if (laFunc === undefined) {3497 var ruleName = this.getCurrRuleFullName();3498 var ruleGrammar = this.getGAstProductions().get(ruleName);3499 // note that hasPredicates is only computed once.3500 var hasPredicates = utils_1.some(alts, function (currAlt) {3501 return utils_1.isFunction(currAlt.GATE);3502 });3503 laFunc = lookahead_1.buildLookaheadFuncForOr(occurrence, ruleGrammar, this.maxLookahead, hasPredicates, this.dynamicTokensEnabled, this.lookAheadBuilderForAlternatives);3504 this.classLAFuncs.put(key, laFunc);3505 return laFunc;3506 }3507 else {3508 return laFunc;3509 }3510 };3511 // Automatic lookahead calculation3512 Parser.prototype.getLookaheadFuncForOption = function (key, occurrence) {3513 return this.getLookaheadFuncFor(key, occurrence, this.maxLookahead, 
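// orInternal above picks an alternative via a computed lookahead function and
// falls back to raiseNoAltException when nothing matches. A sketch of the
// corresponding grammar code; the subrules and the allowExpressions flag are
// assumptions made for the example:
this.statement = this.RULE("statement", () => {
    return this.OR({
        DEF: [
            { ALT: () => this.SUBRULE(this.returnStatement) },
            { ALT: () => this.SUBRULE(this.ifStatement) },
            // a GATE on a single alternative disables it unless the predicate holds
            {
                GATE: () => this.allowExpressions === true,
                ALT: () => this.SUBRULE(this.expressionStatement)
            }
        ],
        ERR_MSG: "a statement was expected"
    });
});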
lookahead_1.PROD_TYPE.OPTION);3514 };3515 Parser.prototype.getLookaheadFuncForMany = function (key, occurrence) {3516 return this.getLookaheadFuncFor(key, occurrence, this.maxLookahead, lookahead_1.PROD_TYPE.REPETITION);3517 };3518 Parser.prototype.getLookaheadFuncForManySep = function (key, occurrence) {3519 return this.getLookaheadFuncFor(key, occurrence, this.maxLookahead, lookahead_1.PROD_TYPE.REPETITION_WITH_SEPARATOR);3520 };3521 Parser.prototype.getLookaheadFuncForAtLeastOne = function (key, occurrence) {3522 return this.getLookaheadFuncFor(key, occurrence, this.maxLookahead, lookahead_1.PROD_TYPE.REPETITION_MANDATORY);3523 };3524 Parser.prototype.getLookaheadFuncForAtLeastOneSep = function (key, occurrence) {3525 return this.getLookaheadFuncFor(key, occurrence, this.maxLookahead, lookahead_1.PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR);3526 };3527 // TODO: consider caching the error message computed information3528 Parser.prototype.raiseNoAltException = function (occurrence, errMsgTypes) {3529 var ruleName = this.getCurrRuleFullName();3530 var ruleGrammar = this.getGAstProductions().get(ruleName);3531 // TODO: getLookaheadPathsForOr can be slow for large enough maxLookahead and certain grammars, consider caching ?3532 var lookAheadPathsPerAlternative = lookahead_1.getLookaheadPathsForOr(occurrence, ruleGrammar, this.maxLookahead);3533 var actualTokens = [];3534 for (var i = 1; i < this.maxLookahead; i++) {3535 actualTokens.push(this.LA(i));3536 }3537 var errMsg = this.errorMessageProvider.buildNoViableAltMessage({3538 expectedPathsPerAlt: lookAheadPathsPerAlternative,3539 actual: actualTokens,3540 customUserDescription: errMsgTypes,3541 ruleName: this.getCurrRuleFullName()3542 });3543 throw this.SAVE_ERROR(new exceptions_public_1.exceptions.NoViableAltException(errMsg, this.LA(1)));3544 };3545 Parser.prototype.getLookaheadFuncFor = function (key, occurrence, maxLookahead, prodType) {3546 var laFunc = this.classLAFuncs.get(key);3547 if (laFunc === undefined) {3548 var ruleName = this.getCurrRuleFullName();3549 var ruleGrammar = this.getGAstProductions().get(ruleName);3550 laFunc = lookahead_1.buildLookaheadFuncForOptionalProd(occurrence, ruleGrammar, maxLookahead, this.dynamicTokensEnabled, prodType, this.lookAheadBuilderForOptional);3551 this.classLAFuncs.put(key, laFunc);3552 return laFunc;3553 }3554 else {3555 return laFunc;3556 }3557 };3558 // TODO: consider caching the error message computed information3559 Parser.prototype.raiseEarlyExitException = function (occurrence, prodType, userDefinedErrMsg) {3560 var ruleName = this.getCurrRuleFullName();3561 var ruleGrammar = this.getGAstProductions().get(ruleName);3562 var lookAheadPathsPerAlternative = lookahead_1.getLookaheadPathsForOptionalProd(occurrence, ruleGrammar, prodType, this.maxLookahead);3563 var insideProdPaths = lookAheadPathsPerAlternative[0];3564 var actualTokens = [];3565 for (var i = 1; i < this.maxLookahead; i++) {3566 actualTokens.push(this.LA(i));3567 }3568 var msg = this.errorMessageProvider.buildEarlyExitMessage({3569 expectedIterationPaths: insideProdPaths,3570 actual: actualTokens,3571 previous: this.LA(0),3572 customUserDescription: userDefinedErrMsg,3573 ruleName: ruleName3574 });3575 throw this.SAVE_ERROR(new exceptions_public_1.exceptions.EarlyExitException(msg, this.LA(1), this.LA(0)));3576 };3577 Parser.prototype.getLastExplicitRuleShortName = function () {3578 var lastExplictIndex = this.LAST_EXPLICIT_RULE_STACK[this.LAST_EXPLICIT_RULE_STACK.length - 1];3579 return 
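// The lookahead functions cached by getLookaheadFuncFor above are built with
// this.maxLookahead tokens of context. Grammars whose alternatives only diverge
// after several tokens may need a larger value, configured per parser instance.
// A hedged sketch -- allTokens stands for the parser's token vocabulary:
class DeepLookaheadParser extends Parser {
    constructor(input) {
        super(input, allTokens, {
            maxLookahead: 4,   // how many tokens the generated lookahead functions may inspect
            outputCst: false   // skip CST building when only validation / side effects matter
        });
        // ... rule definitions via this.RULE(...) go here ...
        Parser.performSelfAnalysis(this);
    }
}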
this.RULE_STACK[lastExplictIndex];3580 };3581 Parser.prototype.getLastExplicitRuleShortNameNoCst = function () {3582 var ruleStack = this.RULE_STACK;3583 return ruleStack[ruleStack.length - 1];3584 };3585 Parser.prototype.getPreviousExplicitRuleShortName = function () {3586 var lastExplicitIndex = this.LAST_EXPLICIT_RULE_STACK[this.LAST_EXPLICIT_RULE_STACK.length - 2];3587 return this.RULE_STACK[lastExplicitIndex];3588 };3589 Parser.prototype.getPreviousExplicitRuleShortNameNoCst = function () {3590 var ruleStack = this.RULE_STACK;3591 return ruleStack[ruleStack.length - 2];3592 };3593 Parser.prototype.getPreviousExplicitRuleOccurenceIndex = function () {3594 var lastExplicitIndex = this.LAST_EXPLICIT_RULE_STACK[this.LAST_EXPLICIT_RULE_STACK.length - 2];3595 return this.RULE_OCCURRENCE_STACK[lastExplicitIndex];3596 };3597 Parser.prototype.getPreviousExplicitRuleOccurenceIndexNoCst = function () {3598 var occurrenceStack = this.RULE_OCCURRENCE_STACK;3599 return occurrenceStack[occurrenceStack.length - 2];3600 };3601 Parser.prototype.nestedRuleBeforeClause = function (methodOpts, laKey) {3602 var nestedName;3603 if (methodOpts.NAME !== undefined) {3604 nestedName = methodOpts.NAME;3605 this.nestedRuleInvocationStateUpdate(nestedName, laKey);3606 return nestedName;3607 }3608 else {3609 return undefined;3610 }3611 };3612 Parser.prototype.nestedAltBeforeClause = function (methodOpts, occurrence, methodKeyIdx, altIdx) {3613 var ruleIdx = this.getLastExplicitRuleShortName();3614 var shortName = keys_1.getKeyForAltIndex(ruleIdx, methodKeyIdx, occurrence, altIdx);3615 var nestedName;3616 if (methodOpts.NAME !== undefined) {3617 nestedName = methodOpts.NAME;3618 this.nestedRuleInvocationStateUpdate(nestedName, shortName);3619 return {3620 shortName: shortName,3621 nestedName: nestedName3622 };3623 }3624 else {3625 return undefined;3626 }3627 };3628 Parser.prototype.nestedRuleFinallyClause = function (laKey, nestedName) {3629 var cstStack = this.CST_STACK;3630 var nestedRuleCst = cstStack[cstStack.length - 1];3631 this.nestedRuleFinallyStateUpdate();3632 // this return a different result than the previous invocation because "nestedRuleFinallyStateUpdate" pops the cst stack3633 var parentCstNode = cstStack[cstStack.length - 1];3634 cst_1.addNoneTerminalToCst(parentCstNode, nestedName, nestedRuleCst);3635 };3636 Parser.prototype.cstPostTerminal = function (tokType, consumedToken) {3637 var currTokTypeName = tokType.tokenName;3638 var rootCst = this.CST_STACK[this.CST_STACK.length - 1];3639 cst_1.addTerminalToCst(rootCst, consumedToken, currTokTypeName);3640 };3641 Parser.prototype.cstPostNonTerminal = function (ruleCstResult, ruleName) {3642 cst_1.addNoneTerminalToCst(this.CST_STACK[this.CST_STACK.length - 1], ruleName, ruleCstResult);3643 };3644 Parser.prototype.cstPostNonTerminalRecovery = function (ruleCstResult, ruleName) {3645 // TODO: assumes not first rule, is this assumption always correct?3646 cst_1.addNoneTerminalToCst(this.CST_STACK[this.CST_STACK.length - 2], ruleName, ruleCstResult);3647 };3648 Object.defineProperty(Parser.prototype, "input", {3649 get: function () {3650 return this.tokVector;3651 },3652 // lexer related methods3653 set: function (newInput) {3654 this.reset();3655 this.tokVector = newInput;3656 this.tokVectorLength = newInput.length;3657 },3658 enumerable: true,3659 configurable: true3660 });3661 // skips a token and returns the next token3662 Parser.prototype.SKIP_TOKEN = function () {3663 if (this.currIdx <= this.tokVector.length - 2) {3664 this.consumeToken();3665 
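// The "input" setter defined above calls reset() before storing the new token
// vector, so a single parser instance can be reused across many inputs instead of
// being re-created. Sketch, assuming jsonLexer, JsonParserInstance and its top
// level rule "json" were created elsewhere:
function parseOnce(text) {
    const lexResult = jsonLexer.tokenize(text);
    JsonParserInstance.input = lexResult.tokens; // also resets parser state
    const value = JsonParserInstance.json();     // invoke the top level rule
    return {
        value: value,
        lexErrors: lexResult.errors,
        parseErrors: JsonParserInstance.errors
    };
}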
return this.LA(1);3666 }3667 else {3668 return exports.END_OF_FILE;3669 }3670 };3671 // Lexer (accessing Token vector) related methods which can be overridden to implement lazy lexers3672 // or lexers dependent on parser context.3673 Parser.prototype.LA = function (howMuch) {3674 // TODO: is this optimization (saving tokVectorLength benefits?)3675 if (this.currIdx + howMuch < 0 ||3676 this.tokVectorLength <= this.currIdx + howMuch) {3677 return exports.END_OF_FILE;3678 }3679 else {3680 return this.tokVector[this.currIdx + howMuch];3681 }3682 };3683 Parser.prototype.consumeToken = function () {3684 this.currIdx++;3685 };3686 Parser.prototype.exportLexerState = function () {3687 return this.currIdx;3688 };3689 Parser.prototype.importLexerState = function (newState) {3690 this.currIdx = newState;3691 };3692 Parser.prototype.resetLexerState = function () {3693 this.currIdx = -1;3694 };3695 Parser.prototype.moveToTerminatedState = function () {3696 this.currIdx = this.tokVector.length - 1;3697 };3698 Parser.prototype.lookAheadBuilderForOptional = function (alt, tokenMatcher, dynamicTokensEnabled) {3699 return lookahead_1.buildSingleAlternativeLookaheadFunction(alt, tokenMatcher, dynamicTokensEnabled);3700 };3701 Parser.prototype.lookAheadBuilderForAlternatives = function (alts, hasPredicates, tokenMatcher, dynamicTokensEnabled) {3702 return lookahead_1.buildAlternativesLookAheadFunc(alts, hasPredicates, tokenMatcher, dynamicTokensEnabled);3703 };3704 Parser.NO_RESYNC = false;3705 // Set this flag to true if you don't want the Parser to throw error when problems in it's definition are detected.3706 // (normally during the parser's constructor).3707 // This is a design time flag, it will not affect the runtime error handling of the parser, just design time errors,3708 // for example: duplicate rule names, referencing an unresolved subrule, ect...3709 // This flag should not be enabled during normal usage, it is used in special situations, for example when3710 // needing to display the parser definition errors in some GUI(online playground).3711 Parser.DEFER_DEFINITION_ERRORS_HANDLING = false;3712 return Parser;3713}());3714exports.Parser = Parser;3715function InRuleRecoveryException(message) {3716 this.name = IN_RULE_RECOVERY_EXCEPTION;3717 this.message = message;3718}3719InRuleRecoveryException.prototype = Error.prototype;3720//# sourceMappingURL=parser_public.js.map3721/***/ }),3722/* 7 */3723/***/ (function(module, exports, __webpack_require__) {3724"use strict";3725/**3726 * module used to cache static information about parsers,3727 */3728Object.defineProperty(exports, "__esModule", { value: true });3729var lang_extensions_1 = __webpack_require__(3);3730var utils_1 = __webpack_require__(0);3731exports.CLASS_TO_DEFINITION_ERRORS = new lang_extensions_1.HashTable();3732exports.CLASS_TO_SELF_ANALYSIS_DONE = new lang_extensions_1.HashTable();3733exports.CLASS_TO_GRAMMAR_PRODUCTIONS = new lang_extensions_1.HashTable();3734function getProductionsForClass(className) {3735 return getFromNestedHashTable(className, exports.CLASS_TO_GRAMMAR_PRODUCTIONS);3736}3737exports.getProductionsForClass = getProductionsForClass;3738exports.CLASS_TO_RESYNC_FOLLOW_SETS = new lang_extensions_1.HashTable();3739function getResyncFollowsForClass(className) {3740 return getFromNestedHashTable(className, exports.CLASS_TO_RESYNC_FOLLOW_SETS);3741}3742exports.getResyncFollowsForClass = getResyncFollowsForClass;3743function setResyncFollowsForClass(className, followSet) {3744 
exports.CLASS_TO_RESYNC_FOLLOW_SETS.put(className, followSet);3745}3746exports.setResyncFollowsForClass = setResyncFollowsForClass;3747exports.CLASS_TO_LOOKAHEAD_FUNCS = new lang_extensions_1.HashTable();3748function getLookaheadFuncsForClass(className) {3749 return getFromNestedHashTable(className, exports.CLASS_TO_LOOKAHEAD_FUNCS);3750}3751exports.getLookaheadFuncsForClass = getLookaheadFuncsForClass;3752exports.CLASS_TO_FIRST_AFTER_REPETITION = new lang_extensions_1.HashTable();3753function getFirstAfterRepForClass(className) {3754 return getFromNestedHashTable(className, exports.CLASS_TO_FIRST_AFTER_REPETITION);3755}3756exports.getFirstAfterRepForClass = getFirstAfterRepForClass;3757exports.CLASS_TO_PRODUCTION_OVERRIDEN = new lang_extensions_1.HashTable();3758function getProductionOverriddenForClass(className) {3759 return getFromNestedHashTable(className, exports.CLASS_TO_PRODUCTION_OVERRIDEN);3760}3761exports.getProductionOverriddenForClass = getProductionOverriddenForClass;3762exports.CLASS_TO_CST_DICT_DEF_PER_RULE = new lang_extensions_1.HashTable();3763function getCstDictDefPerRuleForClass(className) {3764 return getFromNestedHashTable(className, exports.CLASS_TO_CST_DICT_DEF_PER_RULE);3765}3766exports.getCstDictDefPerRuleForClass = getCstDictDefPerRuleForClass;3767exports.CLASS_TO_BASE_CST_VISITOR = new lang_extensions_1.HashTable();3768exports.CLASS_TO_BASE_CST_VISITOR_WITH_DEFAULTS = new lang_extensions_1.HashTable();3769exports.CLASS_TO_ALL_RULE_NAMES = new lang_extensions_1.HashTable();3770// TODO reflective test to verify this has not changed, for example (OPTION6 added)3771exports.MAX_OCCURRENCE_INDEX = 5;3772function getFromNestedHashTable(className, hashTable) {3773 var result = hashTable.get(className);3774 if (result === undefined) {3775 hashTable.put(className, new lang_extensions_1.HashTable());3776 result = hashTable.get(className);3777 }3778 return result;3779}3780function clearCache() {3781 var hasTables = utils_1.filter(utils_1.values(module.exports), function (currHashTable) { return currHashTable instanceof lang_extensions_1.HashTable; });3782 utils_1.forEach(hasTables, function (currHashTable) { return currHashTable.clear(); });3783}3784exports.clearCache = clearCache;3785//# sourceMappingURL=cache.js.map3786/***/ }),3787/* 8 */3788/***/ (function(module, exports, __webpack_require__) {3789"use strict";3790Object.defineProperty(exports, "__esModule", { value: true });3791var lexer_1 = __webpack_require__(22);3792var utils_1 = __webpack_require__(0);3793var tokens_1 = __webpack_require__(4);3794var LexerDefinitionErrorType;3795(function (LexerDefinitionErrorType) {3796 LexerDefinitionErrorType[LexerDefinitionErrorType["MISSING_PATTERN"] = 0] = "MISSING_PATTERN";3797 LexerDefinitionErrorType[LexerDefinitionErrorType["INVALID_PATTERN"] = 1] = "INVALID_PATTERN";3798 LexerDefinitionErrorType[LexerDefinitionErrorType["EOI_ANCHOR_FOUND"] = 2] = "EOI_ANCHOR_FOUND";3799 LexerDefinitionErrorType[LexerDefinitionErrorType["UNSUPPORTED_FLAGS_FOUND"] = 3] = "UNSUPPORTED_FLAGS_FOUND";3800 LexerDefinitionErrorType[LexerDefinitionErrorType["DUPLICATE_PATTERNS_FOUND"] = 4] = "DUPLICATE_PATTERNS_FOUND";3801 LexerDefinitionErrorType[LexerDefinitionErrorType["INVALID_GROUP_TYPE_FOUND"] = 5] = "INVALID_GROUP_TYPE_FOUND";3802 LexerDefinitionErrorType[LexerDefinitionErrorType["PUSH_MODE_DOES_NOT_EXIST"] = 6] = "PUSH_MODE_DOES_NOT_EXIST";3803 LexerDefinitionErrorType[LexerDefinitionErrorType["MULTI_MODE_LEXER_WITHOUT_DEFAULT_MODE"] = 7] = "MULTI_MODE_LEXER_WITHOUT_DEFAULT_MODE";3804 
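// The cache module above stores lookahead functions, resync follow sets and other
// static analysis results keyed by the parser's class name. When the same parser
// class is re-defined at runtime (tests, hot reload, an online playground) the
// exported clearCache() utility can be used to drop those per-class entries.
// A hedged usage sketch:
const chevrotain = require("chevrotain");

function rebuildParserClass(defineParserClass) {
    chevrotain.clearCache();    // discard cached analysis of previously defined classes
    return defineParserClass(); // re-run the code that declares the Parser subclass
}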
LexerDefinitionErrorType[LexerDefinitionErrorType["MULTI_MODE_LEXER_WITHOUT_MODES_PROPERTY"] = 8] = "MULTI_MODE_LEXER_WITHOUT_MODES_PROPERTY";3805 LexerDefinitionErrorType[LexerDefinitionErrorType["MULTI_MODE_LEXER_DEFAULT_MODE_VALUE_DOES_NOT_EXIST"] = 9] = "MULTI_MODE_LEXER_DEFAULT_MODE_VALUE_DOES_NOT_EXIST";3806 LexerDefinitionErrorType[LexerDefinitionErrorType["LEXER_DEFINITION_CANNOT_CONTAIN_UNDEFINED"] = 10] = "LEXER_DEFINITION_CANNOT_CONTAIN_UNDEFINED";3807 LexerDefinitionErrorType[LexerDefinitionErrorType["SOI_ANCHOR_FOUND"] = 11] = "SOI_ANCHOR_FOUND";3808 LexerDefinitionErrorType[LexerDefinitionErrorType["EMPTY_MATCH_PATTERN"] = 12] = "EMPTY_MATCH_PATTERN";3809 LexerDefinitionErrorType[LexerDefinitionErrorType["NO_LINE_BREAKS_FLAGS"] = 13] = "NO_LINE_BREAKS_FLAGS";3810 LexerDefinitionErrorType[LexerDefinitionErrorType["UNREACHABLE_PATTERN"] = 14] = "UNREACHABLE_PATTERN";3811})(LexerDefinitionErrorType = exports.LexerDefinitionErrorType || (exports.LexerDefinitionErrorType = {}));3812var DEFAULT_LEXER_CONFIG = {3813 deferDefinitionErrorsHandling: false,3814 positionTracking: "full",3815 lineTerminatorsPattern: /\n|\r\n?/g3816};3817Object.freeze(DEFAULT_LEXER_CONFIG);3818var Lexer = /** @class */ (function () {3819 /**3820 * @param {SingleModeLexerDefinition | IMultiModeLexerDefinition} lexerDefinition -3821 * Structure composed of constructor functions for the Tokens types this lexer will support.3822 *3823 * In the case of {SingleModeLexerDefinition} the structure is simply an array of TokenTypes.3824 * In the case of {IMultiModeLexerDefinition} the structure is an object with two properties:3825 * 1. a "modes" property where each value is an array of TokenTypes.3826 * 2. a "defaultMode" property specifying the initial lexer mode.3827 *3828 * for example:3829 * {3830 * "modes" : {3831 * "modeX" : [Token1, Token2]3832 * "modeY" : [Token3, Token4]3833 * }3834 *3835 * "defaultMode" : "modeY"3836 * }3837 *3838 * A lexer with {MultiModesDefinition} is simply multiple Lexers where only one (mode) can be active at the same time.3839 * This is useful for lexing languages where there are different lexing rules depending on context.3840 *3841 * The current lexing mode is selected via a "mode stack".3842 * The last (peek) value in the stack will be the current mode of the lexer.3843 *3844 * Each Token Type can define that it will cause the Lexer to (after consuming an "instance" of the Token):3845 * 1. PUSH_MODE : push a new mode to the "mode stack"3846 * 2. POP_MODE : pop the last mode from the "mode stack"3847 *3848 * Examples:3849 * export class Attribute {3850 * static PATTERN = ...3851 * static PUSH_MODE = "modeY"3852 * }3853 *3854 * export class EndAttribute {3855 * static PATTERN = ...3856 * static POP_MODE = true3857 * }3858 *3859 * The TokenTypes must be in one of these forms:3860 *3861 * 1. With a PATTERN property that has a RegExp value for tokens to match:3862 * example: -->class Integer { static PATTERN = /[1-9]\d }<--3863 *3864 * 2. With a PATTERN property that has the value of the var Lexer.NA defined above.3865 * This is a convenience form used to avoid matching Token classes that only act as categories.3866 * example: -->class Keyword { static PATTERN = NA }<--3867 *3868 *3869 * The following RegExp patterns are not supported:3870 * a. '$' for match at end of input3871 * b. /b global flag3872 * c. 
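// A concrete (illustrative) version of the multi-mode definition described in the
// doc comment above, written with createToken. The push_mode / pop_mode properties
// are assumed here as the createToken equivalents of the static PUSH_MODE / POP_MODE
// shown in that doc comment:
const { createToken, Lexer } = require("chevrotain");

const EnterString = createToken({ name: "EnterString", pattern: /"/, push_mode: "string_mode" });
const ExitString = createToken({ name: "ExitString", pattern: /"/, pop_mode: true });
const StringChars = createToken({ name: "StringChars", pattern: /[^"]+/, line_breaks: true });
const Identifier = createToken({ name: "Identifier", pattern: /[a-zA-Z]\w*/ });

const multiModeLexer = new Lexer({
    modes: {
        default_mode: [EnterString, Identifier],
        string_mode: [ExitString, StringChars]
    },
    defaultMode: "default_mode"
});

// Only the token types of the currently active mode (top of the mode stack) are tried.
const multiModeResult = multiModeLexer.tokenize('abc"hello"def');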
/m multi-line flag3873 *3874 * The Lexer will identify the first pattern that matches, Therefor the order of Token Constructors may be significant.3875 * For example when one pattern may match a prefix of another pattern.3876 *3877 * Note that there are situations in which we may wish to order the longer pattern after the shorter one.3878 * For example: keywords vs Identifiers.3879 * 'do'(/do/) and 'donald'(/w+)3880 *3881 * * If the Identifier pattern appears before the 'do' pattern, both 'do' and 'donald'3882 * will be lexed as an Identifier.3883 *3884 * * If the 'do' pattern appears before the Identifier pattern 'do' will be lexed correctly as a keyword.3885 * however 'donald' will be lexed as TWO separate tokens: keyword 'do' and identifier 'nald'.3886 *3887 * To resolve this problem, add a static property on the keyword's constructor named: LONGER_ALT3888 * example:3889 *3890 * export class Identifier extends Keyword { static PATTERN = /[_a-zA-Z][_a-zA-Z0-9]/ }3891 * export class Keyword Token {3892 * static PATTERN = Lexer.NA3893 * static LONGER_ALT = Identifier3894 * }3895 * export class Do extends Keyword { static PATTERN = /do/ }3896 * export class While extends Keyword { static PATTERN = /while/ }3897 * export class Return extends Keyword { static PATTERN = /return/ }3898 *3899 * The lexer will then also attempt to match a (longer) Identifier each time a keyword is matched.3900 *3901 *3902 * @param {ILexerConfig} [config=DEFAULT_LEXER_CONFIG] -3903 * The Lexer's configuration @see {ILexerConfig} for details.3904 */3905 function Lexer(lexerDefinition, config) {3906 if (config === void 0) { config = DEFAULT_LEXER_CONFIG; }3907 var _this = this;3908 this.lexerDefinition = lexerDefinition;3909 this.lexerDefinitionErrors = [];3910 this.patternIdxToConfig = {};3911 this.modes = [];3912 this.emptyGroups = {};3913 this.config = undefined;3914 this.trackStartLines = true;3915 this.trackEndLines = true;3916 this.hasCustom = false;3917 if (typeof config === "boolean") {3918 throw Error("The second argument to the Lexer constructor is now an ILexerConfig Object.\n" +3919 "a boolean 2nd argument is no longer supported");3920 }3921 // todo: defaults func?3922 this.config = utils_1.merge(DEFAULT_LEXER_CONFIG, config);3923 if (this.config.lineTerminatorsPattern ===3924 DEFAULT_LEXER_CONFIG.lineTerminatorsPattern) {3925 // optimized built-in implementation for the defaults definition of lineTerminators3926 this.config.lineTerminatorsPattern = lexer_1.LineTerminatorOptimizedTester;3927 }3928 this.trackStartLines = /full|onlyStart/i.test(this.config.positionTracking);3929 this.trackEndLines = /full/i.test(this.config.positionTracking);3930 var hasOnlySingleMode = true;3931 var actualDefinition;3932 // Convert SingleModeLexerDefinition into a IMultiModeLexerDefinition.3933 if (utils_1.isArray(lexerDefinition)) {3934 actualDefinition = { modes: {} };3935 actualDefinition.modes[lexer_1.DEFAULT_MODE] = utils_1.cloneArr(lexerDefinition);3936 actualDefinition[lexer_1.DEFAULT_MODE] = lexer_1.DEFAULT_MODE;3937 }3938 else {3939 // no conversion needed, input should already be a IMultiModeLexerDefinition3940 hasOnlySingleMode = false;3941 actualDefinition = utils_1.cloneObj(lexerDefinition);3942 }3943 this.lexerDefinitionErrors = this.lexerDefinitionErrors.concat(lexer_1.performRuntimeChecks(actualDefinition, this.trackStartLines));3944 // for extra robustness to avoid throwing an none informative error message3945 actualDefinition.modes = actualDefinition.modes3946 ? 
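// createToken form of the LONGER_ALT idiom described in the doc comment above:
// the "do" keyword is listed before the Identifier pattern, and the longer_alt
// property (assumed here as the createToken spelling of LONGER_ALT) lets the lexer
// still prefer a longer Identifier match. Token names are illustrative:
const { createToken, Lexer } = require("chevrotain");

const IdentifierTok = createToken({ name: "IdentifierTok", pattern: /[a-zA-Z_]\w*/ });
const DoTok = createToken({ name: "DoTok", pattern: /do/, longer_alt: IdentifierTok });

const keywordLexer = new Lexer([DoTok, IdentifierTok]);
keywordLexer.tokenize("do").tokens;          // one DoTok
keywordLexer.tokenize("doSomething").tokens; // one IdentifierTok, thanks to longer_alt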
actualDefinition.modes3947 : {};3948 // an error of undefined TokenTypes will be detected in "performRuntimeChecks" above.3949 // this transformation is to increase robustness in the case of partially invalid lexer definition.3950 utils_1.forEach(actualDefinition.modes, function (currModeValue, currModeName) {3951 actualDefinition.modes[currModeName] = utils_1.reject(currModeValue, function (currTokType) { return utils_1.isUndefined(currTokType); });3952 });3953 var allModeNames = utils_1.keys(actualDefinition.modes);3954 utils_1.forEach(actualDefinition.modes, function (currModDef, currModName) {3955 _this.modes.push(currModName);3956 _this.lexerDefinitionErrors = _this.lexerDefinitionErrors.concat(lexer_1.validatePatterns(currModDef, allModeNames));3957 // If definition errors were encountered, the analysis phase may fail unexpectedly/3958 // Considering a lexer with definition errors may never be used, there is no point3959 // to performing the analysis anyhow...3960 if (utils_1.isEmpty(_this.lexerDefinitionErrors)) {3961 tokens_1.augmentTokenTypes(currModDef);3962 var currAnalyzeResult = lexer_1.analyzeTokenTypes(currModDef);3963 _this.patternIdxToConfig[currModName] =3964 currAnalyzeResult.patternIdxToConfig;3965 _this.emptyGroups = utils_1.merge(_this.emptyGroups, currAnalyzeResult.emptyGroups);3966 _this.hasCustom =3967 currAnalyzeResult.hasCustom || _this.hasCustom;3968 }3969 });3970 this.defaultMode = actualDefinition.defaultMode;3971 if (!utils_1.isEmpty(this.lexerDefinitionErrors) &&3972 !this.config.deferDefinitionErrorsHandling) {3973 var allErrMessages = utils_1.map(this.lexerDefinitionErrors, function (error) {3974 return error.message;3975 });3976 var allErrMessagesString = allErrMessages.join("-----------------------\n");3977 throw new Error("Errors detected in definition of Lexer:\n" +3978 allErrMessagesString);3979 }3980 // Choose the relevant internal implementations for this specific parser.3981 // These implementations should be in-lined by the JavaScript engine3982 // to provide optimal performance in each scenario.3983 if (lexer_1.SUPPORT_STICKY) {3984 this.chopInput = utils_1.IDENTITY;3985 this.match = this.matchWithTest;3986 }3987 else {3988 this.updateLastIndex = utils_1.NOOP;3989 this.match = this.matchWithExec;3990 }3991 if (hasOnlySingleMode) {3992 this.handleModes = utils_1.NOOP;3993 }3994 if (this.trackStartLines === false) {3995 this.computeNewColumn = utils_1.IDENTITY;3996 }3997 if (this.trackEndLines === false) {3998 this.updateTokenEndLineColumnLocation = utils_1.NOOP;3999 }4000 if (/full/i.test(this.config.positionTracking)) {4001 this.createTokenInstance = this.createFullToken;4002 }4003 else if (/onlyStart/i.test(this.config.positionTracking)) {4004 this.createTokenInstance = this.createStartOnlyToken;4005 }4006 else if (/onlyOffset/i.test(this.config.positionTracking)) {4007 this.createTokenInstance = this.createOffsetOnlyToken;4008 }4009 else {4010 throw Error("Invalid <positionTracking> config option: \"" + this.config.positionTracking + "\"");4011 }4012 if (this.hasCustom) {4013 this.addToken = this.addTokenUsingPush;4014 }4015 else {4016 this.addToken = this.addTokenUsingMemberAccess;4017 }4018 }4019 /**4020 * Will lex(Tokenize) a string.4021 * Note that this can be called repeatedly on different strings as this method4022 * does not modify the state of the Lexer.4023 *4024 * @param {string} text - The string to lex4025 * @param {string} [initialMode] - The initial Lexer Mode to start with, by default this will be the first mode in the 
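// Usage sketch for tokenize() above: the lexer instance does not keep per-input
// state, so it is built once and reused; positionTracking controls which of the
// start/end line/column fields are filled in on each token. Names are illustrative:
const { createToken, Lexer } = require("chevrotain");

const NumberTok = createToken({ name: "NumberTok", pattern: /\d+/ });
const PlusTok = createToken({ name: "PlusTok", pattern: /\+/ });
const WsTok = createToken({
    name: "WsTok",
    pattern: /[ \t\r\n]+/,
    group: Lexer.SKIPPED,   // matched but thrown away, see Lexer.SKIPPED further below
    line_breaks: true       // this pattern may swallow line terminators
});
const calcTokens = [WsTok, NumberTok, PlusTok];

const calcLexer = new Lexer(calcTokens, { positionTracking: "onlyStart" });

const calcLexResult = calcLexer.tokenize("1 + 2 + 3");
if (calcLexResult.errors.length > 0) {
    // unexpected characters are skipped and reported here instead of throwing
    console.error(calcLexResult.errors);
}
// with "onlyStart" each token has startOffset/startLine/startColumn but no end* fields
console.log(calcLexResult.tokens.map(t => t.image)); // ["1", "+", "2", "+", "3"]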
lexer's4026 * definition. If the lexer has no explicit modes it will be the implicit single 'default_mode' mode.4027 *4028 * @returns {ILexingResult}4029 */4030 Lexer.prototype.tokenize = function (text, initialMode) {4031 if (initialMode === void 0) { initialMode = this.defaultMode; }4032 if (!utils_1.isEmpty(this.lexerDefinitionErrors)) {4033 var allErrMessages = utils_1.map(this.lexerDefinitionErrors, function (error) {4034 return error.message;4035 });4036 var allErrMessagesString = allErrMessages.join("-----------------------\n");4037 throw new Error("Unable to Tokenize because Errors detected in definition of Lexer:\n" +4038 allErrMessagesString);4039 }4040 var lexResult = this.tokenizeInternal(text, initialMode);4041 return lexResult;4042 };4043 // There is quite a bit of duplication between this and "tokenizeInternalLazy"4044 // This is intentional due to performance considerations.4045 Lexer.prototype.tokenizeInternal = function (text, initialMode) {4046 var _this = this;4047 var i, j, matchAltImage, longerAltIdx, matchedImage, imageLength, group, tokType, newToken, errLength, droppedChar, msg, match;4048 var orgText = text;4049 var orgLength = orgText.length;4050 var offset = 0;4051 var matchedTokensIndex = 0;4052 // initializing the tokensArray to the "guessed" size.4053 // guessing too little will still reduce the number of array re-sizes on pushes.4054 // guessing too large (Tested by guessing x4 too large) may cost a bit more of memory4055 // but would still have a faster runtime by avoiding (All but one) array resizing.4056 var guessedNumberOfTokens = this.hasCustom4057 ? 0 // will break custom token pattern APIs the matchedTokens array will contain undefined elements.4058 : Math.floor(text.length / 10);4059 var matchedTokens = new Array(guessedNumberOfTokens);4060 var errors = [];4061 var line = this.trackStartLines ? 1 : undefined;4062 var column = this.trackStartLines ? 1 : undefined;4063 var groups = lexer_1.cloneEmptyGroups(this.emptyGroups);4064 var trackLines = this.trackStartLines;4065 var lineTerminatorPattern = this.config.lineTerminatorsPattern;4066 var currModePatternsLength = 0;4067 var patternIdxToConfig = [];4068 var modeStack = [];4069 var pop_mode = function (popToken) {4070 // TODO: perhaps avoid this error in the edge case there is no more input?4071 if (modeStack.length === 1 &&4072 // if we have both a POP_MODE and a PUSH_MODE this is in-fact a "transition"4073 // So no error should occur.4074 popToken.tokenType.PUSH_MODE === undefined) {4075 // if we try to pop the last mode there lexer will no longer have ANY mode.4076 // thus the pop is ignored, an error will be created and the lexer will continue parsing in the previous mode.4077 var msg_1 = "Unable to pop Lexer Mode after encountering Token ->" + popToken.image + "<- The Mode Stack is empty";4078 errors.push({4079 offset: popToken.startOffset,4080 line: popToken.startLine !== undefined4081 ? popToken.startLine4082 : undefined,4083 column: popToken.startColumn !== undefined4084 ? 
popToken.startColumn4085 : undefined,4086 length: popToken.image.length,4087 message: msg_14088 });4089 }4090 else {4091 modeStack.pop();4092 var newMode = utils_1.last(modeStack);4093 patternIdxToConfig = _this.patternIdxToConfig[newMode];4094 currModePatternsLength = patternIdxToConfig.length;4095 }4096 };4097 function push_mode(newMode) {4098 modeStack.push(newMode);4099 patternIdxToConfig = this.patternIdxToConfig[newMode];4100 currModePatternsLength = patternIdxToConfig.length;4101 }4102 // this pattern seems to avoid a V8 de-optimization, although that de-optimization does not4103 // seem to matter performance wise.4104 push_mode.call(this, initialMode);4105 var currConfig;4106 while (offset < orgLength) {4107 matchedImage = null;4108 for (i = 0; i < currModePatternsLength; i++) {4109 currConfig = patternIdxToConfig[i];4110 var currPattern = currConfig.pattern;4111 // manually in-lined because > 600 chars won't be in-lined in V84112 var singleCharCode = currConfig.short;4113 if (singleCharCode !== false) {4114 if (orgText.charCodeAt(offset) === singleCharCode) {4115 // single character string4116 matchedImage = currPattern;4117 }4118 }4119 else if (currConfig.isCustom === true) {4120 match = currPattern.exec(orgText, offset, matchedTokens, groups);4121 matchedImage = match !== null ? match[0] : match;4122 }4123 else {4124 this.updateLastIndex(currPattern, offset);4125 matchedImage = this.match(currPattern, text, offset);4126 }4127 if (matchedImage !== null) {4128 // even though this pattern matched we must try a another longer alternative.4129 // this can be used to prioritize keywords over identifiers4130 longerAltIdx = currConfig.longerAlt;4131 if (longerAltIdx !== undefined) {4132 // TODO: micro optimize, avoid extra prop access4133 // by saving/linking longerAlt on the original config?4134 var longerAltConfig = patternIdxToConfig[longerAltIdx];4135 var longerAltPattern = longerAltConfig.pattern;4136 // single Char can never be a longer alt so no need to test it.4137 // manually in-lined because > 600 chars won't be in-lined in V84138 if (longerAltConfig.isCustom === true) {4139 match = longerAltPattern.exec(orgText, offset, matchedTokens, groups);4140 matchAltImage = match !== null ? 
match[0] : match;4141 }4142 else {4143 this.updateLastIndex(longerAltPattern, offset);4144 matchAltImage = this.match(longerAltPattern, text, offset);4145 }4146 if (matchAltImage &&4147 matchAltImage.length > matchedImage.length) {4148 matchedImage = matchAltImage;4149 currConfig = longerAltConfig;4150 }4151 }4152 break;4153 }4154 }4155 // successful match4156 if (matchedImage !== null) {4157 // matchedImage = match[0]4158 imageLength = matchedImage.length;4159 group = currConfig.group;4160 if (group !== undefined) {4161 tokType = currConfig.tokenTypeIdx;4162 // TODO: "offset + imageLength" and the new column may be computed twice in case of "full" location information inside4163 // createFullToken method4164 newToken = this.createTokenInstance(matchedImage, offset, tokType, currConfig.tokenType, line, column, imageLength);4165 if (group === false) {4166 matchedTokensIndex = this.addToken(matchedTokens, matchedTokensIndex, newToken);4167 }4168 else {4169 groups[group].push(newToken);4170 }4171 }4172 text = this.chopInput(text, imageLength);4173 offset = offset + imageLength;4174 // TODO: with newlines the column may be assigned twice4175 column = this.computeNewColumn(column, imageLength);4176 if (trackLines === true &&4177 currConfig.canLineTerminator === true) {4178 var numOfLTsInMatch = 0;4179 var foundTerminator = void 0;4180 var lastLTEndOffset = void 0;4181 lineTerminatorPattern.lastIndex = 0;4182 do {4183 foundTerminator = lineTerminatorPattern.test(matchedImage);4184 if (foundTerminator === true) {4185 lastLTEndOffset =4186 lineTerminatorPattern.lastIndex - 1;4187 numOfLTsInMatch++;4188 }4189 } while (foundTerminator);4190 if (numOfLTsInMatch !== 0) {4191 line = line + numOfLTsInMatch;4192 column = imageLength - lastLTEndOffset;4193 this.updateTokenEndLineColumnLocation(newToken, group, lastLTEndOffset, numOfLTsInMatch, line, column, imageLength);4194 }4195 }4196 // will be NOOP if no modes present4197 this.handleModes(i, currConfig, pop_mode, push_mode, newToken);4198 }4199 else {4200 // error recovery, drop characters until we identify a valid token's start point4201 var errorStartOffset = offset;4202 var errorLine = line;4203 var errorColumn = column;4204 var foundResyncPoint = false;4205 while (!foundResyncPoint && offset < orgLength) {4206 // drop chars until we succeed in matching something4207 droppedChar = orgText.charCodeAt(offset);4208 // Identity Func (when sticky flag is enabled)4209 text = this.chopInput(text, 1);4210 offset++;4211 for (j = 0; j < currModePatternsLength; j++) {4212 var currConfig_1 = patternIdxToConfig[j];4213 var currPattern = currConfig_1.pattern;4214 // manually in-lined because > 600 chars won't be in-lined in V84215 var singleCharCode = currConfig_1.short;4216 if (singleCharCode !== false) {4217 if (orgText.charCodeAt(offset) === singleCharCode) {4218 // single character string4219 foundResyncPoint = true;4220 }4221 }4222 else if (currConfig_1.isCustom === true) {4223 foundResyncPoint =4224 currPattern.exec(orgText, offset, matchedTokens, groups) !== null;4225 }4226 else {4227 this.updateLastIndex(currPattern, offset);4228 foundResyncPoint = currPattern.exec(text) !== null;4229 }4230 if (foundResyncPoint === true) {4231 break;4232 }4233 }4234 }4235 errLength = offset - errorStartOffset;4236 // at this point we either re-synced or reached the end of the input text4237 msg =4238 "unexpected character: ->" + orgText.charAt(errorStartOffset) + "<- at offset: " + errorStartOffset + "," +4239 (" skipped " + (offset - errorStartOffset) + " 
characters.");4240 errors.push({4241 offset: errorStartOffset,4242 line: errorLine,4243 column: errorColumn,4244 length: errLength,4245 message: msg4246 });4247 }4248 }4249 // if we do have custom patterns which push directly into the4250 if (!this.hasCustom) {4251 // if we guessed a too large size for the tokens array this will shrink it to the right size.4252 matchedTokens.length = matchedTokensIndex;4253 }4254 return {4255 tokens: matchedTokens,4256 groups: groups,4257 errors: errors4258 };4259 };4260 Lexer.prototype.handleModes = function (i, config, pop_mode, push_mode, newToken) {4261 if (config.pop === true) {4262 // need to save the PUSH_MODE property as if the mode is popped4263 // patternIdxToPopMode is updated to reflect the new mode after popping the stack4264 var pushMode = config.push;4265 pop_mode(newToken);4266 if (pushMode !== undefined) {4267 push_mode.call(this, pushMode);4268 }4269 }4270 else if (config.push !== undefined) {4271 push_mode.call(this, config.push);4272 }4273 };4274 Lexer.prototype.chopInput = function (text, length) {4275 return text.substring(length);4276 };4277 Lexer.prototype.updateLastIndex = function (regExp, newLastIndex) {4278 regExp.lastIndex = newLastIndex;4279 };4280 // TODO: decrease this under 600 characters? inspect stripping comments option in TSC compiler4281 Lexer.prototype.updateTokenEndLineColumnLocation = function (newToken, group, lastLTIdx, numOfLTsInMatch, line, column, imageLength) {4282 var lastCharIsLT, fixForEndingInLT;4283 if (group !== undefined) {4284 // a none skipped multi line Token, need to update endLine/endColumn4285 lastCharIsLT = lastLTIdx === imageLength - 1;4286 fixForEndingInLT = lastCharIsLT ? -1 : 0;4287 if (!(numOfLTsInMatch === 1 && lastCharIsLT === true)) {4288 // if a token ends in a LT that last LT only affects the line numbering of following Tokens4289 newToken.endLine = line + fixForEndingInLT;4290 // the last LT in a token does not affect the endColumn either as the [columnStart ... 
columnEnd)4291 // inclusive to exclusive range.4292 newToken.endColumn = column - 1 + -fixForEndingInLT;4293 }4294 // else single LT in the last character of a token, no need to modify the endLine/EndColumn4295 }4296 };4297 Lexer.prototype.computeNewColumn = function (oldColumn, imageLength) {4298 return oldColumn + imageLength;4299 };4300 // Place holder, will be replaced by the correct variant according to the locationTracking option at runtime.4301 /* istanbul ignore next - place holder */4302 Lexer.prototype.createTokenInstance = function () {4303 var args = [];4304 for (var _i = 0; _i < arguments.length; _i++) {4305 args[_i] = arguments[_i];4306 }4307 return null;4308 };4309 Lexer.prototype.createOffsetOnlyToken = function (image, startOffset, tokenTypeIdx, tokenType) {4310 return {4311 image: image,4312 startOffset: startOffset,4313 tokenTypeIdx: tokenTypeIdx,4314 tokenType: tokenType4315 };4316 };4317 Lexer.prototype.createStartOnlyToken = function (image, startOffset, tokenTypeIdx, tokenType, startLine, startColumn) {4318 return {4319 image: image,4320 startOffset: startOffset,4321 startLine: startLine,4322 startColumn: startColumn,4323 tokenTypeIdx: tokenTypeIdx,4324 tokenType: tokenType4325 };4326 };4327 Lexer.prototype.createFullToken = function (image, startOffset, tokenTypeIdx, tokenType, startLine, startColumn, imageLength) {4328 return {4329 image: image,4330 startOffset: startOffset,4331 endOffset: startOffset + imageLength - 1,4332 startLine: startLine,4333 endLine: startLine,4334 startColumn: startColumn,4335 endColumn: startColumn + imageLength - 1,4336 tokenTypeIdx: tokenTypeIdx,4337 tokenType: tokenType4338 };4339 };4340 // Place holder, will be replaced by the correct variant according to the locationTracking option at runtime.4341 /* istanbul ignore next - place holder */4342 Lexer.prototype.addToken = function (tokenVector, index, tokenToAdd) {4343 return 666;4344 };4345 Lexer.prototype.addTokenUsingPush = function (tokenVector, index, tokenToAdd) {4346 tokenVector.push(tokenToAdd);4347 return index;4348 };4349 Lexer.prototype.addTokenUsingMemberAccess = function (tokenVector, index, tokenToAdd) {4350 tokenVector[index] = tokenToAdd;4351 index++;4352 return index;4353 };4354 /* istanbul ignore next - place holder to be replaced with chosen alternative at runtime */4355 Lexer.prototype.match = function (pattern, text, offset) {4356 return null;4357 };4358 Lexer.prototype.matchWithTest = function (pattern, text, offset) {4359 var found = pattern.test(text);4360 if (found === true) {4361 return text.substring(offset, pattern.lastIndex);4362 }4363 return null;4364 };4365 Lexer.prototype.matchWithExec = function (pattern, text) {4366 var regExpArray = pattern.exec(text);4367 return regExpArray !== null ? 
regExpArray[0] : regExpArray;4368 };4369 Lexer.SKIPPED = "This marks a skipped Token pattern, this means each token identified by it will" +4370 "be consumed and then thrown into oblivion, this can be used to for example to completely ignore whitespace.";4371 Lexer.NA = /NOT_APPLICABLE/;4372 return Lexer;4373}());4374exports.Lexer = Lexer;4375//# sourceMappingURL=lexer_public.js.map4376/***/ }),4377/* 9 */4378/***/ (function(module, exports, __webpack_require__) {4379"use strict";4380var __extends = (this && this.__extends) || (function () {4381 var extendStatics = Object.setPrototypeOf ||4382 ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||4383 function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };4384 return function (d, b) {4385 extendStatics(d, b);4386 function __() { this.constructor = d; }4387 d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());4388 };4389})();4390Object.defineProperty(exports, "__esModule", { value: true });4391var gast_public_1 = __webpack_require__(1);4392var utils_1 = __webpack_require__(0);4393function isSequenceProd(prod) {4394 return (prod instanceof gast_public_1.gast.Flat ||4395 prod instanceof gast_public_1.gast.Option ||4396 prod instanceof gast_public_1.gast.Repetition ||4397 prod instanceof gast_public_1.gast.RepetitionMandatory ||4398 prod instanceof gast_public_1.gast.RepetitionMandatoryWithSeparator ||4399 prod instanceof gast_public_1.gast.RepetitionWithSeparator ||4400 prod instanceof gast_public_1.gast.Terminal ||4401 prod instanceof gast_public_1.gast.Rule);4402}4403exports.isSequenceProd = isSequenceProd;4404function isOptionalProd(prod, alreadyVisited) {4405 if (alreadyVisited === void 0) { alreadyVisited = []; }4406 var isDirectlyOptional = prod instanceof gast_public_1.gast.Option ||4407 prod instanceof gast_public_1.gast.Repetition ||4408 prod instanceof gast_public_1.gast.RepetitionWithSeparator;4409 if (isDirectlyOptional) {4410 return true;4411 }4412 // note that this can cause infinite loop if one optional empty TOP production has a cyclic dependency with another4413 // empty optional top rule4414 // may be indirectly optional ((A?B?C?) 
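// Lexer.NA (defined just above) marks "category only" token types that are never
// matched directly. A sketch mirroring the class-style example from the Lexer doc
// comment earlier in this file -- the hierarchy and tokenMatcher behaviour shown
// here are assumptions based on that doc comment, not verified against the bundle:
const { Lexer, tokenMatcher } = require("chevrotain");

class Keyword { }
Keyword.PATTERN = Lexer.NA; // never matched on its own, acts only as a category

class Do extends Keyword { }
Do.PATTERN = /do/;

class While extends Keyword { }
While.PATTERN = /while/;

const kwLexer = new Lexer([Keyword, Do, While]);
const firstKwTok = kwLexer.tokenize("do").tokens[0];
tokenMatcher(firstKwTok, Do);      // true
tokenMatcher(firstKwTok, Keyword); // should also be true -- Do inherits from the Keyword category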
| (D?E?F?))4415 if (prod instanceof gast_public_1.gast.Alternation) {4416 // for OR its enough for just one of the alternatives to be optional4417 return utils_1.some(prod.definition, function (subProd) {4418 return isOptionalProd(subProd, alreadyVisited);4419 });4420 }4421 else if (prod instanceof gast_public_1.gast.NonTerminal &&4422 utils_1.contains(alreadyVisited, prod)) {4423 // avoiding stack overflow due to infinite recursion4424 return false;4425 }4426 else if (prod instanceof gast_public_1.gast.AbstractProduction) {4427 if (prod instanceof gast_public_1.gast.NonTerminal) {4428 alreadyVisited.push(prod);4429 }4430 return utils_1.every(prod.definition, function (subProd) {4431 return isOptionalProd(subProd, alreadyVisited);4432 });4433 }4434 else {4435 return false;4436 }4437}4438exports.isOptionalProd = isOptionalProd;4439function isBranchingProd(prod) {4440 return prod instanceof gast_public_1.gast.Alternation;4441}4442exports.isBranchingProd = isBranchingProd;4443function getProductionDslName(prod) {4444 if (prod instanceof gast_public_1.gast.NonTerminal) {4445 return "SUBRULE";4446 }4447 else if (prod instanceof gast_public_1.gast.Option) {4448 return "OPTION";4449 }4450 else if (prod instanceof gast_public_1.gast.Alternation) {4451 return "OR";4452 }4453 else if (prod instanceof gast_public_1.gast.RepetitionMandatory) {4454 return "AT_LEAST_ONE";4455 }4456 else if (prod instanceof gast_public_1.gast.RepetitionMandatoryWithSeparator) {4457 return "AT_LEAST_ONE_SEP";4458 }4459 else if (prod instanceof gast_public_1.gast.RepetitionWithSeparator) {4460 return "MANY_SEP";4461 }4462 else if (prod instanceof gast_public_1.gast.Repetition) {4463 return "MANY";4464 }4465 else if (prod instanceof gast_public_1.gast.Terminal) {4466 return "CONSUME";4467 }4468 else {4469 /* istanbul ignore next */4470 throw Error("non exhaustive match");4471 }4472}4473exports.getProductionDslName = getProductionDslName;4474var GastCloneVisitor = /** @class */ (function (_super) {4475 __extends(GastCloneVisitor, _super);4476 function GastCloneVisitor() {4477 return _super !== null && _super.apply(this, arguments) || this;4478 }4479 GastCloneVisitor.prototype.visitNonTerminal = function (node) {4480 return new gast_public_1.gast.NonTerminal(node.nonTerminalName, undefined, node.occurrenceInParent, node.implicitOccurrenceIndex);4481 };4482 GastCloneVisitor.prototype.visitFlat = function (node) {4483 var _this = this;4484 var definition = utils_1.map(node.definition, function (currSubDef) {4485 return _this.visit(currSubDef);4486 });4487 return new gast_public_1.gast.Flat(definition, node.name);4488 };4489 GastCloneVisitor.prototype.visitOption = function (node) {4490 var _this = this;4491 var definition = utils_1.map(node.definition, function (currSubDef) {4492 return _this.visit(currSubDef);4493 });4494 return new gast_public_1.gast.Option(definition, node.occurrenceInParent, node.name, node.implicitOccurrenceIndex);4495 };4496 GastCloneVisitor.prototype.visitRepetition = function (node) {4497 var _this = this;4498 var definition = utils_1.map(node.definition, function (currSubDef) {4499 return _this.visit(currSubDef);4500 });4501 return new gast_public_1.gast.Repetition(definition, node.occurrenceInParent, node.name, node.implicitOccurrenceIndex);4502 };4503 GastCloneVisitor.prototype.visitRepetitionMandatory = function (node) {4504 var _this = this;4505 var definition = utils_1.map(node.definition, function (currSubDef) {4506 return _this.visit(currSubDef);4507 });4508 return new 
gast_public_1.gast.RepetitionMandatory(definition, node.occurrenceInParent, node.name, node.implicitOccurrenceIndex);4509 };4510 GastCloneVisitor.prototype.visitRepetitionMandatoryWithSeparator = function (node) {4511 var _this = this;4512 var definition = utils_1.map(node.definition, function (currSubDef) {4513 return _this.visit(currSubDef);4514 });4515 return new gast_public_1.gast.RepetitionMandatoryWithSeparator(definition, node.separator, node.occurrenceInParent, node.name, node.implicitOccurrenceIndex);4516 };4517 GastCloneVisitor.prototype.visitRepetitionWithSeparator = function (node) {4518 var _this = this;4519 var definition = utils_1.map(node.definition, function (currSubDef) {4520 return _this.visit(currSubDef);4521 });4522 return new gast_public_1.gast.RepetitionWithSeparator(definition, node.separator, node.occurrenceInParent, node.name, node.implicitOccurrenceIndex);4523 };4524 GastCloneVisitor.prototype.visitAlternation = function (node) {4525 var _this = this;4526 var definition = utils_1.map(node.definition, function (currSubDef) {4527 return _this.visit(currSubDef);4528 });4529 return new gast_public_1.gast.Alternation(definition, node.occurrenceInParent, node.name, node.implicitOccurrenceIndex);4530 };4531 GastCloneVisitor.prototype.visitTerminal = function (node) {4532 return new gast_public_1.gast.Terminal(node.terminalType, node.occurrenceInParent, node.implicitOccurrenceIndex);4533 };4534 GastCloneVisitor.prototype.visitRule = function (node) {4535 var _this = this;4536 var definition = utils_1.map(node.definition, function (currSubDef) {4537 return _this.visit(currSubDef);4538 });4539 return new gast_public_1.gast.Rule(node.name, definition, node.orgText);4540 };4541 return GastCloneVisitor;4542}(gast_public_1.gast.GAstVisitor));4543function cloneProduction(prod) {4544 var cloningVisitor = new GastCloneVisitor();4545 return cloningVisitor.visit(prod);4546}4547exports.cloneProduction = cloneProduction;4548//# sourceMappingURL=gast.js.map4549/***/ }),4550/* 10 */4551/***/ (function(module, exports, __webpack_require__) {4552"use strict";4553Object.defineProperty(exports, "__esModule", { value: true });4554var gast_public_1 = __webpack_require__(1);4555var utils_1 = __webpack_require__(0);4556/**4557 * A Grammar Walker that computes the "remaining" grammar "after" a productions in the grammar.4558 */4559var RestWalker = /** @class */ (function () {4560 function RestWalker() {4561 }4562 RestWalker.prototype.walk = function (prod, prevRest) {4563 var _this = this;4564 if (prevRest === void 0) { prevRest = []; }4565 utils_1.forEach(prod.definition, function (subProd, index) {4566 var currRest = utils_1.drop(prod.definition, index + 1);4567 if (subProd instanceof gast_public_1.gast.NonTerminal) {4568 _this.walkProdRef(subProd, currRest, prevRest);4569 }4570 else if (subProd instanceof gast_public_1.gast.Terminal) {4571 _this.walkTerminal(subProd, currRest, prevRest);4572 }4573 else if (subProd instanceof gast_public_1.gast.Flat) {4574 _this.walkFlat(subProd, currRest, prevRest);4575 }4576 else if (subProd instanceof gast_public_1.gast.Option) {4577 _this.walkOption(subProd, currRest, prevRest);4578 }4579 else if (subProd instanceof gast_public_1.gast.RepetitionMandatory) {4580 _this.walkAtLeastOne(subProd, currRest, prevRest);4581 }4582 else if (subProd instanceof gast_public_1.gast.RepetitionMandatoryWithSeparator) {4583 _this.walkAtLeastOneSep(subProd, currRest, prevRest);4584 }4585 else if (subProd instanceof gast_public_1.gast.RepetitionWithSeparator) {4586 
_this.walkManySep(subProd, currRest, prevRest);4587 }4588 else if (subProd instanceof gast_public_1.gast.Repetition) {4589 _this.walkMany(subProd, currRest, prevRest);4590 }4591 else if (subProd instanceof gast_public_1.gast.Alternation) {4592 _this.walkOr(subProd, currRest, prevRest);4593 }4594 else {4595 /* istanbul ignore next */4596 throw Error("non exhaustive match");4597 }4598 });4599 };4600 RestWalker.prototype.walkTerminal = function (terminal, currRest, prevRest) { };4601 RestWalker.prototype.walkProdRef = function (refProd, currRest, prevRest) { };4602 RestWalker.prototype.walkFlat = function (flatProd, currRest, prevRest) {4603 // ABCDEF => after the D the rest is EF4604 var fullOrRest = currRest.concat(prevRest);4605 this.walk(flatProd, fullOrRest);4606 };4607 RestWalker.prototype.walkOption = function (optionProd, currRest, prevRest) {4608 // ABC(DE)?F => after the (DE)? the rest is F4609 var fullOrRest = currRest.concat(prevRest);4610 this.walk(optionProd, fullOrRest);4611 };4612 RestWalker.prototype.walkAtLeastOne = function (atLeastOneProd, currRest, prevRest) {4613 // ABC(DE)+F => after the (DE)+ the rest is (DE)?F4614 var fullAtLeastOneRest = [4615 new gast_public_1.gast.Option(atLeastOneProd.definition)4616 ].concat(currRest, prevRest);4617 this.walk(atLeastOneProd, fullAtLeastOneRest);4618 };4619 RestWalker.prototype.walkAtLeastOneSep = function (atLeastOneSepProd, currRest, prevRest) {4620 // ABC DE(,DE)* F => after the (,DE)+ the rest is (,DE)?F4621 var fullAtLeastOneSepRest = restForRepetitionWithSeparator(atLeastOneSepProd, currRest, prevRest);4622 this.walk(atLeastOneSepProd, fullAtLeastOneSepRest);4623 };4624 RestWalker.prototype.walkMany = function (manyProd, currRest, prevRest) {4625 // ABC(DE)*F => after the (DE)* the rest is (DE)?F4626 var fullManyRest = [4627 new gast_public_1.gast.Option(manyProd.definition)4628 ].concat(currRest, prevRest);4629 this.walk(manyProd, fullManyRest);4630 };4631 RestWalker.prototype.walkManySep = function (manySepProd, currRest, prevRest) {4632 // ABC (DE(,DE)*)? 
F => after the (,DE)* the rest is (,DE)?F4633 var fullManySepRest = restForRepetitionWithSeparator(manySepProd, currRest, prevRest);4634 this.walk(manySepProd, fullManySepRest);4635 };4636 RestWalker.prototype.walkOr = function (orProd, currRest, prevRest) {4637 var _this = this;4638 // ABC(D|E|F)G => when finding the (D|E|F) the rest is G4639 var fullOrRest = currRest.concat(prevRest);4640 // walk all different alternatives4641 utils_1.forEach(orProd.definition, function (alt) {4642 // wrapping each alternative in a single definition wrapper4643 // to avoid errors in computing the rest of that alternative in the invocation to computeInProdFollows4644 // (otherwise for OR([alt1,alt2]) alt2 will be considered in 'rest' of alt14645 var prodWrapper = new gast_public_1.gast.Flat([alt]);4646 _this.walk(prodWrapper, fullOrRest);4647 });4648 };4649 return RestWalker;4650}());4651exports.RestWalker = RestWalker;4652function restForRepetitionWithSeparator(repSepProd, currRest, prevRest) {4653 var repSepRest = [4654 new gast_public_1.gast.Option([new gast_public_1.gast.Terminal(repSepProd.separator)].concat(repSepProd.definition))4655 ];4656 var fullRepSepRest = repSepRest.concat(currRest, prevRest);4657 return fullRepSepRest;4658}4659//# sourceMappingURL=rest.js.map4660/***/ }),4661/* 11 */4662/***/ (function(module, exports, __webpack_require__) {4663"use strict";4664Object.defineProperty(exports, "__esModule", { value: true });4665// needs a separate module as this is required inside chevrotain productive code4666// and also in the entry point for webpack(api.ts).4667// A separate file avoids cyclic dependencies and webpack errors.4668exports.VERSION = "1.0.1";4669//# sourceMappingURL=version.js.map4670/***/ }),4671/* 12 */4672/***/ (function(module, exports, __webpack_require__) {4673"use strict";4674Object.defineProperty(exports, "__esModule", { value: true });4675var utils_1 = __webpack_require__(0);4676var exceptions;4677(function (exceptions) {4678 var MISMATCHED_TOKEN_EXCEPTION = "MismatchedTokenException";4679 var NO_VIABLE_ALT_EXCEPTION = "NoViableAltException";4680 var EARLY_EXIT_EXCEPTION = "EarlyExitException";4681 var NOT_ALL_INPUT_PARSED_EXCEPTION = "NotAllInputParsedException";4682 var RECOGNITION_EXCEPTION_NAMES = [4683 MISMATCHED_TOKEN_EXCEPTION,4684 NO_VIABLE_ALT_EXCEPTION,4685 EARLY_EXIT_EXCEPTION,4686 NOT_ALL_INPUT_PARSED_EXCEPTION4687 ];4688 Object.freeze(RECOGNITION_EXCEPTION_NAMES);4689 // hacks to bypass no support for custom Errors in javascript/typescript4690 function isRecognitionException(error) {4691 // can't do instanceof on hacked custom js exceptions4692 return utils_1.contains(RECOGNITION_EXCEPTION_NAMES, error.name);4693 }4694 exceptions.isRecognitionException = isRecognitionException;4695 function MismatchedTokenException(message, token) {4696 this.name = MISMATCHED_TOKEN_EXCEPTION;4697 this.message = message;4698 this.token = token;4699 this.resyncedTokens = [];4700 }4701 exceptions.MismatchedTokenException = MismatchedTokenException;4702 // must use the "Error.prototype" instead of "new Error"4703 // because the stack trace points to where "new Error" was invoked"4704 MismatchedTokenException.prototype = Error.prototype;4705 function NoViableAltException(message, token) {4706 this.name = NO_VIABLE_ALT_EXCEPTION;4707 this.message = message;4708 this.token = token;4709 this.resyncedTokens = [];4710 }4711 exceptions.NoViableAltException = NoViableAltException;4712 NoViableAltException.prototype = Error.prototype;4713 function NotAllInputParsedException(message, 
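// Illustrative sketch (not from the bundle): the RestWalker above is meant to be
// subclassed; overriding walkTerminal collects what may follow each Terminal,
// combining the rest inside the current sub-production (currRest) with the rest
// of the enclosing one (prevRest). Hypothetical usage:
//   function FollowCollector() { RestWalker.call(this); this.follows = []; }
//   FollowCollector.prototype = Object.create(RestWalker.prototype);
//   FollowCollector.prototype.walkTerminal = function (terminal, currRest, prevRest) {
//       this.follows.push({ terminal: terminal, rest: currRest.concat(prevRest) });
//   };
//   new FollowCollector().walk(someRuleGast); // someRuleGast: a gast.Rule instance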
token) {4714 this.name = NOT_ALL_INPUT_PARSED_EXCEPTION;4715 this.message = message;4716 this.token = token;4717 this.resyncedTokens = [];4718 }4719 exceptions.NotAllInputParsedException = NotAllInputParsedException;4720 NotAllInputParsedException.prototype = Error.prototype;4721 function EarlyExitException(message, token, previousToken) {4722 this.name = EARLY_EXIT_EXCEPTION;4723 this.message = message;4724 this.token = token;4725 this.previousToken = previousToken;4726 this.resyncedTokens = [];4727 }4728 exceptions.EarlyExitException = EarlyExitException;4729 EarlyExitException.prototype = Error.prototype;4730})(exceptions = exports.exceptions || (exports.exceptions = {}));4731//# sourceMappingURL=exceptions_public.js.map4732/***/ }),4733/* 13 */4734/***/ (function(module, exports, __webpack_require__) {4735"use strict";4736var __extends = (this && this.__extends) || (function () {4737 var extendStatics = Object.setPrototypeOf ||4738 ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||4739 function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };4740 return function (d, b) {4741 extendStatics(d, b);4742 function __() { this.constructor = d; }4743 d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());4744 };4745})();4746Object.defineProperty(exports, "__esModule", { value: true });4747var utils = __webpack_require__(0);4748var utils_1 = __webpack_require__(0);4749var parser_public_1 = __webpack_require__(6);4750var gast_public_1 = __webpack_require__(1);4751var gast_1 = __webpack_require__(9);4752var tokens_public_1 = __webpack_require__(2);4753var lookahead_1 = __webpack_require__(14);4754var version_1 = __webpack_require__(11);4755var cst_1 = __webpack_require__(16);4756var interpreter_1 = __webpack_require__(5);4757function validateGrammar(topLevels, maxLookahead, tokens, ignoredIssues) {4758 var duplicateErrors = utils.map(topLevels, validateDuplicateProductions);4759 var leftRecursionErrors = utils.map(topLevels, function (currTopRule) {4760 return validateNoLeftRecursion(currTopRule, currTopRule);4761 });4762 var emptyAltErrors = [];4763 var ambiguousAltsErrors = [];4764 // left recursion could cause infinite loops in the following validations.4765 // It is safest to first have the user fix the left recursion errors first and only then examine farther issues.4766 if (utils_1.every(leftRecursionErrors, utils_1.isEmpty)) {4767 emptyAltErrors = utils_1.map(topLevels, validateEmptyOrAlternative);4768 ambiguousAltsErrors = utils_1.map(topLevels, function (currTopRule) {4769 return validateAmbiguousAlternationAlternatives(currTopRule, maxLookahead, ignoredIssues);4770 });4771 }4772 var ruleNames = utils_1.map(topLevels, function (currTopLevel) { return currTopLevel.name; });4773 var tokenNames = utils_1.map(tokens, function (currToken) { return tokens_public_1.tokenName(currToken); });4774 var termsNamespaceConflictErrors = checkTerminalAndNoneTerminalsNameSpace(ruleNames, tokenNames);4775 var tokenNameErrors = utils.map(tokenNames, validateTokenName);4776 var nestedRulesNameErrors = validateNestedRulesNames(topLevels);4777 var nestedRulesDuplicateErrors = validateDuplicateNestedRules(topLevels);4778 var emptyRepetitionErrors = validateSomeNonEmptyLookaheadPath(topLevels, maxLookahead);4779 var tooManyAltsErrors = utils.map(topLevels, validateTooManyAlts);4780 return utils.flatten(duplicateErrors.concat(tokenNameErrors, nestedRulesNameErrors, nestedRulesDuplicateErrors, emptyRepetitionErrors, leftRecursionErrors, 
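// Illustrative usage (assumes a chevrotain parser instance `parser` with a rule
// named `statement`; not part of the bundle): recognition failures surface as the
// exception objects constructed above and can be filtered with isRecognitionException:
//   parser.statement();
//   parser.errors.forEach(function (err) {
//       if (chevrotain.exceptions.isRecognitionException(err)) {
//           console.log(err.name + ": " + err.message, err.token);
//       }
//   });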
emptyAltErrors, ambiguousAltsErrors, termsNamespaceConflictErrors, tooManyAltsErrors));4781}4782exports.validateGrammar = validateGrammar;4783function validateNestedRulesNames(topLevels) {4784 var result = [];4785 utils_1.forEach(topLevels, function (curTopLevel) {4786 var namedCollectorVisitor = new cst_1.NamedDSLMethodsCollectorVisitor("");4787 curTopLevel.accept(namedCollectorVisitor);4788 var nestedNamesPerRule = utils_1.map(namedCollectorVisitor.result, function (currItem) { return currItem.name; });4789 var currTopRuleName = curTopLevel.name;4790 result.push(utils_1.map(nestedNamesPerRule, function (currNestedName) {4791 return validateNestedRuleName(currNestedName, currTopRuleName);4792 }));4793 });4794 return utils_1.flatten(result);4795}4796function validateDuplicateProductions(topLevelRule) {4797 var collectorVisitor = new OccurrenceValidationCollector();4798 topLevelRule.accept(collectorVisitor);4799 var allRuleProductions = collectorVisitor.allProductions;4800 var productionGroups = utils.groupBy(allRuleProductions, identifyProductionForDuplicates);4801 var duplicates = utils.pick(productionGroups, function (currGroup) {4802 return currGroup.length > 1;4803 });4804 var errors = utils.map(utils.values(duplicates), function (currDuplicates) {4805 var firstProd = utils.first(currDuplicates);4806 var msg = createDuplicatesErrorMessage(currDuplicates, topLevelRule.name);4807 var dslName = gast_1.getProductionDslName(firstProd);4808 var defError = {4809 message: msg,4810 type: parser_public_1.ParserDefinitionErrorType.DUPLICATE_PRODUCTIONS,4811 ruleName: topLevelRule.name,4812 dslName: dslName,4813 occurrence: firstProd.occurrenceInParent4814 };4815 var param = getExtraProductionArgument(firstProd);4816 if (param) {4817 defError.parameter = param;4818 }4819 return defError;4820 });4821 return errors;4822}4823function createDuplicatesErrorMessage(duplicateProds, topLevelName) {4824 var firstProd = utils.first(duplicateProds);4825 var index = firstProd.occurrenceInParent;4826 var dslName = gast_1.getProductionDslName(firstProd);4827 var extraArgument = getExtraProductionArgument(firstProd);4828 var msg = "->" + dslName + "<- with occurrence index: ->" + index + "<-\n " + (extraArgument ? "and argument: " + extraArgument : "") + "\n appears more than once (" + duplicateProds.length + " times) in the top level rule: " + topLevelName + ".\n " + (index === 14829 ? "note that " + dslName + " and " + dslName + "1 both have the same occurrence index 1}"4830 : "") + "}\n to fix this make sure each usage of " + dslName + " " + (extraArgument ? "with the argument: " + extraArgument : "") + "\n in the rule " + topLevelName + " has a different occurrence index (1-5), as that combination acts as a unique\n position key in the grammar, which is needed by the parsing engine.";4831 // white space trimming time! 
better to trim afterwards as it allows to use WELL formatted multi line template strings...4832 msg = msg.replace(/[ \t]+/g, " ");4833 msg = msg.replace(/\s\s+/g, "\n");4834 return msg;4835}4836function identifyProductionForDuplicates(prod) {4837 return gast_1.getProductionDslName(prod) + "_#_" + prod.occurrenceInParent + "_#_" + getExtraProductionArgument(prod);4838}4839exports.identifyProductionForDuplicates = identifyProductionForDuplicates;4840function getExtraProductionArgument(prod) {4841 if (prod instanceof gast_public_1.gast.Terminal) {4842 return tokens_public_1.tokenName(prod.terminalType);4843 }4844 else if (prod instanceof gast_public_1.gast.NonTerminal) {4845 return prod.nonTerminalName;4846 }4847 else {4848 return "";4849 }4850}4851var OccurrenceValidationCollector = /** @class */ (function (_super) {4852 __extends(OccurrenceValidationCollector, _super);4853 function OccurrenceValidationCollector() {4854 var _this = _super !== null && _super.apply(this, arguments) || this;4855 _this.allProductions = [];4856 return _this;4857 }4858 OccurrenceValidationCollector.prototype.visitNonTerminal = function (subrule) {4859 this.allProductions.push(subrule);4860 };4861 OccurrenceValidationCollector.prototype.visitOption = function (option) {4862 this.allProductions.push(option);4863 };4864 OccurrenceValidationCollector.prototype.visitRepetitionWithSeparator = function (manySep) {4865 this.allProductions.push(manySep);4866 };4867 OccurrenceValidationCollector.prototype.visitRepetitionMandatory = function (atLeastOne) {4868 this.allProductions.push(atLeastOne);4869 };4870 OccurrenceValidationCollector.prototype.visitRepetitionMandatoryWithSeparator = function (atLeastOneSep) {4871 this.allProductions.push(atLeastOneSep);4872 };4873 OccurrenceValidationCollector.prototype.visitRepetition = function (many) {4874 this.allProductions.push(many);4875 };4876 OccurrenceValidationCollector.prototype.visitAlternation = function (or) {4877 this.allProductions.push(or);4878 };4879 OccurrenceValidationCollector.prototype.visitTerminal = function (terminal) {4880 this.allProductions.push(terminal);4881 };4882 return OccurrenceValidationCollector;4883}(gast_public_1.gast.GAstVisitor));4884exports.OccurrenceValidationCollector = OccurrenceValidationCollector;4885exports.validTermsPattern = /^[a-zA-Z_]\w*$/;4886exports.validNestedRuleName = new RegExp(exports.validTermsPattern.source.replace("^", "^\\$"));4887function validateRuleName(ruleName) {4888 var errors = [];4889 var errMsg;4890 if (!ruleName.match(exports.validTermsPattern)) {4891 errMsg = "Invalid Grammar rule name: ->" + ruleName + "<- it must match the pattern: ->" + exports.validTermsPattern.toString() + "<-";4892 errors.push({4893 message: errMsg,4894 type: parser_public_1.ParserDefinitionErrorType.INVALID_RULE_NAME,4895 ruleName: ruleName4896 });4897 }4898 return errors;4899}4900exports.validateRuleName = validateRuleName;4901function validateNestedRuleName(nestedRuleName, containingRuleName) {4902 var errors = [];4903 var errMsg;4904 if (!nestedRuleName.match(exports.validNestedRuleName)) {4905 errMsg =4906 "Invalid nested rule name: ->" + nestedRuleName + "<- inside rule: ->" + containingRuleName + "<-\n" +4907 ("it must match the pattern: ->" + exports.validNestedRuleName.toString() + "<-.\n") +4908 "Note that this means a nested rule name must start with the '$'(dollar) sign.";4909 errors.push({4910 message: errMsg,4911 type: parser_public_1.ParserDefinitionErrorType.INVALID_NESTED_RULE_NAME,4912 ruleName: nestedRuleName4913 });4914 
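// Illustrative note (hypothetical rule body, not from the bundle): the
// DUPLICATE_PRODUCTIONS check above is what requires distinct occurrence indices
// when the same DSL method consumes the same Token twice inside one rule:
//   this.CONSUME(Comma);    // occurrence index 1
//   this.CONSUME2(Comma);   // occurrence index 2
// Calling this.CONSUME(Comma) twice would trigger the duplicates error message
// built by createDuplicatesErrorMessage above.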
}4915 return errors;4916}4917exports.validateNestedRuleName = validateNestedRuleName;4918function validateTokenName(tokenNAme) {4919 var errors = [];4920 var errMsg;4921 if (!tokenNAme.match(exports.validTermsPattern)) {4922 errMsg = "Invalid Grammar Token name: ->" + tokenNAme + "<- it must match the pattern: ->" + exports.validTermsPattern.toString() + "<-";4923 errors.push({4924 message: errMsg,4925 type: parser_public_1.ParserDefinitionErrorType.INVALID_TOKEN_NAME4926 });4927 }4928 return errors;4929}4930exports.validateTokenName = validateTokenName;4931function validateRuleDoesNotAlreadyExist(ruleName, definedRulesNames, className) {4932 var errors = [];4933 var errMsg;4934 if (utils.contains(definedRulesNames, ruleName)) {4935 errMsg = "Duplicate definition, rule: ->" + ruleName + "<- is already defined in the grammar: ->" + className + "<-";4936 errors.push({4937 message: errMsg,4938 type: parser_public_1.ParserDefinitionErrorType.DUPLICATE_RULE_NAME,4939 ruleName: ruleName4940 });4941 }4942 return errors;4943}4944exports.validateRuleDoesNotAlreadyExist = validateRuleDoesNotAlreadyExist;4945// TODO: is there anyway to get only the rule names of rules inherited from the super grammars?4946function validateRuleIsOverridden(ruleName, definedRulesNames, className) {4947 var errors = [];4948 var errMsg;4949 if (!utils.contains(definedRulesNames, ruleName)) {4950 errMsg =4951 "Invalid rule override, rule: ->" + ruleName + "<- cannot be overridden in the grammar: ->" + className + "<-" +4952 "as it is not defined in any of the super grammars ";4953 errors.push({4954 message: errMsg,4955 type: parser_public_1.ParserDefinitionErrorType.INVALID_RULE_OVERRIDE,4956 ruleName: ruleName4957 });4958 }4959 return errors;4960}4961exports.validateRuleIsOverridden = validateRuleIsOverridden;4962function validateNoLeftRecursion(topRule, currRule, path) {4963 if (path === void 0) { path = []; }4964 var errors = [];4965 var nextNonTerminals = getFirstNoneTerminal(currRule.definition);4966 if (utils.isEmpty(nextNonTerminals)) {4967 return [];4968 }4969 else {4970 var ruleName = topRule.name;4971 var foundLeftRecursion = utils.contains(nextNonTerminals, topRule);4972 var pathNames = utils.map(path, function (currRule) { return currRule.name; });4973 var leftRecursivePath = ruleName + " --> " + pathNames4974 .concat([ruleName])4975 .join(" --> ");4976 if (foundLeftRecursion) {4977 var errMsg = "Left Recursion found in grammar.\n" +4978 ("rule: <" + ruleName + "> can be invoked from itself (directly or indirectly)\n") +4979 ("without consuming any Tokens. 
The grammar path that causes this is: \n " + leftRecursivePath + "\n") +4980 " To fix this refactor your grammar to remove the left recursion.\n" +4981 "see: https://en.wikipedia.org/wiki/LL_parser#Left_Factoring.";4982 errors.push({4983 message: errMsg,4984 type: parser_public_1.ParserDefinitionErrorType.LEFT_RECURSION,4985 ruleName: ruleName4986 });4987 }4988 // we are only looking for cyclic paths leading back to the specific topRule4989 // other cyclic paths are ignored, we still need this difference to avoid infinite loops...4990 var validNextSteps = utils.difference(nextNonTerminals, path.concat([topRule]));4991 var errorsFromNextSteps = utils.map(validNextSteps, function (currRefRule) {4992 var newPath = utils.cloneArr(path);4993 newPath.push(currRefRule);4994 return validateNoLeftRecursion(topRule, currRefRule, newPath);4995 });4996 return errors.concat(utils.flatten(errorsFromNextSteps));4997 }4998}4999exports.validateNoLeftRecursion = validateNoLeftRecursion;5000function getFirstNoneTerminal(definition) {5001 var result = [];5002 if (utils.isEmpty(definition)) {5003 return result;5004 }5005 var firstProd = utils.first(definition);5006 if (firstProd instanceof gast_public_1.gast.NonTerminal) {5007 result.push(firstProd.referencedRule);5008 }5009 else if (firstProd instanceof gast_public_1.gast.Flat ||5010 firstProd instanceof gast_public_1.gast.Option ||5011 firstProd instanceof gast_public_1.gast.RepetitionMandatory ||5012 firstProd instanceof gast_public_1.gast.RepetitionMandatoryWithSeparator ||5013 firstProd instanceof gast_public_1.gast.RepetitionWithSeparator ||5014 firstProd instanceof gast_public_1.gast.Repetition) {5015 result = result.concat(getFirstNoneTerminal(firstProd.definition));5016 }5017 else if (firstProd instanceof gast_public_1.gast.Alternation) {5018 // each sub definition in alternation is a FLAT5019 result = utils.flatten(utils.map(firstProd.definition, function (currSubDef) {5020 return getFirstNoneTerminal(currSubDef.definition);5021 }));5022 }5023 else if (firstProd instanceof gast_public_1.gast.Terminal) {5024 // nothing to see, move along5025 }5026 else {5027 /* istanbul ignore next */5028 throw Error("non exhaustive match");5029 }5030 var isFirstOptional = gast_1.isOptionalProd(firstProd);5031 var hasMore = definition.length > 1;5032 if (isFirstOptional && hasMore) {5033 var rest = utils.drop(definition);5034 return result.concat(getFirstNoneTerminal(rest));5035 }5036 else {5037 return result;5038 }5039}5040exports.getFirstNoneTerminal = getFirstNoneTerminal;5041var OrCollector = /** @class */ (function (_super) {5042 __extends(OrCollector, _super);5043 function OrCollector() {5044 var _this = _super !== null && _super.apply(this, arguments) || this;5045 _this.alternations = [];5046 return _this;5047 }5048 OrCollector.prototype.visitAlternation = function (node) {5049 this.alternations.push(node);5050 };5051 return OrCollector;5052}(gast_public_1.gast.GAstVisitor));5053function validateEmptyOrAlternative(topLevelRule) {5054 var orCollector = new OrCollector();5055 topLevelRule.accept(orCollector);5056 var ors = orCollector.alternations;5057 var errors = utils.reduce(ors, function (errors, currOr) {5058 var exceptLast = utils.dropRight(currOr.definition);5059 var currErrors = utils.map(exceptLast, function (currAlternative, currAltIdx) {5060 var possibleFirstInAlt = interpreter_1.nextPossibleTokensAfter([currAlternative], [], null, 1);5061 if (utils.isEmpty(possibleFirstInAlt)) {5062 return {5063 message: "Ambiguous empty alternative: <" + (currAltIdx 
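// Illustrative grammar (not from the bundle) that validateNoLeftRecursion above
// rejects: a rule whose first possible step is a reference back to itself,
//   expression : expression Plus term | term
// In DSL form the first SUBRULE call of the hypothetical "expression" rule points
// back at "expression" before any token is consumed, so the LEFT_RECURSION error
// above is reported together with the offending grammar path.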
+5064 1) + ">" +5065 (" in <OR" + currOr.occurrenceInParent + "> inside <" + topLevelRule.name + "> Rule.\n") +5066 "Only the last alternative may be an empty alternative.",5067 type: parser_public_1.ParserDefinitionErrorType.NONE_LAST_EMPTY_ALT,5068 ruleName: topLevelRule.name,5069 occurrence: currOr.occurrenceInParent,5070 alternative: currAltIdx + 15071 };5072 }5073 else {5074 return null;5075 }5076 });5077 return errors.concat(utils.compact(currErrors));5078 }, []);5079 return errors;5080}5081exports.validateEmptyOrAlternative = validateEmptyOrAlternative;5082function validateAmbiguousAlternationAlternatives(topLevelRule, maxLookahead, ignoredIssues) {5083 var orCollector = new OrCollector();5084 topLevelRule.accept(orCollector);5085 var ors = orCollector.alternations;5086 var ignoredIssuesForCurrentRule = ignoredIssues[topLevelRule.name];5087 if (ignoredIssuesForCurrentRule) {5088 ors = utils_1.reject(ors, function (currOr) {5089 return ignoredIssuesForCurrentRule[gast_1.getProductionDslName(currOr) + currOr.occurrenceInParent];5090 });5091 }5092 var errors = utils.reduce(ors, function (result, currOr) {5093 var currOccurrence = currOr.occurrenceInParent;5094 var alternatives = lookahead_1.getLookaheadPathsForOr(currOccurrence, topLevelRule, maxLookahead);5095 var altsAmbiguityErrors = checkAlternativesAmbiguities(alternatives, currOr, topLevelRule.name);5096 var altsPrefixAmbiguityErrors = checkPrefixAlternativesAmbiguities(alternatives, currOr, topLevelRule.name);5097 return result.concat(altsAmbiguityErrors, altsPrefixAmbiguityErrors);5098 }, []);5099 return errors;5100}5101exports.validateAmbiguousAlternationAlternatives = validateAmbiguousAlternationAlternatives;5102var RepetionCollector = /** @class */ (function (_super) {5103 __extends(RepetionCollector, _super);5104 function RepetionCollector() {5105 var _this = _super !== null && _super.apply(this, arguments) || this;5106 _this.allProductions = [];5107 return _this;5108 }5109 RepetionCollector.prototype.visitRepetitionWithSeparator = function (manySep) {5110 this.allProductions.push(manySep);5111 };5112 RepetionCollector.prototype.visitRepetitionMandatory = function (atLeastOne) {5113 this.allProductions.push(atLeastOne);5114 };5115 RepetionCollector.prototype.visitRepetitionMandatoryWithSeparator = function (atLeastOneSep) {5116 this.allProductions.push(atLeastOneSep);5117 };5118 RepetionCollector.prototype.visitRepetition = function (many) {5119 this.allProductions.push(many);5120 };5121 return RepetionCollector;5122}(gast_public_1.gast.GAstVisitor));5123exports.RepetionCollector = RepetionCollector;5124function validateTooManyAlts(topLevelRule) {5125 var orCollector = new OrCollector();5126 topLevelRule.accept(orCollector);5127 var ors = orCollector.alternations;5128 var errors = utils.reduce(ors, function (errors, currOr) {5129 if (currOr.definition.length > 255) {5130 errors.push({5131 message: "An Alternation cannot have more than 256 alternatives:\n" +5132 ("<OR" + currOr.occurrenceInParent + "> inside <" + topLevelRule.name + "> Rule.\n has " + (currOr.definition.length +5133 1) + " alternatives."),5134 type: parser_public_1.ParserDefinitionErrorType.TOO_MANY_ALTS,5135 ruleName: topLevelRule.name,5136 occurrence: currOr.occurrenceInParent5137 });5138 }5139 return errors;5140 }, []);5141 return errors;5142}5143exports.validateTooManyAlts = validateTooManyAlts;5144function validateSomeNonEmptyLookaheadPath(topLevelRules, maxLookahead) {5145 var errors = [];5146 utils_1.forEach(topLevelRules, function (currTopRule) {5147 
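// Illustrative case (hypothetical rule body, not from the bundle) for the
// repetition validation that continues below: a repetition whose body can match
// zero tokens,
//   $.MANY(function () { $.OPTION(function () { $.CONSUME(Comma); }); });
// has only empty lookahead paths inside the MANY, so NO_NON_EMPTY_LOOKAHEAD is
// reported, because entering such a loop would never consume input and never terminate.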
var collectorVisitor = new RepetionCollector();5148 currTopRule.accept(collectorVisitor);5149 var allRuleProductions = collectorVisitor.allProductions;5150 utils_1.forEach(allRuleProductions, function (currProd) {5151 var prodType = lookahead_1.getProdType(currProd);5152 var currOccurrence = currProd.occurrenceInParent;5153 var paths = lookahead_1.getLookaheadPathsForOptionalProd(currOccurrence, currTopRule, prodType, maxLookahead);5154 var pathsInsideProduction = paths[0];5155 if (utils_1.isEmpty(utils_1.flatten(pathsInsideProduction))) {5156 var implicitOccurrence = currProd.implicitOccurrenceIndex;5157 var dslName = gast_1.getProductionDslName(currProd);5158 if (!implicitOccurrence) {5159 dslName += currOccurrence;5160 }5161 var errMsg = "The repetition <" + dslName + "> within Rule <" + currTopRule.name + "> can never consume any tokens.\n" +5162 "This could lead to an infinite loop.";5163 errors.push({5164 message: errMsg,5165 type: parser_public_1.ParserDefinitionErrorType.NO_NON_EMPTY_LOOKAHEAD,5166 ruleName: currTopRule.name5167 });5168 }5169 });5170 });5171 return errors;5172}5173exports.validateSomeNonEmptyLookaheadPath = validateSomeNonEmptyLookaheadPath;5174function checkAlternativesAmbiguities(alternatives, alternation, topRuleName) {5175 var foundAmbiguousPaths = [];5176 var identicalAmbiguities = utils_1.reduce(alternatives, function (result, currAlt, currAltIdx) {5177 utils_1.forEach(currAlt, function (currPath) {5178 var altsCurrPathAppearsIn = [currAltIdx];5179 utils_1.forEach(alternatives, function (currOtherAlt, currOtherAltIdx) {5180 if (currAltIdx !== currOtherAltIdx &&5181 lookahead_1.containsPath(currOtherAlt, currPath)) {5182 altsCurrPathAppearsIn.push(currOtherAltIdx);5183 }5184 });5185 if (altsCurrPathAppearsIn.length > 1 &&5186 !lookahead_1.containsPath(foundAmbiguousPaths, currPath)) {5187 foundAmbiguousPaths.push(currPath);5188 result.push({5189 alts: altsCurrPathAppearsIn,5190 path: currPath5191 });5192 }5193 });5194 return result;5195 }, []);5196 var currErrors = utils.map(identicalAmbiguities, function (currAmbDescriptor) {5197 var ambgIndices = utils_1.map(currAmbDescriptor.alts, function (currAltIdx) { return currAltIdx + 1; });5198 var pathMsg = utils_1.map(currAmbDescriptor.path, function (currtok) {5199 return tokens_public_1.tokenLabel(currtok);5200 }).join(", ");5201 var occurrence = alternation.implicitOccurrenceIndex5202 ? ""5203 : alternation.occurrenceInParent;5204 var currMessage = "Ambiguous alternatives: <" + ambgIndices.join(" ,") + "> in <OR" + occurrence + ">" +5205 (" inside <" + topRuleName + "> Rule,\n") +5206 ("<" + pathMsg + "> may appears as a prefix path in all these alternatives.\n");5207 var docs_version = version_1.VERSION.replace(/\./g, "_");5208 // Should this information be on the error message or in some common errors docs?5209 currMessage =5210 currMessage +5211 "To Resolve this, try one of of the following: \n" +5212 "1. Refactor your grammar to be LL(K) for the current value of k (by default k=5)\n" +5213 "2. Increase the value of K for your grammar by providing a larger 'maxLookahead' value in the parser's config\n" +5214 "3. 
This issue can be ignored (if you know what you are doing...), see" +5215 " http://sap.github.io/chevrotain/documentation/" +5216 docs_version +5217 "/interfaces/_chevrotain_d_.iparserconfig.html#ignoredissues for more" +5218 " details\n";5219 return {5220 message: currMessage,5221 type: parser_public_1.ParserDefinitionErrorType.AMBIGUOUS_ALTS,5222 ruleName: topRuleName,5223 occurrence: alternation.occurrenceInParent,5224 alternatives: [currAmbDescriptor.alts]5225 };5226 });5227 return currErrors;5228}5229function checkPrefixAlternativesAmbiguities(alternatives, alternation, ruleName) {5230 var errors = [];5231 // flatten5232 var pathsAndIndices = utils_1.reduce(alternatives, function (result, currAlt, idx) {5233 var currPathsAndIdx = utils_1.map(currAlt, function (currPath) {5234 return { idx: idx, path: currPath };5235 });5236 return result.concat(currPathsAndIdx);5237 }, []);5238 utils_1.forEach(pathsAndIndices, function (currPathAndIdx) {5239 var targetIdx = currPathAndIdx.idx;5240 var targetPath = currPathAndIdx.path;5241 var prefixAmbiguitiesPathsAndIndices = utils_1.findAll(pathsAndIndices, function (searchPathAndIdx) {5242 // prefix ambiguity can only be created from lower idx (higher priority) path5243 return (searchPathAndIdx.idx < targetIdx &&5244 // checking for strict prefix because identical lookaheads5245 // will be be detected using a different validation.5246 lookahead_1.isStrictPrefixOfPath(searchPathAndIdx.path, targetPath));5247 });5248 var currPathPrefixErrors = utils_1.map(prefixAmbiguitiesPathsAndIndices, function (currAmbPathAndIdx) {5249 var ambgIndices = [currAmbPathAndIdx.idx + 1, targetIdx + 1];5250 var pathMsg = utils_1.map(currAmbPathAndIdx.path, function (currTok) {5251 return tokens_public_1.tokenLabel(currTok);5252 }).join(", ");5253 var occurrence = alternation.implicitOccurrenceIndex5254 ? 
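// Illustrative alternation (hypothetical rule body, not from the bundle) for the
// prefix-ambiguity check here: the first alternative's lookahead path [Identifier]
// is a strict prefix of the second one's [Identifier, Colon],
//   $.OR([
//       { ALT: function () { $.CONSUME(Identifier); } },
//       { ALT: function () { $.CONSUME2(Identifier); $.CONSUME(Colon); } }
//   ]);
// so the lower-priority second alternative can never be selected and
// AMBIGUOUS_PREFIX_ALTS is reported.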
""5255 : alternation.occurrenceInParent;5256 var currMessage = "Ambiguous alternatives: <" + ambgIndices.join(" ,") + "> due to common lookahead prefix\n" +5257 ("in <OR" + occurrence + "> inside <" + ruleName + "> Rule,\n") +5258 ("<" + pathMsg + "> may appears as a prefix path in all these alternatives.\n") +5259 "http://sap.github.io/chevrotain/website/Building_Grammars/resolving_grammar_errors.html#COMMON_PREFIX " +5260 "For farther details.";5261 return {5262 message: currMessage,5263 type: parser_public_1.ParserDefinitionErrorType.AMBIGUOUS_PREFIX_ALTS,5264 ruleName: ruleName,5265 occurrence: occurrence,5266 alternatives: ambgIndices5267 };5268 });5269 errors = errors.concat(currPathPrefixErrors);5270 });5271 return errors;5272}5273function checkTerminalAndNoneTerminalsNameSpace(ruleNames, terminalNames) {5274 var errors = [];5275 utils_1.forEach(ruleNames, function (currRuleName) {5276 if (utils_1.contains(terminalNames, currRuleName)) {5277 var errMsg = "Namespace conflict found in grammar.\n" +5278 ("The grammar has both a Terminal(Token) and a Non-Terminal(Rule) named: <" + currRuleName + ">.\n") +5279 "To resolve this make sure each Terminal and Non-Terminal names are unique\n" +5280 "This is easy to accomplish by using the convention that Terminal names start with an uppercase letter\n" +5281 "and Non-Terminal names start with a lower case letter.";5282 errors.push({5283 message: errMsg,5284 type: parser_public_1.ParserDefinitionErrorType.CONFLICT_TOKENS_RULES_NAMESPACE,5285 ruleName: currRuleName5286 });5287 }5288 });5289 return errors;5290}5291function validateDuplicateNestedRules(topLevelRules) {5292 var errors = [];5293 utils_1.forEach(topLevelRules, function (currTopRule) {5294 var namedCollectorVisitor = new cst_1.NamedDSLMethodsCollectorVisitor("");5295 currTopRule.accept(namedCollectorVisitor);5296 var nestedNames = utils_1.map(namedCollectorVisitor.result, function (currItem) { return currItem.name; });5297 var namesGroups = utils_1.groupBy(nestedNames, function (item) { return item; });5298 var duplicates = utils_1.pick(namesGroups, function (currGroup) {5299 return currGroup.length > 1;5300 });5301 utils_1.forEach(utils_1.values(duplicates), function (currDuplicates) {5302 var duplicateName = utils.first(currDuplicates);5303 var errMsg = "Duplicate nested rule name: ->" + duplicateName + "<- inside rule: ->" + currTopRule.name + "<-\n" +5304 "A nested name must be unique in the scope of a top level grammar rule.";5305 errors.push({5306 message: errMsg,5307 type: parser_public_1.ParserDefinitionErrorType.DUPLICATE_NESTED_NAME,5308 ruleName: currTopRule.name5309 });5310 });5311 });5312 return errors;5313}5314//# sourceMappingURL=checks.js.map5315/***/ }),5316/* 14 */5317/***/ (function(module, exports, __webpack_require__) {5318"use strict";5319var __extends = (this && this.__extends) || (function () {5320 var extendStatics = Object.setPrototypeOf ||5321 ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||5322 function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };5323 return function (d, b) {5324 extendStatics(d, b);5325 function __() { this.constructor = d; }5326 d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
var utils_1 = __webpack_require__(0);
var gast_public_1 = __webpack_require__(1);
var interpreter_1 = __webpack_require__(5);
var rest_1 = __webpack_require__(10);
var tokens_1 = __webpack_require__(4);
var PROD_TYPE;
(function (PROD_TYPE) {
    PROD_TYPE[PROD_TYPE["OPTION"] = 0] = "OPTION";
    PROD_TYPE[PROD_TYPE["REPETITION"] = 1] = "REPETITION";
    PROD_TYPE[PROD_TYPE["REPETITION_MANDATORY"] = 2] = "REPETITION_MANDATORY";
    PROD_TYPE[PROD_TYPE["REPETITION_MANDATORY_WITH_SEPARATOR"] = 3] = "REPETITION_MANDATORY_WITH_SEPARATOR";
    PROD_TYPE[PROD_TYPE["REPETITION_WITH_SEPARATOR"] = 4] = "REPETITION_WITH_SEPARATOR";
    PROD_TYPE[PROD_TYPE["ALTERNATION"] = 5] = "ALTERNATION";
})(PROD_TYPE = exports.PROD_TYPE || (exports.PROD_TYPE = {}));
function getProdType(prod) {
    if (prod instanceof gast_public_1.gast.Option) {
        return PROD_TYPE.OPTION;
    }
    else if (prod instanceof gast_public_1.gast.Repetition) {
        return PROD_TYPE.REPETITION;
    }
    else if (prod instanceof gast_public_1.gast.RepetitionMandatory) {
        return PROD_TYPE.REPETITION_MANDATORY;
    }
    else if (prod instanceof gast_public_1.gast.RepetitionMandatoryWithSeparator) {
        return PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR;
    }
    else if (prod instanceof gast_public_1.gast.RepetitionWithSeparator) {
        return PROD_TYPE.REPETITION_WITH_SEPARATOR;
    }
    else if (prod instanceof gast_public_1.gast.Alternation) {
        return PROD_TYPE.ALTERNATION;
    }
    else {
        /* istanbul ignore next */
        throw Error("non exhaustive match");
    }
}
exports.getProdType = getProdType;
function buildLookaheadFuncForOr(occurrence, ruleGrammar, k, hasPredicates, dynamicTokensEnabled, laFuncBuilder) {
    var lookAheadPaths = getLookaheadPathsForOr(occurrence, ruleGrammar, k);
    var tokenMatcher = areTokenCategoriesNotUsed(lookAheadPaths)
        ? tokens_1.tokenStructuredMatcherNoCategories
        : tokens_1.tokenStructuredMatcher;
    return laFuncBuilder(lookAheadPaths, hasPredicates, tokenMatcher, dynamicTokensEnabled);
}
exports.buildLookaheadFuncForOr = buildLookaheadFuncForOr;
/**
 * When dealing with an Optional production (OPTION/MANY/2nd iteration of AT_LEAST_ONE/...) we need to compare
 * the lookahead "inside" the production and the lookahead immediately "after" it in the same top level rule (context free).
 *
 * Example: given a production:
 * ABC(DE)?DF
 *
 * The optional '(DE)?' should only be entered if we see 'DE'. a single Token 'D' is not sufficient to distinguish between the two
 * alternatives.
 *
 * @returns A Lookahead function which will return true IFF the parser should parse the Optional production.
 */
function buildLookaheadFuncForOptionalProd(occurrence, ruleGrammar, k, dynamicTokensEnabled, prodType, lookaheadBuilder) {
    var lookAheadPaths = getLookaheadPathsForOptionalProd(occurrence, ruleGrammar, prodType, k);
    var tokenMatcher = areTokenCategoriesNotUsed(lookAheadPaths)
        ?
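// Illustrative sketch of the ABC(DE)?DF situation described above (hypothetical
// tokens A..F, not from the bundle):
//   $.CONSUME(A); $.CONSUME(B); $.CONSUME(C);
//   $.OPTION(function () { $.CONSUME(D); $.CONSUME(E); });
//   $.CONSUME2(D); $.CONSUME(F);
// With a single token of lookahead the next "D" cannot decide whether to enter
// the OPTION; the function built here compares the path inside the OPTION ([D, E])
// with the path after it ([D, F]), so two tokens of lookahead are needed.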
tokens_1.tokenStructuredMatcherNoCategories5393 : tokens_1.tokenStructuredMatcher;5394 return lookaheadBuilder(lookAheadPaths[0], tokenMatcher, dynamicTokensEnabled);5395}5396exports.buildLookaheadFuncForOptionalProd = buildLookaheadFuncForOptionalProd;5397function buildAlternativesLookAheadFunc(alts, hasPredicates, tokenMatcher, dynamicTokensEnabled) {5398 var numOfAlts = alts.length;5399 var areAllOneTokenLookahead = utils_1.every(alts, function (currAlt) {5400 return utils_1.every(currAlt, function (currPath) {5401 return currPath.length === 1;5402 });5403 });5404 // This version takes into account the predicates as well.5405 if (hasPredicates) {5406 /**5407 * @returns {number} - The chosen alternative index5408 */5409 return function (orAlts) {5410 // unfortunately the predicates must be extracted every single time5411 // as they cannot be cached due to keep references to parameters(vars) which are no longer valid.5412 // note that in the common case of no predicates, no cpu time will be wasted on this (see else block)5413 var predicates = utils_1.map(orAlts, function (currAlt) { return currAlt.GATE; });5414 for (var t = 0; t < numOfAlts; t++) {5415 var currAlt = alts[t];5416 var currNumOfPaths = currAlt.length;5417 var currPredicate = predicates[t];5418 if (currPredicate && !currPredicate.call(this)) {5419 // if the predicate does not match there is no point in checking the paths5420 continue;5421 }5422 nextPath: for (var j = 0; j < currNumOfPaths; j++) {5423 var currPath = currAlt[j];5424 var currPathLength = currPath.length;5425 for (var i = 0; i < currPathLength; i++) {5426 var nextToken = this.LA(i + 1);5427 if (tokenMatcher(nextToken, currPath[i]) === false) {5428 // mismatch in current path5429 // try the next pth5430 continue nextPath;5431 }5432 }5433 // found a full path that matches.5434 // this will also work for an empty ALT as the loop will be skipped5435 return t;5436 }5437 // none of the paths for the current alternative matched5438 // try the next alternative5439 }5440 // none of the alternatives could be matched5441 return undefined;5442 };5443 }5444 else if (areAllOneTokenLookahead && !dynamicTokensEnabled) {5445 // optimized (common) case of all the lookaheads paths requiring only5446 // a single token lookahead. 
These Optimizations cannot work if dynamically defined Tokens are used.5447 var singleTokenAlts = utils_1.map(alts, function (currAlt) {5448 return utils_1.flatten(currAlt);5449 });5450 var choiceToAlt_1 = utils_1.reduce(singleTokenAlts, function (result, currAlt, idx) {5451 utils_1.forEach(currAlt, function (currTokType) {5452 if (!utils_1.has(result, currTokType.tokenTypeIdx)) {5453 result[currTokType.tokenTypeIdx] = idx;5454 }5455 utils_1.forEach(currTokType.categoryMatches, function (currExtendingType) {5456 if (!utils_1.has(result, currExtendingType)) {5457 result[currExtendingType] = idx;5458 }5459 });5460 });5461 return result;5462 }, {});5463 /**5464 * @returns {number} - The chosen alternative index5465 */5466 return function () {5467 var nextToken = this.LA(1);5468 return choiceToAlt_1[nextToken.tokenTypeIdx];5469 };5470 }5471 else {5472 // optimized lookahead without needing to check the predicates at all.5473 // this causes code duplication which is intentional to improve performance.5474 /**5475 * @returns {number} - The chosen alternative index5476 */5477 return function () {5478 for (var t = 0; t < numOfAlts; t++) {5479 var currAlt = alts[t];5480 var currNumOfPaths = currAlt.length;5481 nextPath: for (var j = 0; j < currNumOfPaths; j++) {5482 var currPath = currAlt[j];5483 var currPathLength = currPath.length;5484 for (var i = 0; i < currPathLength; i++) {5485 var nextToken = this.LA(i + 1);5486 if (tokenMatcher(nextToken, currPath[i]) === false) {5487 // mismatch in current path5488 // try the next pth5489 continue nextPath;5490 }5491 }5492 // found a full path that matches.5493 // this will also work for an empty ALT as the loop will be skipped5494 return t;5495 }5496 // none of the paths for the current alternative matched5497 // try the next alternative5498 }5499 // none of the alternatives could be matched5500 return undefined;5501 };5502 }5503}5504exports.buildAlternativesLookAheadFunc = buildAlternativesLookAheadFunc;5505function buildSingleAlternativeLookaheadFunction(alt, tokenMatcher, dynamicTokensEnabled) {5506 var areAllOneTokenLookahead = utils_1.every(alt, function (currPath) {5507 return currPath.length === 1;5508 });5509 var numOfPaths = alt.length;5510 // optimized (common) case of all the lookaheads paths requiring only5511 // a single token lookahead.5512 if (areAllOneTokenLookahead && !dynamicTokensEnabled) {5513 var singleTokensTypes = utils_1.flatten(alt);5514 if (singleTokensTypes.length === 1 &&5515 utils_1.isEmpty(singleTokensTypes[0].categoryMatches)) {5516 var expectedTokenType = singleTokensTypes[0];5517 var expectedTokenUniqueKey_1 = expectedTokenType.tokenTypeIdx;5518 return function () {5519 return this.LA(1).tokenTypeIdx === expectedTokenUniqueKey_1;5520 };5521 }5522 else {5523 var choiceToAlt_2 = utils_1.reduce(singleTokensTypes, function (result, currTokType, idx) {5524 result[currTokType.tokenTypeIdx] = true;5525 utils_1.forEach(currTokType.categoryMatches, function (currExtendingType) {5526 result[currExtendingType] = true;5527 });5528 return result;5529 }, {});5530 return function () {5531 var nextToken = this.LA(1);5532 return choiceToAlt_2[nextToken.tokenTypeIdx] === true;5533 };5534 }5535 }5536 else {5537 return function () {5538 nextPath: for (var j = 0; j < numOfPaths; j++) {5539 var currPath = alt[j];5540 var currPathLength = currPath.length;5541 for (var i = 0; i < currPathLength; i++) {5542 var nextToken = this.LA(i + 1);5543 if (tokenMatcher(nextToken, currPath[i]) === false) {5544 // mismatch in current path5545 // try the next 
pth5546 continue nextPath;5547 }5548 }5549 // found a full path that matches.5550 return true;5551 }5552 // none of the paths matched5553 return false;5554 };5555 }5556}5557exports.buildSingleAlternativeLookaheadFunction = buildSingleAlternativeLookaheadFunction;5558var RestDefinitionFinderWalker = /** @class */ (function (_super) {5559 __extends(RestDefinitionFinderWalker, _super);5560 function RestDefinitionFinderWalker(topProd, targetOccurrence, targetProdType) {5561 var _this = _super.call(this) || this;5562 _this.topProd = topProd;5563 _this.targetOccurrence = targetOccurrence;5564 _this.targetProdType = targetProdType;5565 return _this;5566 }5567 RestDefinitionFinderWalker.prototype.startWalking = function () {5568 this.walk(this.topProd);5569 return this.restDef;5570 };5571 RestDefinitionFinderWalker.prototype.checkIsTarget = function (node, expectedProdType, currRest, prevRest) {5572 if (node.occurrenceInParent === this.targetOccurrence &&5573 this.targetProdType === expectedProdType) {5574 this.restDef = currRest.concat(prevRest);5575 return true;5576 }5577 // performance optimization, do not iterate over the entire Grammar ast after we have found the target5578 return false;5579 };5580 RestDefinitionFinderWalker.prototype.walkOption = function (optionProd, currRest, prevRest) {5581 if (!this.checkIsTarget(optionProd, PROD_TYPE.OPTION, currRest, prevRest)) {5582 _super.prototype.walkOption.call(this, optionProd, currRest, prevRest);5583 }5584 };5585 RestDefinitionFinderWalker.prototype.walkAtLeastOne = function (atLeastOneProd, currRest, prevRest) {5586 if (!this.checkIsTarget(atLeastOneProd, PROD_TYPE.REPETITION_MANDATORY, currRest, prevRest)) {5587 _super.prototype.walkOption.call(this, atLeastOneProd, currRest, prevRest);5588 }5589 };5590 RestDefinitionFinderWalker.prototype.walkAtLeastOneSep = function (atLeastOneSepProd, currRest, prevRest) {5591 if (!this.checkIsTarget(atLeastOneSepProd, PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR, currRest, prevRest)) {5592 _super.prototype.walkOption.call(this, atLeastOneSepProd, currRest, prevRest);5593 }5594 };5595 RestDefinitionFinderWalker.prototype.walkMany = function (manyProd, currRest, prevRest) {5596 if (!this.checkIsTarget(manyProd, PROD_TYPE.REPETITION, currRest, prevRest)) {5597 _super.prototype.walkOption.call(this, manyProd, currRest, prevRest);5598 }5599 };5600 RestDefinitionFinderWalker.prototype.walkManySep = function (manySepProd, currRest, prevRest) {5601 if (!this.checkIsTarget(manySepProd, PROD_TYPE.REPETITION_WITH_SEPARATOR, currRest, prevRest)) {5602 _super.prototype.walkOption.call(this, manySepProd, currRest, prevRest);5603 }5604 };5605 return RestDefinitionFinderWalker;5606}(rest_1.RestWalker));5607/**5608 * Returns the definition of a target production in a top level level rule.5609 */5610var InsideDefinitionFinderVisitor = /** @class */ (function (_super) {5611 __extends(InsideDefinitionFinderVisitor, _super);5612 function InsideDefinitionFinderVisitor(targetOccurrence, targetProdType) {5613 var _this = _super.call(this) || this;5614 _this.targetOccurrence = targetOccurrence;5615 _this.targetProdType = targetProdType;5616 _this.result = [];5617 return _this;5618 }5619 InsideDefinitionFinderVisitor.prototype.checkIsTarget = function (node, expectedProdName) {5620 if (node.occurrenceInParent === this.targetOccurrence &&5621 this.targetProdType === expectedProdName) {5622 this.result = node.definition;5623 }5624 };5625 InsideDefinitionFinderVisitor.prototype.visitOption = function (node) {5626 
this.checkIsTarget(node, PROD_TYPE.OPTION);5627 };5628 InsideDefinitionFinderVisitor.prototype.visitRepetition = function (node) {5629 this.checkIsTarget(node, PROD_TYPE.REPETITION);5630 };5631 InsideDefinitionFinderVisitor.prototype.visitRepetitionMandatory = function (node) {5632 this.checkIsTarget(node, PROD_TYPE.REPETITION_MANDATORY);5633 };5634 InsideDefinitionFinderVisitor.prototype.visitRepetitionMandatoryWithSeparator = function (node) {5635 this.checkIsTarget(node, PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR);5636 };5637 InsideDefinitionFinderVisitor.prototype.visitRepetitionWithSeparator = function (node) {5638 this.checkIsTarget(node, PROD_TYPE.REPETITION_WITH_SEPARATOR);5639 };5640 InsideDefinitionFinderVisitor.prototype.visitAlternation = function (node) {5641 this.checkIsTarget(node, PROD_TYPE.ALTERNATION);5642 };5643 return InsideDefinitionFinderVisitor;5644}(gast_public_1.gast.GAstVisitor));5645function lookAheadSequenceFromAlternatives(altsDefs, k) {5646 function getOtherPaths(pathsAndSuffixes, filterIdx) {5647 return utils_1.reduce(pathsAndSuffixes, function (result, currPathsAndSuffixes, currIdx) {5648 if (currIdx !== filterIdx) {5649 var currPartialPaths = utils_1.map(currPathsAndSuffixes, function (singlePathAndSuffix) { return singlePathAndSuffix.partialPath; });5650 return result.concat(currPartialPaths);5651 }5652 return result;5653 }, []);5654 }5655 function isUniquePrefix(arr, item) {5656 return (utils_1.find(arr, function (currOtherPath) {5657 return utils_1.every(item, function (currPathTok, idx) { return currPathTok === currOtherPath[idx]; });5658 }) === undefined);5659 }5660 function initializeArrayOfArrays(size) {5661 var result = [];5662 for (var i = 0; i < size; i++) {5663 result.push([]);5664 }5665 return result;5666 }5667 var partialAlts = utils_1.map(altsDefs, function (currAlt) { return interpreter_1.possiblePathsFrom([currAlt], 1); });5668 var finalResult = initializeArrayOfArrays(partialAlts.length);5669 var newData = partialAlts;5670 // maxLookahead loop5671 for (var pathLength = 1; pathLength <= k; pathLength++) {5672 var currDataset = newData;5673 newData = initializeArrayOfArrays(currDataset.length);5674 // alternatives loop5675 for (var resultIdx = 0; resultIdx < currDataset.length; resultIdx++) {5676 var currAltPathsAndSuffixes = currDataset[resultIdx];5677 var otherPaths = getOtherPaths(currDataset, resultIdx);5678 // paths in current alternative loop5679 for (var currPathIdx = 0; currPathIdx < currAltPathsAndSuffixes.length; currPathIdx++) {5680 var currPathPrefix = currAltPathsAndSuffixes[currPathIdx].partialPath;5681 var suffixDef = currAltPathsAndSuffixes[currPathIdx].suffixDef;5682 var isUnique = isUniquePrefix(otherPaths, currPathPrefix);5683 // even if a path is not unique, but there are no longer alternatives to try5684 // or if we have reached the maximum lookahead (k) permitted.5685 if (isUnique ||5686 utils_1.isEmpty(suffixDef) ||5687 currPathPrefix.length === k) {5688 var currAltResult = finalResult[resultIdx];5689 if (!containsPath(currAltResult, currPathPrefix)) {5690 currAltResult.push(currPathPrefix);5691 }5692 }5693 else {5694 var newPartialPathsAndSuffixes = interpreter_1.possiblePathsFrom(suffixDef, pathLength + 1, currPathPrefix);5695 newData[resultIdx] = newData[resultIdx].concat(newPartialPathsAndSuffixes);5696 }5697 }5698 }5699 }5700 return finalResult;5701}5702exports.lookAheadSequenceFromAlternatives = lookAheadSequenceFromAlternatives;5703function getLookaheadPathsForOr(occurrence, ruleGrammar, k) {5704 var visitor = 
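// Worked example (illustrative, not from the bundle) of lookAheadSequenceFromAlternatives
// above: for two alternatives whose token sequences are [Plus, Int] and [Plus, Float],
// the 1-token prefixes are both [Plus] and therefore not unique, so the k-loop
// extends them, producing [[Plus, Int]] and [[Plus, Float]] as the final lookahead
// paths (provided k >= 2; with k = 1 the identical prefixes are kept as-is and are
// later reported as ambiguous alternatives).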
new InsideDefinitionFinderVisitor(occurrence, PROD_TYPE.ALTERNATION);
    ruleGrammar.accept(visitor);
    return lookAheadSequenceFromAlternatives(visitor.result, k);
}
exports.getLookaheadPathsForOr = getLookaheadPathsForOr;
function getLookaheadPathsForOptionalProd(occurrence, ruleGrammar, prodType, k) {
    var insideDefVisitor = new InsideDefinitionFinderVisitor(occurrence, prodType);
    ruleGrammar.accept(insideDefVisitor);
    var insideDef = insideDefVisitor.result;
    var afterDefWalker = new RestDefinitionFinderWalker(ruleGrammar, occurrence, prodType);
    var afterDef = afterDefWalker.startWalking();
    var insideFlat = new gast_public_1.gast.Flat(insideDef);
    var afterFlat = new gast_public_1.gast.Flat(afterDef);
    return lookAheadSequenceFromAlternatives([insideFlat, afterFlat], k);
}
exports.getLookaheadPathsForOptionalProd = getLookaheadPathsForOptionalProd;
function containsPath(alternative, path) {
    var found = utils_1.find(alternative, function (otherPath) {
        return (path.length === otherPath.length &&
            utils_1.every(path, function (targetItem, idx) {
                return targetItem === otherPath[idx];
            }));
    });
    return found !== undefined;
}
exports.containsPath = containsPath;
function isStrictPrefixOfPath(prefix, other) {
    return (prefix.length < other.length &&
        utils_1.every(prefix, function (tokType, idx) {
            return tokType === other[idx];
        }));
}
exports.isStrictPrefixOfPath = isStrictPrefixOfPath;
function areTokenCategoriesNotUsed(lookAheadPaths) {
    return utils_1.every(lookAheadPaths, function (singleAltPaths) {
        return utils_1.every(singleAltPaths, function (singlePath) {
            return utils_1.every(singlePath, function (token) { return utils_1.isEmpty(token.categoryMatches); });
        });
    });
}
exports.areTokenCategoriesNotUsed = areTokenCategoriesNotUsed;
//# sourceMappingURL=lookahead.js.map

/***/ }),
/* 15 */
/***/ (function(module, exports, __webpack_require__) {

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var gast_public_1 = __webpack_require__(1);
var gast_1 = __webpack_require__(9);
var utils_1 = __webpack_require__(0);
function first(prod) {
    if (prod instanceof gast_public_1.gast.NonTerminal) {
        // this could in theory cause infinite loops if
        // (1) prod A refs prod B.
        // (2) prod B refs prod A
        // (3) AB can match the empty set
        // in other words a cycle where everything is optional so the first will keep
        // looking ahead for the next optional part and will never exit
        // currently there is no safeguard for this unique edge case because
        // (1) not sure a grammar in which this can happen is useful for anything (productive)
        return first(prod.referencedRule);
    }
    else if (prod instanceof gast_public_1.gast.Terminal) {
        return firstForTerminal(prod);
    }
    else if (gast_1.isSequenceProd(prod)) {
        return firstForSequence(prod);
    }
    else if (gast_1.isBranchingProd(prod)) {
        return firstForBranching(prod);
    }
    else {
        /* istanbul ignore next */
        throw Error("non exhaustive match");
    }
}
exports.first = first;
function firstForSequence(prod) {
    var firstSet = [];
    var seq = prod.definition;
    var nextSubProdIdx = 0;
    var hasInnerProdsRemaining = seq.length > nextSubProdIdx;
    var currSubProd;
    // so we enter the loop at least once (if the definition is not empty)
    var isLastInnerProdOptional = true;
    // scan a sequence
until it's end or until we have found a NONE optional production in it5790 while (hasInnerProdsRemaining && isLastInnerProdOptional) {5791 currSubProd = seq[nextSubProdIdx];5792 isLastInnerProdOptional = gast_1.isOptionalProd(currSubProd);5793 firstSet = firstSet.concat(first(currSubProd));5794 nextSubProdIdx = nextSubProdIdx + 1;5795 hasInnerProdsRemaining = seq.length > nextSubProdIdx;5796 }5797 return utils_1.uniq(firstSet);5798}5799exports.firstForSequence = firstForSequence;5800function firstForBranching(prod) {5801 var allAlternativesFirsts = utils_1.map(prod.definition, function (innerProd) {5802 return first(innerProd);5803 });5804 return utils_1.uniq(utils_1.flatten(allAlternativesFirsts));5805}5806exports.firstForBranching = firstForBranching;5807function firstForTerminal(terminal) {5808 return [terminal.terminalType];5809}5810exports.firstForTerminal = firstForTerminal;5811//# sourceMappingURL=first.js.map5812/***/ }),5813/* 16 */5814/***/ (function(module, exports, __webpack_require__) {5815"use strict";5816var __extends = (this && this.__extends) || (function () {5817 var extendStatics = Object.setPrototypeOf ||5818 ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||5819 function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };5820 return function (d, b) {5821 extendStatics(d, b);5822 function __() { this.constructor = d; }5823 d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());5824 };5825})();5826Object.defineProperty(exports, "__esModule", { value: true });5827var tokens_public_1 = __webpack_require__(2);5828var gast_public_1 = __webpack_require__(1);5829var utils_1 = __webpack_require__(0);5830var lang_extensions_1 = __webpack_require__(3);5831var keys_1 = __webpack_require__(17);5832var GAstVisitor = gast_public_1.gast.GAstVisitor;5833function addTerminalToCst(node, token, tokenTypeName) {5834 ;5835 node.children[tokenTypeName].push(token);5836}5837exports.addTerminalToCst = addTerminalToCst;5838function addNoneTerminalToCst(node, ruleName, ruleResult) {5839 ;5840 node.children[ruleName].push(ruleResult);5841}5842exports.addNoneTerminalToCst = addNoneTerminalToCst;5843var NamedDSLMethodsCollectorVisitor = /** @class */ (function (_super) {5844 __extends(NamedDSLMethodsCollectorVisitor, _super);5845 function NamedDSLMethodsCollectorVisitor(ruleIdx) {5846 var _this = _super.call(this) || this;5847 _this.result = [];5848 _this.ruleIdx = ruleIdx;5849 return _this;5850 }5851 NamedDSLMethodsCollectorVisitor.prototype.collectNamedDSLMethod = function (node, newNodeConstructor, methodIdx) {5852 if (!utils_1.isUndefined(node.name)) {5853 // copy without name so this will indeed be processed later.5854 var nameLessNode = void 0;5855 if (utils_1.has(node, "separator")) {5856 // hack to avoid code duplication and refactoring the Gast type declaration / constructors arguments order.5857 nameLessNode = new newNodeConstructor(node.definition, node.separator, node.occurrenceInParent);5858 }5859 else {5860 nameLessNode = new newNodeConstructor(node.definition, node.occurrenceInParent);5861 }5862 var def = [nameLessNode];5863 var key = keys_1.getKeyForAutomaticLookahead(this.ruleIdx, methodIdx, node.occurrenceInParent);5864 this.result.push({ def: def, key: key, name: node.name });5865 }5866 };5867 NamedDSLMethodsCollectorVisitor.prototype.visitOption = function (node) {5868 this.collectNamedDSLMethod(node, gast_public_1.gast.Option, keys_1.OPTION_IDX);5869 };5870 
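// Illustrative sketch (hypothetical rule body, not from the bundle) of what this
// visitor collects: DSL methods that were given an explicit name, e.g.
//   $.OPTION({ NAME: "$header", DEF: function () { $.CONSUME(HeaderTok); } });
// Each named method is pushed into `result` as { def, key, name: "$header" } and
// the name must satisfy validNestedRuleName, i.e. it has to start with "$".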
NamedDSLMethodsCollectorVisitor.prototype.visitRepetition = function (node) {5871 this.collectNamedDSLMethod(node, gast_public_1.gast.Repetition, keys_1.MANY_IDX);5872 };5873 NamedDSLMethodsCollectorVisitor.prototype.visitRepetitionMandatory = function (node) {5874 this.collectNamedDSLMethod(node, gast_public_1.gast.RepetitionMandatory, keys_1.AT_LEAST_ONE_IDX);5875 };5876 NamedDSLMethodsCollectorVisitor.prototype.visitRepetitionMandatoryWithSeparator = function (node) {5877 this.collectNamedDSLMethod(node, gast_public_1.gast.RepetitionMandatoryWithSeparator, keys_1.AT_LEAST_ONE_SEP_IDX);5878 };5879 NamedDSLMethodsCollectorVisitor.prototype.visitRepetitionWithSeparator = function (node) {5880 this.collectNamedDSLMethod(node, gast_public_1.gast.RepetitionWithSeparator, keys_1.MANY_SEP_IDX);5881 };5882 NamedDSLMethodsCollectorVisitor.prototype.visitAlternation = function (node) {5883 var _this = this;5884 this.collectNamedDSLMethod(node, gast_public_1.gast.Alternation, keys_1.OR_IDX);5885 var hasMoreThanOneAlternative = node.definition.length > 1;5886 utils_1.forEach(node.definition, function (currFlatAlt, altIdx) {5887 if (!utils_1.isUndefined(currFlatAlt.name)) {5888 var def = currFlatAlt.definition;5889 if (hasMoreThanOneAlternative) {5890 def = [new gast_public_1.gast.Option(currFlatAlt.definition)];5891 }5892 else {5893 // mandatory5894 def = currFlatAlt.definition;5895 }5896 var key = keys_1.getKeyForAltIndex(_this.ruleIdx, keys_1.OR_IDX, node.occurrenceInParent, altIdx);5897 _this.result.push({5898 def: def,5899 key: key,5900 name: currFlatAlt.name5901 });5902 }5903 });5904 };5905 return NamedDSLMethodsCollectorVisitor;5906}(GAstVisitor));5907exports.NamedDSLMethodsCollectorVisitor = NamedDSLMethodsCollectorVisitor;5908function analyzeCst(topRules, fullToShortName) {5909 var result = { dictDef: new lang_extensions_1.HashTable(), allRuleNames: [] };5910 utils_1.forEach(topRules, function (currTopRule) {5911 var currChildrenNames = buildChildDictionaryDef(currTopRule.definition);5912 var currTopRuleShortName = fullToShortName.get(currTopRule.name);5913 result.dictDef.put(currTopRuleShortName, buildInitDefFunc(currChildrenNames));5914 result.allRuleNames.push(currTopRule.name);5915 var namedCollectorVisitor = new NamedDSLMethodsCollectorVisitor(currTopRuleShortName);5916 currTopRule.accept(namedCollectorVisitor);5917 utils_1.forEach(namedCollectorVisitor.result, function (_a) {5918 var def = _a.def, key = _a.key, name = _a.name;5919 var currNestedChildrenNames = buildChildDictionaryDef(def);5920 result.dictDef.put(key, buildInitDefFunc(currNestedChildrenNames));5921 result.allRuleNames.push(currTopRule.name + name);5922 });5923 });5924 return result;5925}5926exports.analyzeCst = analyzeCst;5927function buildInitDefFunc(childrenNames) {5928 var funcString = "return {\n";5929 funcString += utils_1.map(childrenNames, function (currName) { return "\"" + currName + "\" : []"; }).join(",\n");5930 funcString += "}";5931 // major performance optimization, faster to create the children dictionary this way5932 // versus iterating over the childrenNames each time.5933 return Function(funcString);5934}5935function buildChildDictionaryDef(initialDef) {5936 var result = [];5937 var possiblePaths = [];5938 possiblePaths.push({ def: initialDef });5939 var currDef;5940 var currInIteration;5941 var currInOption;5942 var currResult;5943 function addSingleItemToResult(itemName) {5944 result.push(itemName);5945 var nextPath = {5946 def: utils_1.drop(currDef),5947 inIteration: currInIteration,5948 inOption: 
currInOption,5949 currResult: utils_1.cloneObj(currResult)5950 };5951 possiblePaths.push(nextPath);5952 }5953 while (!utils_1.isEmpty(possiblePaths)) {5954 var currPath = possiblePaths.pop();5955 currDef = currPath.def;5956 currInIteration = currPath.inIteration;5957 currInOption = currPath.inOption;5958 currResult = currPath.currResult;5959 // For Example: an empty path could exist in a valid grammar in the case of an EMPTY_ALT5960 if (utils_1.isEmpty(currDef)) {5961 continue;5962 }5963 var prod = currDef[0];5964 if (prod instanceof gast_public_1.gast.Terminal) {5965 var terminalName = tokens_public_1.tokenName(prod.terminalType);5966 addSingleItemToResult(terminalName);5967 }5968 else if (prod instanceof gast_public_1.gast.NonTerminal) {5969 var nonTerminalName = prod.nonTerminalName;5970 addSingleItemToResult(nonTerminalName);5971 }5972 else if (prod instanceof gast_public_1.gast.Option) {5973 if (!utils_1.isUndefined(prod.name)) {5974 addSingleItemToResult(prod.name);5975 }5976 else {5977 var nextPathWith = {5978 def: prod.definition.concat(utils_1.drop(currDef))5979 };5980 possiblePaths.push(nextPathWith);5981 }5982 }5983 else if (prod instanceof gast_public_1.gast.RepetitionMandatory ||5984 prod instanceof gast_public_1.gast.Repetition) {5985 if (!utils_1.isUndefined(prod.name)) {5986 addSingleItemToResult(prod.name);5987 }5988 else {5989 var nextDef = prod.definition.concat(utils_1.drop(currDef));5990 var nextPath = {5991 def: nextDef5992 };5993 possiblePaths.push(nextPath);5994 }5995 }5996 else if (prod instanceof gast_public_1.gast.RepetitionMandatoryWithSeparator ||5997 prod instanceof gast_public_1.gast.RepetitionWithSeparator) {5998 if (!utils_1.isUndefined(prod.name)) {5999 addSingleItemToResult(prod.name);6000 }6001 else {6002 var separatorGast = new gast_public_1.gast.Terminal(prod.separator);6003 var secondIteration = new gast_public_1.gast.Repetition([separatorGast].concat(prod.definition), prod.occurrenceInParent);6004 // Hack: X (, X)* --> (, X) because it is identical in terms of identifying "isCollection?"6005 var nextDef = [secondIteration].concat(utils_1.drop(currDef));6006 var nextPath = {6007 def: nextDef6008 };6009 possiblePaths.push(nextPath);6010 }6011 }6012 else if (prod instanceof gast_public_1.gast.Alternation) {6013 /* istanbul ignore else */6014 // IGNORE ABOVE ELSE6015 if (!utils_1.isUndefined(prod.name)) {6016 addSingleItemToResult(prod.name);6017 }6018 else {6019 // the order of alternatives is meaningful, FILO (Last path will be traversed first).6020 for (var i = prod.definition.length - 1; i >= 0; i--) {6021 var currAlt = prod.definition[i];6022 // named alternatives6023 if (!utils_1.isUndefined(currAlt.name)) {6024 addSingleItemToResult(currAlt.name);6025 }6026 else {6027 var newDef = currAlt.definition.concat(utils_1.drop(currDef));6028 var currAltPath = {6029 def: newDef6030 };6031 possiblePaths.push(currAltPath);6032 }6033 }6034 }6035 }6036 else {6037 /* istanbul ignore next */ throw Error("non exhaustive match");6038 }6039 }6040 return result;6041}6042exports.buildChildDictionaryDef = buildChildDictionaryDef;6043//# sourceMappingURL=cst.js.map6044/***/ }),6045/* 17 */6046/***/ (function(module, exports, __webpack_require__) {6047"use strict";6048// Lookahead keys are 32Bit integers in the form6049// TTTTTTTTT-ZZZZZZZZZZZZZZZ-YYYY-XXXX6050// XXXX -> Occurrence Index bitmap.6051// YYYY -> DSL Method Name bitmap.6052// ZZZZZZZZZZZZZZZ -> Rule short Index bitmap.6053// TTTTTTTTT -> alternation alternative index bitmap6054Object.defineProperty(exports, 
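buildInitDefFunc in the CST analysis code above turns a list of child names into a generated function (via the Function constructor) that returns a fresh { name: [] } dictionary, which is cheaper than rebuilding the object key by key for every CST node. A small sketch of the same trick; the child names are made up:

// Sketch of generating an object-initializer function from a list of keys,
// in the spirit of buildInitDefFunc above.
function buildInitDefFunc(childrenNames) {
    var funcString = "return {\n";
    funcString += childrenNames
        .map(function (currName) { return "\"" + currName + "\" : []"; })
        .join(",\n");
    funcString += "}";
    // The generated function allocates the whole children dictionary in one go.
    return Function(funcString);
}

var initChildren = buildInitDefFunc(["Identifier", "Comma", "expression"]);
console.log(initChildren());                    // { Identifier: [], Comma: [], expression: [] }
console.log(initChildren() === initChildren()); // false -- a fresh object on every call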
"__esModule", { value: true });6055exports.BITS_FOR_METHOD_IDX = 4;6056exports.BITS_FOR_OCCURRENCE_IDX = 4;6057exports.BITS_FOR_RULE_IDX = 24;6058// TODO: validation, this means that there may at most 2^8 --> 256 alternatives for an alternation.6059exports.BITS_FOR_ALT_IDX = 8;6060// short string used as part of mapping keys.6061// being short improves the performance when composing KEYS for maps out of these6062// The 5 - 8 bits (16 possible values, are reserved for the DSL method indices)6063/* tslint:disable */6064exports.OR_IDX = 1 << exports.BITS_FOR_METHOD_IDX;6065exports.OPTION_IDX = 2 << exports.BITS_FOR_METHOD_IDX;6066exports.MANY_IDX = 3 << exports.BITS_FOR_METHOD_IDX;6067exports.AT_LEAST_ONE_IDX = 4 << exports.BITS_FOR_METHOD_IDX;6068exports.MANY_SEP_IDX = 5 << exports.BITS_FOR_METHOD_IDX;6069exports.AT_LEAST_ONE_SEP_IDX = 6 << exports.BITS_FOR_METHOD_IDX;6070/* tslint:enable */6071// this actually returns a number, but it is always used as a string (object prop key)6072function getKeyForAutomaticLookahead(ruleIdx, dslMethodIdx, occurrence) {6073 /* tslint:disable */6074 return occurrence | dslMethodIdx | ruleIdx;6075 /* tslint:enable */6076}6077exports.getKeyForAutomaticLookahead = getKeyForAutomaticLookahead;6078var BITS_START_FOR_ALT_IDX = 32 - exports.BITS_FOR_ALT_IDX;6079function getKeyForAltIndex(ruleIdx, dslMethodIdx, occurrence, altIdx) {6080 /* tslint:disable */6081 // alternative indices are zero based, thus must always add one (turn on one bit) to guarantee uniqueness.6082 var altIdxBitMap = (altIdx + 1) << BITS_START_FOR_ALT_IDX;6083 return (getKeyForAutomaticLookahead(ruleIdx, dslMethodIdx, occurrence) |6084 altIdxBitMap);6085 /* tslint:enable */6086}6087exports.getKeyForAltIndex = getKeyForAltIndex;6088//# sourceMappingURL=keys.js.map6089/***/ }),6090/* 18 */6091/***/ (function(module, exports, __webpack_require__) {6092"use strict";6093Object.defineProperty(exports, "__esModule", { value: true });6094// TODO: can this be removed? where is it used?6095exports.IN = "_~IN~_";6096//# sourceMappingURL=constants.js.map6097/***/ }),6098/* 19 */6099/***/ (function(module, exports, __webpack_require__) {6100"use strict";6101Object.defineProperty(exports, "__esModule", { value: true });6102var tokens_public_1 = __webpack_require__(2);6103var utils_1 = __webpack_require__(0);6104/**6105 * This is the default logic Chevrotain uses to construct error messages.6106 * When constructing a custom error message provider it may be used as a reference6107 * or reused.6108 */6109exports.defaultErrorProvider = {6110 buildMismatchTokenMessage: function (_a) {6111 var expected = _a.expected, actual = _a.actual, ruleName = _a.ruleName;6112 var hasLabel = tokens_public_1.hasTokenLabel(expected);6113 var expectedMsg = hasLabel6114 ? "--> " + tokens_public_1.tokenLabel(expected) + " <--"6115 : "token of type --> " + tokens_public_1.tokenName(expected) + " <--";6116 var msg = "Expecting " + expectedMsg + " but found --> '" + actual.image + "' <--";6117 return msg;6118 },6119 buildNotAllInputParsedMessage: function (_a) {6120 var firstRedundant = _a.firstRedundant, ruleName = _a.ruleName;6121 return ("Redundant input, expecting EOF but found: " + firstRedundant.image);6122 },6123 buildNoViableAltMessage: function (_a) {6124 var expectedPathsPerAlt = _a.expectedPathsPerAlt, actual = _a.actual, customUserDescription = _a.customUserDescription, ruleName = _a.ruleName;6125 var errPrefix = "Expecting: ";6126 // TODO: issue: No Viable Alternative Error may have incomplete details. 
#5026127 var actualText = utils_1.first(actual).image;6128 var errSuffix = "\nbut found: '" + actualText + "'";6129 if (customUserDescription) {6130 return errPrefix + customUserDescription + errSuffix;6131 }6132 else {6133 var allLookAheadPaths = utils_1.reduce(expectedPathsPerAlt, function (result, currAltPaths) { return result.concat(currAltPaths); }, []);6134 var nextValidTokenSequences = utils_1.map(allLookAheadPaths, function (currPath) {6135 return "[" + utils_1.map(currPath, function (currTokenType) {6136 return tokens_public_1.tokenLabel(currTokenType);6137 }).join(", ") + "]";6138 });6139 var nextValidSequenceItems = utils_1.map(nextValidTokenSequences, function (itemMsg, idx) { return " " + (idx + 1) + ". " + itemMsg; });6140 var calculatedDescription = "one of these possible Token sequences:\n" + nextValidSequenceItems.join("\n");6141 return errPrefix + calculatedDescription + errSuffix;6142 }6143 },6144 buildEarlyExitMessage: function (_a) {6145 var expectedIterationPaths = _a.expectedIterationPaths, actual = _a.actual, customUserDescription = _a.customUserDescription, ruleName = _a.ruleName;6146 var errPrefix = "Expecting: ";6147 // TODO: issue: No Viable Alternative Error may have incomplete details. #5026148 var actualText = utils_1.first(actual).image;6149 var errSuffix = "\nbut found: '" + actualText + "'";6150 if (customUserDescription) {6151 return errPrefix + customUserDescription + errSuffix;6152 }6153 else {6154 var nextValidTokenSequences = utils_1.map(expectedIterationPaths, function (currPath) {6155 return "[" + utils_1.map(currPath, function (currTokenType) {6156 return tokens_public_1.tokenLabel(currTokenType);6157 }).join(",") + "]";6158 });6159 var calculatedDescription = "expecting at least one iteration which starts with one of these possible Token sequences::\n " +6160 ("<" + nextValidTokenSequences.join(" ,") + ">");6161 return errPrefix + calculatedDescription + errSuffix;6162 }6163 }6164};6165Object.freeze(exports.defaultErrorProvider);6166//# sourceMappingURL=errors_public.js.map6167/***/ }),6168/* 20 */6169/***/ (function(module, exports, __webpack_require__) {6170"use strict";6171Object.defineProperty(exports, "__esModule", { value: true });6172var parser_public_1 = __webpack_require__(6);6173var lexer_public_1 = __webpack_require__(8);6174var tokens_public_1 = __webpack_require__(2);6175var exceptions_public_1 = __webpack_require__(12);6176var gast_public_1 = __webpack_require__(1);6177var cache_public_1 = __webpack_require__(27);6178var interpreter_1 = __webpack_require__(5);6179var version_1 = __webpack_require__(11);6180var errors_public_1 = __webpack_require__(19);6181var render_public_1 = __webpack_require__(28);6182/**6183 * defines the public API of6184 * changes here may require major version change. 
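defaultErrorProvider above is exported precisely so it can be used as a reference or partially reused when writing a custom error message provider. A hedged sketch of overriding just one of its builders: it assumes a CommonJS require of this bundle, a hypothetical allTokens array, and that the provider is handed to the parser through the errorMessageProvider configuration option (check the Chevrotain docs for your exact version).

// Sketch: reuse the default provider, override only the mismatched-token message.
var chevrotain = require("chevrotain");

var myErrorProvider = Object.assign({}, chevrotain.defaultErrorProvider, {
    buildMismatchTokenMessage: function (options) {
        // options.expected / options.actual / options.ruleName, as used by the default provider
        return (
            "While parsing <" + options.ruleName + ">: expected " +
            chevrotain.tokenLabel(options.expected) +
            " but saw '" + options.actual.image + "'"
        );
    }
});

// Wiring it up (assumed config key, per the Chevrotain documentation):
// class MyParser extends chevrotain.Parser {
//     constructor(input) {
//         super(input, allTokens, { errorMessageProvider: myErrorProvider });
//     }
// }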
(semVer)6185 */6186var API = {};6187// semantic version6188API.VERSION = version_1.VERSION;6189// runtime API6190API.Parser = parser_public_1.Parser;6191API.ParserDefinitionErrorType = parser_public_1.ParserDefinitionErrorType;6192API.Lexer = lexer_public_1.Lexer;6193API.LexerDefinitionErrorType = lexer_public_1.LexerDefinitionErrorType;6194API.EOF = tokens_public_1.EOF;6195// Tokens utilities6196API.tokenName = tokens_public_1.tokenName;6197API.tokenLabel = tokens_public_1.tokenLabel;6198API.tokenMatcher = tokens_public_1.tokenMatcher;6199API.createToken = tokens_public_1.createToken;6200API.createTokenInstance = tokens_public_1.createTokenInstance;6201// Other Utilities6202API.EMPTY_ALT = parser_public_1.EMPTY_ALT;6203API.defaultErrorProvider = errors_public_1.defaultErrorProvider;6204API.exceptions = {};6205API.exceptions.isRecognitionException = exceptions_public_1.exceptions.isRecognitionException;6206API.exceptions.EarlyExitException = exceptions_public_1.exceptions.EarlyExitException;6207API.exceptions.MismatchedTokenException = exceptions_public_1.exceptions.MismatchedTokenException;6208API.exceptions.NotAllInputParsedException =6209 exceptions_public_1.exceptions.NotAllInputParsedException;6210API.exceptions.NoViableAltException = exceptions_public_1.exceptions.NoViableAltException;6211// grammar reflection API6212API.gast = {};6213API.gast.GAstVisitor = gast_public_1.gast.GAstVisitor;6214API.gast.Flat = gast_public_1.gast.Flat;6215API.gast.Repetition = gast_public_1.gast.Repetition;6216API.gast.RepetitionWithSeparator = gast_public_1.gast.RepetitionWithSeparator;6217API.gast.RepetitionMandatory = gast_public_1.gast.RepetitionMandatory;6218API.gast.RepetitionMandatoryWithSeparator =6219 gast_public_1.gast.RepetitionMandatoryWithSeparator;6220API.gast.Option = gast_public_1.gast.Option;6221API.gast.Alternation = gast_public_1.gast.Alternation;6222API.gast.NonTerminal = gast_public_1.gast.NonTerminal;6223API.gast.Terminal = gast_public_1.gast.Terminal;6224API.gast.Rule = gast_public_1.gast.Rule;6225API.gast.serializeGrammar = gast_public_1.gast.serializeGrammar;6226API.gast.serializeProduction = gast_public_1.gast.serializeProduction;6227API.interperter = {};6228API.interperter.NextAfterTokenWalker = interpreter_1.NextAfterTokenWalker;6229API.clearCache = cache_public_1.clearCache;6230API.createSyntaxDiagramsCode = render_public_1.createSyntaxDiagramsCode;6231module.exports = API;6232//# sourceMappingURL=api.js.map6233/***/ }),6234/* 21 */6235/***/ (function(module, exports, __webpack_require__) {6236"use strict";6237var __extends = (this && this.__extends) || (function () {6238 var extendStatics = Object.setPrototypeOf ||6239 ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||6240 function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };6241 return function (d, b) {6242 extendStatics(d, b);6243 function __() { this.constructor = d; }6244 d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());6245 };6246})();6247Object.defineProperty(exports, "__esModule", { value: true });6248var parser_public_1 = __webpack_require__(6);6249var gast_public_1 = __webpack_require__(1);6250var utils_1 = __webpack_require__(0);6251function resolveGrammar(topLevels) {6252 var refResolver = new GastRefResolverVisitor(topLevels);6253 refResolver.resolveRefs();6254 return refResolver.errors;6255}6256exports.resolveGrammar = resolveGrammar;6257var GastRefResolverVisitor = /** @class */ (function (_super) {6258 __extends(GastRefResolverVisitor, _super);6259 function GastRefResolverVisitor(nameToTopRule) {6260 var _this = _super.call(this) || this;6261 _this.nameToTopRule = nameToTopRule;6262 _this.errors = [];6263 return _this;6264 }6265 GastRefResolverVisitor.prototype.resolveRefs = function () {6266 var _this = this;6267 utils_1.forEach(this.nameToTopRule.values(), function (prod) {6268 _this.currTopLevel = prod;6269 prod.accept(_this);6270 });6271 };6272 GastRefResolverVisitor.prototype.visitNonTerminal = function (node) {6273 var ref = this.nameToTopRule.get(node.nonTerminalName);6274 if (!ref) {6275 var msg = "Invalid grammar, reference to a rule which is not defined: ->" +6276 node.nonTerminalName +6277 "<-\n" +6278 "inside top level rule: ->" +6279 this.currTopLevel.name +6280 "<-";6281 this.errors.push({6282 message: msg,6283 type: parser_public_1.ParserDefinitionErrorType.UNRESOLVED_SUBRULE_REF,6284 ruleName: this.currTopLevel.name,6285 unresolvedRefName: node.nonTerminalName6286 });6287 }6288 else {6289 node.referencedRule = ref;6290 }6291 };6292 return GastRefResolverVisitor;6293}(gast_public_1.gast.GAstVisitor));6294exports.GastRefResolverVisitor = GastRefResolverVisitor;6295//# sourceMappingURL=resolver.js.map6296/***/ }),6297/* 22 */6298/***/ (function(module, exports, __webpack_require__) {6299"use strict";6300Object.defineProperty(exports, "__esModule", { value: true });6301var tokens_public_1 = __webpack_require__(2);6302var lexer_public_1 = __webpack_require__(8);6303var utils_1 = __webpack_require__(0);6304var PATTERN = "PATTERN";6305exports.DEFAULT_MODE = "defaultMode";6306exports.MODES = "modes";6307exports.SUPPORT_STICKY = typeof new RegExp("(?:)").sticky === "boolean";6308function disableSticky() {6309 exports.SUPPORT_STICKY = false;6310}6311exports.disableSticky = disableSticky;6312function enableSticky() {6313 exports.SUPPORT_STICKY = true;6314}6315exports.enableSticky = enableSticky;6316function analyzeTokenTypes(tokenTypes, useSticky) {6317 if (useSticky === void 0) { useSticky = exports.SUPPORT_STICKY; }6318 var onlyRelevantTypes = utils_1.reject(tokenTypes, function (currType) {6319 return currType[PATTERN] === lexer_public_1.Lexer.NA;6320 });6321 var hasCustom = false;6322 var allTransformedPatterns = utils_1.map(onlyRelevantTypes, function (currType) {6323 var currPattern = currType[PATTERN];6324 if (utils_1.isRegExp(currPattern)) {6325 var regExpSource = currPattern.source;6326 if (regExpSource.length === 1 &&6327 // only these regExp meta characters which can appear in a length one regExp6328 regExpSource !== "^" &&6329 regExpSource !== "$" &&6330 regExpSource !== ".") {6331 return regExpSource;6332 }6333 else if (regExpSource.length === 2 &&6334 regExpSource[0] === "\\" &&6335 // not a meta character6336 !utils_1.contains([6337 "d",6338 "D",6339 "s",6340 "S",6341 "t",6342 "r",6343 "n",6344 "t",6345 "0",6346 "c",6347 "b",6348 "B",6349 "f",6350 "v",6351 "w",6352 "W"6353 ], regExpSource[1])) {6354 // escaped meta 
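The GastRefResolverVisitor above walks every rule and reports an UNRESOLVED_SUBRULE_REF definition error whenever a NonTerminal points at a rule name that was never defined. The same idea in a tiny standalone form, over an invented map of rule names to the names they reference:

// Minimal sketch of reference resolution: every referenced rule name must itself be defined.
function findUnresolvedRefs(ruleToRefs) {
    var definedNames = Object.keys(ruleToRefs);
    var errors = [];
    definedNames.forEach(function (ruleName) {
        ruleToRefs[ruleName].forEach(function (refName) {
            if (definedNames.indexOf(refName) === -1) {
                errors.push(
                    "Invalid grammar, reference to a rule which is not defined: ->" +
                    refName + "<- inside top level rule: ->" + ruleName + "<-"
                );
            }
        });
    });
    return errors;
}

console.log(findUnresolvedRefs({
    statement: ["expression"],
    expression: ["atomicExpresion"] // deliberate typo -> one error is reported
}));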
Characters: /\+/ /\[/6355 // or redundant escaping: /\a/6356 // without the escaping "\"6357 return regExpSource[1];6358 }6359 else {6360 return useSticky6361 ? addStickyFlag(currPattern)6362 : addStartOfInput(currPattern);6363 }6364 }6365 else if (utils_1.isFunction(currPattern)) {6366 hasCustom = true;6367 // CustomPatternMatcherFunc - custom patterns do not require any transformations, only wrapping in a RegExp Like object6368 return { exec: currPattern };6369 }6370 else if (utils_1.has(currPattern, "exec")) {6371 hasCustom = true;6372 // ICustomPattern6373 return currPattern;6374 }6375 else if (typeof currPattern === "string") {6376 // IGNORE ABOVE ELSE6377 if (currPattern.length === 1) {6378 return currPattern;6379 }6380 else {6381 var escapedRegExpString = currPattern.replace(/[\\^$.*+?()[\]{}|]/g, "\\$&");6382 var wrappedRegExp = new RegExp(escapedRegExpString);6383 // TODO: extract the "?" expression, it is duplicated6384 return useSticky6385 ? addStickyFlag(wrappedRegExp)6386 : addStartOfInput(wrappedRegExp);6387 }6388 }6389 else {6390 /* istanbul ignore next */6391 throw Error("non exhaustive match");6392 }6393 });6394 var patternIdxToType = utils_1.map(onlyRelevantTypes, function (currType) { return currType.tokenTypeIdx; });6395 var patternIdxToGroup = utils_1.map(onlyRelevantTypes, function (clazz) {6396 var groupName = clazz.GROUP;6397 if (groupName === lexer_public_1.Lexer.SKIPPED) {6398 return undefined;6399 }6400 else if (utils_1.isString(groupName)) {6401 return groupName;6402 }6403 else if (utils_1.isUndefined(groupName)) {6404 return false;6405 }6406 else {6407 /* istanbul ignore next */6408 throw Error("non exhaustive match");6409 }6410 });6411 var patternIdxToLongerAltIdx = utils_1.map(onlyRelevantTypes, function (clazz) {6412 var longerAltType = clazz.LONGER_ALT;6413 if (longerAltType) {6414 var longerAltIdx = utils_1.indexOf(onlyRelevantTypes, longerAltType);6415 return longerAltIdx;6416 }6417 });6418 var patternIdxToPushMode = utils_1.map(onlyRelevantTypes, function (clazz) { return clazz.PUSH_MODE; });6419 var patternIdxToPopMode = utils_1.map(onlyRelevantTypes, function (clazz) {6420 return utils_1.has(clazz, "POP_MODE");6421 });6422 var patternIdxToCanLineTerminator = utils_1.map(onlyRelevantTypes, function (clazz) { return clazz.LINE_BREAKS === true; });6423 var patternIdxToIsCustom = utils_1.map(onlyRelevantTypes, isCustomPattern);6424 var patternIdxToShort = utils_1.map(allTransformedPatterns, isShortPattern);6425 var emptyGroups = utils_1.reduce(onlyRelevantTypes, function (acc, clazz) {6426 var groupName = clazz.GROUP;6427 if (utils_1.isString(groupName) && !(groupName === lexer_public_1.Lexer.SKIPPED)) {6428 acc[groupName] = [];6429 }6430 return acc;6431 }, {});6432 var patternIdxToConfig = utils_1.map(allTransformedPatterns, function (x, idx) {6433 return {6434 pattern: allTransformedPatterns[idx],6435 longerAlt: patternIdxToLongerAltIdx[idx],6436 canLineTerminator: patternIdxToCanLineTerminator[idx],6437 isCustom: patternIdxToIsCustom[idx],6438 short: patternIdxToShort[idx],6439 group: patternIdxToGroup[idx],6440 push: patternIdxToPushMode[idx],6441 pop: patternIdxToPopMode[idx],6442 tokenTypeIdx: patternIdxToType[idx],6443 tokenType: onlyRelevantTypes[idx]6444 };6445 });6446 return {6447 emptyGroups: emptyGroups,6448 patternIdxToConfig: patternIdxToConfig,6449 hasCustom: hasCustom6450 };6451}6452exports.analyzeTokenTypes = analyzeTokenTypes;6453function validatePatterns(tokenTypes, validModesNames) {6454 var errors = [];6455 var missingResult = 
findMissingPatterns(tokenTypes);6456 errors = errors.concat(missingResult.errors);6457 var invalidResult = findInvalidPatterns(missingResult.valid);6458 var validTokenTypes = invalidResult.valid;6459 errors = errors.concat(invalidResult.errors);6460 errors = errors.concat(validateRegExpPattern(validTokenTypes));6461 errors = errors.concat(findInvalidGroupType(validTokenTypes));6462 errors = errors.concat(findModesThatDoNotExist(validTokenTypes, validModesNames));6463 errors = errors.concat(findUnreachablePatterns(validTokenTypes));6464 return errors;6465}6466exports.validatePatterns = validatePatterns;6467function validateRegExpPattern(tokenTypes) {6468 var errors = [];6469 var withRegExpPatterns = utils_1.filter(tokenTypes, function (currTokType) {6470 return utils_1.isRegExp(currTokType[PATTERN]);6471 });6472 errors = errors.concat(findEndOfInputAnchor(withRegExpPatterns));6473 errors = errors.concat(findStartOfInputAnchor(withRegExpPatterns));6474 errors = errors.concat(findUnsupportedFlags(withRegExpPatterns));6475 errors = errors.concat(findDuplicatePatterns(withRegExpPatterns));6476 errors = errors.concat(findEmptyMatchRegExps(withRegExpPatterns));6477 return errors;6478}6479function findMissingPatterns(tokenTypes) {6480 var tokenTypesWithMissingPattern = utils_1.filter(tokenTypes, function (currType) {6481 return !utils_1.has(currType, PATTERN);6482 });6483 var errors = utils_1.map(tokenTypesWithMissingPattern, function (currType) {6484 return {6485 message: "Token Type: ->" +6486 tokens_public_1.tokenName(currType) +6487 "<- missing static 'PATTERN' property",6488 type: lexer_public_1.LexerDefinitionErrorType.MISSING_PATTERN,6489 tokenTypes: [currType]6490 };6491 });6492 var valid = utils_1.difference(tokenTypes, tokenTypesWithMissingPattern);6493 return { errors: errors, valid: valid };6494}6495exports.findMissingPatterns = findMissingPatterns;6496function findInvalidPatterns(tokenTypes) {6497 var tokenTypesWithInvalidPattern = utils_1.filter(tokenTypes, function (currType) {6498 var pattern = currType[PATTERN];6499 return (!utils_1.isRegExp(pattern) &&6500 !utils_1.isFunction(pattern) &&6501 !utils_1.has(pattern, "exec") &&6502 !utils_1.isString(pattern));6503 });6504 var errors = utils_1.map(tokenTypesWithInvalidPattern, function (currType) {6505 return {6506 message: "Token Type: ->" +6507 tokens_public_1.tokenName(currType) +6508 "<- static 'PATTERN' can only be a RegExp, a" +6509 " Function matching the {CustomPatternMatcherFunc} type or an Object matching the {ICustomPattern} interface.",6510 type: lexer_public_1.LexerDefinitionErrorType.INVALID_PATTERN,6511 tokenTypes: [currType]6512 };6513 });6514 var valid = utils_1.difference(tokenTypes, tokenTypesWithInvalidPattern);6515 return { errors: errors, valid: valid };6516}6517exports.findInvalidPatterns = findInvalidPatterns;6518var end_of_input = /[^\\][\$]/;6519function findEndOfInputAnchor(tokenTypes) {6520 var invalidRegex = utils_1.filter(tokenTypes, function (currType) {6521 var pattern = currType[PATTERN];6522 return end_of_input.test(pattern.source);6523 });6524 var errors = utils_1.map(invalidRegex, function (currType) {6525 return {6526 message: "Unexpected RegExp Anchor Error:\n" +6527 "\tToken Type: ->" +6528 tokens_public_1.tokenName(currType) +6529 "<- static 'PATTERN' cannot contain end of input anchor '$'\n" +6530 "\tSee sap.github.io/chevrotain/website/Building_Grammars/resolving_lexer_errors.html#ANCHORS" +6531 "\tfor details.",6532 type: lexer_public_1.LexerDefinitionErrorType.EOI_ANCHOR_FOUND,6533 
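validatePatterns above funnels every token type through a pipeline of checks (missing PATTERN, invalid PATTERN type, anchors, flags, duplicates, unreachable patterns, ...). findEndOfInputAnchor, for example, rejects RegExp patterns containing an unescaped '$', because the lexer always matches from the current offset, never relative to the end of the input. A standalone sketch of that one check over a few sample patterns; the token names are invented:

// Sketch of the end-of-input anchor check: an unescaped "$" in a token PATTERN is flagged.
var end_of_input = /[^\\][$]/; // same heuristic used by findEndOfInputAnchor above

function checkEndOfInputAnchor(tokenTypes) {
    return tokenTypes
        .filter(function (tokType) { return end_of_input.test(tokType.PATTERN.source); })
        .map(function (tokType) {
            return "Token Type: ->" + tokType.name + "<- 'PATTERN' cannot contain end of input anchor '$'";
        });
}

console.log(checkEndOfInputAnchor([
    { name: "Integer", PATTERN: /\d+/ },     // fine
    { name: "TrailingSemi", PATTERN: /;$/ }, // flagged
    { name: "Dollar", PATTERN: /\$/ }        // escaped "$" -> not flagged
]));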
tokenTypes: [currType]6534 };6535 });6536 return errors;6537}6538exports.findEndOfInputAnchor = findEndOfInputAnchor;6539function findEmptyMatchRegExps(tokenTypes) {6540 var matchesEmptyString = utils_1.filter(tokenTypes, function (currType) {6541 var pattern = currType[PATTERN];6542 return pattern.test("");6543 });6544 var errors = utils_1.map(matchesEmptyString, function (currType) {6545 return {6546 message: "Token Type: ->" +6547 tokens_public_1.tokenName(currType) +6548 "<- static 'PATTERN' must not match an empty string",6549 type: lexer_public_1.LexerDefinitionErrorType.EMPTY_MATCH_PATTERN,6550 tokenTypes: [currType]6551 };6552 });6553 return errors;6554}6555exports.findEmptyMatchRegExps = findEmptyMatchRegExps;6556var start_of_input = /[^\\[][\^]|^\^/;6557function findStartOfInputAnchor(tokenTypes) {6558 var invalidRegex = utils_1.filter(tokenTypes, function (currType) {6559 var pattern = currType[PATTERN];6560 return start_of_input.test(pattern.source);6561 });6562 var errors = utils_1.map(invalidRegex, function (currType) {6563 return {6564 message: "Unexpected RegExp Anchor Error:\n" +6565 "\tToken Type: ->" +6566 tokens_public_1.tokenName(currType) +6567 "<- static 'PATTERN' cannot contain start of input anchor '^'\n" +6568 "\tSee https://github.com/SAP/chevrotain/blob/master/docs/resolving_lexer_errors.md#ANCHORS\n" +6569 "\tfor details.",6570 type: lexer_public_1.LexerDefinitionErrorType.SOI_ANCHOR_FOUND,6571 tokenTypes: [currType]6572 };6573 });6574 return errors;6575}6576exports.findStartOfInputAnchor = findStartOfInputAnchor;6577function findUnsupportedFlags(tokenTypes) {6578 var invalidFlags = utils_1.filter(tokenTypes, function (currType) {6579 var pattern = currType[PATTERN];6580 return (pattern instanceof RegExp && (pattern.multiline || pattern.global));6581 });6582 var errors = utils_1.map(invalidFlags, function (currType) {6583 return {6584 message: "Token Type: ->" +6585 tokens_public_1.tokenName(currType) +6586 "<- static 'PATTERN' may NOT contain global('g') or multiline('m')",6587 type: lexer_public_1.LexerDefinitionErrorType.UNSUPPORTED_FLAGS_FOUND,6588 tokenTypes: [currType]6589 };6590 });6591 return errors;6592}6593exports.findUnsupportedFlags = findUnsupportedFlags;6594// This can only test for identical duplicate RegExps, not semantically equivalent ones.6595function findDuplicatePatterns(tokenTypes) {6596 var found = [];6597 var identicalPatterns = utils_1.map(tokenTypes, function (outerType) {6598 return utils_1.reduce(tokenTypes, function (result, innerType) {6599 if (outerType.PATTERN.source === innerType.PATTERN.source &&6600 !utils_1.contains(found, innerType) &&6601 innerType.PATTERN !== lexer_public_1.Lexer.NA) {6602 // this avoids duplicates in the result, each Token Type may only appear in one "set"6603 // in essence we are creating Equivalence classes on equality relation.6604 found.push(innerType);6605 result.push(innerType);6606 return result;6607 }6608 return result;6609 }, []);6610 });6611 identicalPatterns = utils_1.compact(identicalPatterns);6612 var duplicatePatterns = utils_1.filter(identicalPatterns, function (currIdenticalSet) {6613 return currIdenticalSet.length > 1;6614 });6615 var errors = utils_1.map(duplicatePatterns, function (setOfIdentical) {6616 var tokenTypeNames = utils_1.map(setOfIdentical, function (currType) {6617 return tokens_public_1.tokenName(currType);6618 });6619 var dupPatternSrc = utils_1.first(setOfIdentical).PATTERN;6620 return {6621 message: "The same RegExp pattern ->" + dupPatternSrc + "<-" +6622 ("has been 
used in all of the following Token Types: " + tokenTypeNames.join(", ") + " <-"),6623 type: lexer_public_1.LexerDefinitionErrorType.DUPLICATE_PATTERNS_FOUND,6624 tokenTypes: setOfIdentical6625 };6626 });6627 return errors;6628}6629exports.findDuplicatePatterns = findDuplicatePatterns;6630function findInvalidGroupType(tokenTypes) {6631 var invalidTypes = utils_1.filter(tokenTypes, function (clazz) {6632 if (!utils_1.has(clazz, "GROUP")) {6633 return false;6634 }6635 var group = clazz.GROUP;6636 return group !== lexer_public_1.Lexer.SKIPPED && group !== lexer_public_1.Lexer.NA && !utils_1.isString(group);6637 });6638 var errors = utils_1.map(invalidTypes, function (currType) {6639 return {6640 message: "Token Type: ->" +6641 tokens_public_1.tokenName(currType) +6642 "<- static 'GROUP' can only be Lexer.SKIPPED/Lexer.NA/A String",6643 type: lexer_public_1.LexerDefinitionErrorType.INVALID_GROUP_TYPE_FOUND,6644 tokenTypes: [currType]6645 };6646 });6647 return errors;6648}6649exports.findInvalidGroupType = findInvalidGroupType;6650function findModesThatDoNotExist(tokenTypes, validModes) {6651 var invalidModes = utils_1.filter(tokenTypes, function (clazz) {6652 return (clazz.PUSH_MODE !== undefined &&6653 !utils_1.contains(validModes, clazz.PUSH_MODE));6654 });6655 var errors = utils_1.map(invalidModes, function (clazz) {6656 var msg = "Token Type: ->" + tokens_public_1.tokenName(clazz) + "<- static 'PUSH_MODE' value cannot refer to a Lexer Mode ->" + clazz.PUSH_MODE + "<-" + "which does not exist";6657 return {6658 message: msg,6659 type: lexer_public_1.LexerDefinitionErrorType.PUSH_MODE_DOES_NOT_EXIST,6660 tokenTypes: [clazz]6661 };6662 });6663 return errors;6664}6665exports.findModesThatDoNotExist = findModesThatDoNotExist;6666function findUnreachablePatterns(tokenTypes) {6667 var errors = [];6668 var canBeTested = utils_1.reduce(tokenTypes, function (result, tokType, idx) {6669 var pattern = tokType.PATTERN;6670 if (pattern === lexer_public_1.Lexer.NA) {6671 return result;6672 }6673 // a more comprehensive validation for all forms of regExps would require6674 // deeper regExp analysis capabilities6675 if (utils_1.isString(pattern)) {6676 result.push({ str: pattern, idx: idx, tokenType: tokType });6677 }6678 else if (utils_1.isRegExp(pattern) && noMetaChar(pattern)) {6679 result.push({ str: pattern.source, idx: idx, tokenType: tokType });6680 }6681 return result;6682 }, []);6683 utils_1.forEach(tokenTypes, function (tokType, testIdx) {6684 utils_1.forEach(canBeTested, function (_a) {6685 var str = _a.str, idx = _a.idx, tokenType = _a.tokenType;6686 if (testIdx < idx && testTokenType(str, tokType.PATTERN)) {6687 var msg = "Token: ->" + tokens_public_1.tokenName(tokenType) + "<- can never be matched.\n" +6688 ("Because it appears AFTER the Token Type ->" + tokens_public_1.tokenName(tokType) + "<-") +6689 "in the lexer's definition.\n" +6690 "See http://sap.github.io/chevrotain/website/Building_Grammars/resolving_lexer_errors.html#UNREACHABLE";6691 errors.push({6692 message: msg,6693 type: lexer_public_1.LexerDefinitionErrorType.UNREACHABLE_PATTERN,6694 tokenTypes: [tokType, tokenType]6695 });6696 }6697 });6698 });6699 return errors;6700}6701exports.findUnreachablePatterns = findUnreachablePatterns;6702function testTokenType(str, pattern) {6703 if (utils_1.isRegExp(pattern)) {6704 var regExpArray = pattern.exec(str);6705 return regExpArray !== null && regExpArray.index === 0;6706 }6707 else if (utils_1.isFunction(pattern)) {6708 // maintain the API of custom patterns6709 return pattern(str, 0, 
[], {});6710 }6711 else if (utils_1.has(pattern, "exec")) {6712 // maintain the API of custom patterns6713 return pattern.exec(str, 0, [], {});6714 }6715 else if (typeof pattern === "string") {6716 return pattern === str;6717 }6718 else {6719 /* istanbul ignore next */6720 throw Error("non exhaustive match");6721 }6722}6723function noMetaChar(regExp) {6724 //https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp6725 var metaChars = [6726 ".",6727 "\\",6728 "[",6729 "]",6730 "|",6731 "^",6732 "$",6733 "(",6734 ")",6735 "?",6736 "*",6737 "+",6738 "{"6739 ];6740 return (utils_1.find(metaChars, function (char) { return regExp.source.indexOf(char) !== -1; }) ===6741 undefined);6742}6743function addStartOfInput(pattern) {6744 var flags = pattern.ignoreCase ? "i" : "";6745 // always wrapping in a none capturing group preceded by '^' to make sure matching can only work on start of input.6746 // duplicate/redundant start of input markers have no meaning (/^^^^A/ === /^A/)6747 return new RegExp("^(?:" + pattern.source + ")", flags);6748}6749exports.addStartOfInput = addStartOfInput;6750function addStickyFlag(pattern) {6751 var flags = pattern.ignoreCase ? "iy" : "y";6752 // always wrapping in a none capturing group preceded by '^' to make sure matching can only work on start of input.6753 // duplicate/redundant start of input markers have no meaning (/^^^^A/ === /^A/)6754 return new RegExp("" + pattern.source, flags);6755}6756exports.addStickyFlag = addStickyFlag;6757function performRuntimeChecks(lexerDefinition, trackLines) {6758 var errors = [];6759 // some run time checks to help the end users.6760 if (!utils_1.has(lexerDefinition, exports.DEFAULT_MODE)) {6761 errors.push({6762 message: "A MultiMode Lexer cannot be initialized without a <" +6763 exports.DEFAULT_MODE +6764 "> property in its definition\n",6765 type: lexer_public_1.LexerDefinitionErrorType.MULTI_MODE_LEXER_WITHOUT_DEFAULT_MODE6766 });6767 }6768 if (!utils_1.has(lexerDefinition, exports.MODES)) {6769 errors.push({6770 message: "A MultiMode Lexer cannot be initialized without a <" +6771 exports.MODES +6772 "> property in its definition\n",6773 type: lexer_public_1.LexerDefinitionErrorType.MULTI_MODE_LEXER_WITHOUT_MODES_PROPERTY6774 });6775 }6776 if (utils_1.has(lexerDefinition, exports.MODES) &&6777 utils_1.has(lexerDefinition, exports.DEFAULT_MODE) &&6778 !utils_1.has(lexerDefinition.modes, lexerDefinition.defaultMode)) {6779 errors.push({6780 message: "A MultiMode Lexer cannot be initialized with a " + exports.DEFAULT_MODE + ": <" + lexerDefinition.defaultMode + ">" + "which does not exist\n",6781 type: lexer_public_1.LexerDefinitionErrorType.MULTI_MODE_LEXER_DEFAULT_MODE_VALUE_DOES_NOT_EXIST6782 });6783 }6784 if (utils_1.has(lexerDefinition, exports.MODES)) {6785 utils_1.forEach(lexerDefinition.modes, function (currModeValue, currModeName) {6786 utils_1.forEach(currModeValue, function (currTokType, currIdx) {6787 if (utils_1.isUndefined(currTokType)) {6788 errors.push({6789 message: "A Lexer cannot be initialized using an undefined Token Type. 
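addStartOfInput and addStickyFlag above are the two strategies used to force a token pattern to match only at the lexer's current position: either wrap the source in ^(?: ... ) and exec against the remaining substring, or keep the pattern as-is, add the sticky 'y' flag and set lastIndex. A small standalone comparison of the two:

// Two ways of matching a pattern only at a given offset in the input.
var input = "let answer = 42";
var offset = 13; // points at "42"

// 1) "^"-anchored pattern, exec'd against a substring starting at the offset
var anchored = new RegExp("^(?:\\d+)");
console.log(anchored.exec(input.substring(offset))[0]); // "42"

// 2) sticky pattern, exec'd against the full input, anchored via lastIndex
var sticky = new RegExp("\\d+", "y");
sticky.lastIndex = offset;
console.log(sticky.exec(input)[0]); // "42"

// The sticky variant avoids allocating a new substring for every match attempt,
// which is why it is preferred whenever the runtime supports the "y" flag
// (the same feature test as SUPPORT_STICKY above):
console.log(typeof new RegExp("(?:)").sticky === "boolean");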
Mode:" +6790 ("<" + currModeName + "> at index: <" + currIdx + ">\n"),6791 type: lexer_public_1.LexerDefinitionErrorType.LEXER_DEFINITION_CANNOT_CONTAIN_UNDEFINED6792 });6793 }6794 });6795 });6796 }6797 var allTokenTypes = utils_1.flatten(utils_1.mapValues(lexerDefinition.modes, function (tokTypes) { return tokTypes; }));6798 if (trackLines &&6799 utils_1.find(allTokenTypes, function (currTokType) { return currTokType.LINE_BREAKS; }) === undefined) {6800 errors.push({6801 message: "No LINE_BREAKS Error:\n" +6802 "\tThis Lexer has been defined to track line and column information,\n" +6803 "\tyet none of the Token definitions contain a LINE_BREAK flag.\n" +6804 "\tSee http://sap.github.io/chevrotain/website/Building_Grammars/resolving_lexer_errors.html#LINE_BREAKS \n" +6805 "\tfor details.",6806 type: lexer_public_1.LexerDefinitionErrorType.NO_LINE_BREAKS_FLAGS6807 });6808 }6809 return errors;6810}6811exports.performRuntimeChecks = performRuntimeChecks;6812function cloneEmptyGroups(emptyGroups) {6813 var clonedResult = {};6814 var groupKeys = utils_1.keys(emptyGroups);6815 utils_1.forEach(groupKeys, function (currKey) {6816 var currGroupValue = emptyGroups[currKey];6817 /* istanbul ignore else */6818 if (utils_1.isArray(currGroupValue)) {6819 clonedResult[currKey] = [];6820 }6821 else {6822 /* istanbul ignore next */6823 throw Error("non exhaustive match");6824 }6825 });6826 return clonedResult;6827}6828exports.cloneEmptyGroups = cloneEmptyGroups;6829// TODO: refactor to avoid duplication6830function isCustomPattern(tokenType) {6831 var pattern = tokenType.PATTERN;6832 if (utils_1.isRegExp(pattern)) {6833 return false;6834 }6835 else if (utils_1.isFunction(pattern)) {6836 // CustomPatternMatcherFunc - custom patterns do not require any transformations, only wrapping in a RegExp Like object6837 return true;6838 }6839 else if (utils_1.has(pattern, "exec")) {6840 // ICustomPattern6841 return true;6842 }6843 else if (utils_1.isString(pattern)) {6844 return false;6845 }6846 else {6847 /* istanbul ignore next */6848 throw Error("non exhaustive match");6849 }6850}6851exports.isCustomPattern = isCustomPattern;6852function isShortPattern(pattern) {6853 if (utils_1.isString(pattern) && pattern.length === 1) {6854 return pattern.charCodeAt(0);6855 }6856 else {6857 return false;6858 }6859}6860exports.isShortPattern = isShortPattern;6861/**6862 * Faster than using a RegExp for default newline detection during lexing.6863 */6864exports.LineTerminatorOptimizedTester = {6865 // implements /\n|\r\n?/g.test6866 test: function (text) {6867 var len = text.length;6868 for (var i = this.lastIndex; i < len; i++) {6869 var c = text.charCodeAt(i);6870 if (c === 10) {6871 this.lastIndex = i + 1;6872 return true;6873 }6874 else if (c === 13) {6875 if (text.charCodeAt(i + 1) === 10) {6876 this.lastIndex = i + 2;6877 }6878 else {6879 this.lastIndex = i + 1;6880 }6881 return true;6882 }6883 }6884 return false;6885 },6886 lastIndex: 06887};6888//# sourceMappingURL=lexer.js.map6889/***/ }),6890/* 23 */6891/***/ (function(module, exports, __webpack_require__) {6892"use strict";6893var __extends = (this && this.__extends) || (function () {6894 var extendStatics = Object.setPrototypeOf ||6895 ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||6896 function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };6897 return function (d, b) {6898 extendStatics(d, b);6899 function __() { this.constructor = d; }6900 d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());6901 };6902})();6903Object.defineProperty(exports, "__esModule", { value: true });6904var rest_1 = __webpack_require__(10);6905var lang_extensions_1 = __webpack_require__(3);6906var gast_public_1 = __webpack_require__(1);6907var first_1 = __webpack_require__(15);6908var utils_1 = __webpack_require__(0);6909var constants_1 = __webpack_require__(18);6910var tokens_public_1 = __webpack_require__(2);6911// This ResyncFollowsWalker computes all of the follows required for RESYNC6912// (skipping reference production).6913var ResyncFollowsWalker = /** @class */ (function (_super) {6914 __extends(ResyncFollowsWalker, _super);6915 function ResyncFollowsWalker(topProd) {6916 var _this = _super.call(this) || this;6917 _this.topProd = topProd;6918 _this.follows = new lang_extensions_1.HashTable();6919 return _this;6920 }6921 ResyncFollowsWalker.prototype.startWalking = function () {6922 this.walk(this.topProd);6923 return this.follows;6924 };6925 ResyncFollowsWalker.prototype.walkTerminal = function (terminal, currRest, prevRest) {6926 // do nothing! just like in the public sector after 13:006927 };6928 ResyncFollowsWalker.prototype.walkProdRef = function (refProd, currRest, prevRest) {6929 var followName = buildBetweenProdsFollowPrefix(refProd.referencedRule, refProd.occurrenceInParent) + this.topProd.name;6930 var fullRest = currRest.concat(prevRest);6931 var restProd = new gast_public_1.gast.Flat(fullRest);6932 var t_in_topProd_follows = first_1.first(restProd);6933 this.follows.put(followName, t_in_topProd_follows);6934 };6935 return ResyncFollowsWalker;6936}(rest_1.RestWalker));6937exports.ResyncFollowsWalker = ResyncFollowsWalker;6938function computeAllProdsFollows(topProductions) {6939 var reSyncFollows = new lang_extensions_1.HashTable();6940 utils_1.forEach(topProductions, function (topProd) {6941 var currRefsFollow = new ResyncFollowsWalker(topProd).startWalking();6942 reSyncFollows.putAll(currRefsFollow);6943 });6944 return reSyncFollows;6945}6946exports.computeAllProdsFollows = computeAllProdsFollows;6947function buildBetweenProdsFollowPrefix(inner, occurenceInParent) {6948 return inner.name + occurenceInParent + constants_1.IN;6949}6950exports.buildBetweenProdsFollowPrefix = buildBetweenProdsFollowPrefix;6951function buildInProdFollowPrefix(terminal) {6952 var terminalName = tokens_public_1.tokenName(terminal.terminalType);6953 return terminalName + terminal.occurrenceInParent + constants_1.IN;6954}6955exports.buildInProdFollowPrefix = buildInProdFollowPrefix;6956//# sourceMappingURL=follow.js.map6957/***/ }),6958/* 24 */6959/***/ (function(module, exports, __webpack_require__) {6960"use strict";6961Object.defineProperty(exports, "__esModule", { value: true });6962var range_1 = __webpack_require__(25);6963var gast_public_1 = __webpack_require__(1);6964var utils_1 = __webpack_require__(0);6965var ProdType;6966(function (ProdType) {6967 ProdType[ProdType["OPTION"] = 0] = "OPTION";6968 ProdType[ProdType["OR"] = 1] = "OR";6969 ProdType[ProdType["MANY"] = 2] = "MANY";6970 ProdType[ProdType["MANY_SEP"] = 3] = "MANY_SEP";6971 ProdType[ProdType["AT_LEAST_ONE"] = 4] = "AT_LEAST_ONE";6972 ProdType[ProdType["AT_LEAST_ONE_SEP"] = 5] = "AT_LEAST_ONE_SEP";6973 ProdType[ProdType["REF"] = 6] = "REF";6974 ProdType[ProdType["TERMINAL"] = 7] = "TERMINAL";6975 ProdType[ProdType["FLAT"] = 8] = "FLAT";6976})(ProdType = exports.ProdType || (exports.ProdType = {}));6977var namePropRegExp = /(?:\s*{\s*NAME\s*:\s*["'`]([\w$]*)["'`])?/;6978var 
namePropRegExpNoCurlyFirstOfTwo = new RegExp(namePropRegExp.source6979 .replace("{", "")6980 .replace(")?", "\\s*,)?"));6981var terminalRegEx = /\.\s*CONSUME(\d)?\s*\(\s*(?:[a-zA-Z_$]\w*\s*\.\s*)*([a-zA-Z_$]\w*)/;6982var terminalRegGlobal = new RegExp(terminalRegEx.source, "g");6983var refRegEx = /\.\s*SUBRULE(\d)?\s*\(\s*(?:[a-zA-Z_$]\w*\s*\.\s*)*([a-zA-Z_$]\w*)/;6984var refRegExGlobal = new RegExp(refRegEx.source, "g");6985var optionPrefixRegEx = /\.\s*OPTION(\d)?\s*\(/;6986var optionRegEx = new RegExp(optionPrefixRegEx.source + namePropRegExp.source);6987var optionRegExGlobal = new RegExp(optionPrefixRegEx.source, "g");6988var manyPrefixRegEx = /\.\s*MANY(\d)?\s*\(/;6989var manyRegEx = new RegExp(manyPrefixRegEx.source + namePropRegExp.source);6990var manyRegExGlobal = new RegExp(manyPrefixRegEx.source, "g");6991var sepPropRegEx = /\s*SEP\s*:\s*(?:[a-zA-Z_$]\w*\s*\.\s*)*([a-zA-Z_$]\w*)/;6992var manySepPrefixRegEx = /\.\s*MANY_SEP(\d)?\s*\(\s*{/;6993var manyWithSeparatorRegEx = new RegExp(manySepPrefixRegEx.source +6994 namePropRegExpNoCurlyFirstOfTwo.source +6995 sepPropRegEx.source);6996var manyWithSeparatorRegExGlobal = new RegExp(manyWithSeparatorRegEx.source, "g");6997var atLeastOneSepPrefixRegEx = /\.\s*AT_LEAST_ONE_SEP(\d)?\s*\(\s*{/;6998var atLeastOneWithSeparatorRegEx = new RegExp(atLeastOneSepPrefixRegEx.source +6999 namePropRegExpNoCurlyFirstOfTwo.source +7000 sepPropRegEx.source);7001var atLeastOneWithSeparatorRegExGlobal = new RegExp(atLeastOneWithSeparatorRegEx.source, "g");7002var atLeastOnePrefixRegEx = /\.\s*AT_LEAST_ONE(\d)?\s*\(/;7003var atLeastOneRegEx = new RegExp(atLeastOnePrefixRegEx.source + namePropRegExp.source);7004var atLeastOneRegExGlobal = new RegExp(atLeastOnePrefixRegEx.source, "g");7005var orPrefixRegEx = /\.\s*OR(\d)?\s*\(/;7006var orRegEx = new RegExp(orPrefixRegEx.source + namePropRegExp.source);7007var orRegExGlobal = new RegExp(orPrefixRegEx.source, "g");7008var orPartSuffixRegEx = /\s*(ALT)\s*:/;7009var orPartRegEx = new RegExp(namePropRegExpNoCurlyFirstOfTwo.source + orPartSuffixRegEx.source);7010var orPartRegExGlobal = new RegExp(orPartRegEx.source, "g");7011exports.terminalNameToConstructor = {};7012function buildTopProduction(impelText, name, terminals) {7013 // pseudo state. 
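The gast_builder module above recovers a rule's grammar structure by running regular expressions (terminalRegEx, refRegEx, optionRegEx, ...) over the rule's implementation text, i.e. over what Function.prototype.toString() returns for the rule body. A rough standalone illustration of the terminal / sub-rule extraction step; the rule body below is an invented snippet of parser code:

// Extracting CONSUME'd token names and SUBRULE'd rule names from a rule's source text,
// with the same regexes used above.
var ruleText =
    "  this.CONSUME(LParen);\n" +
    "  this.SUBRULE(this.expression);\n" +
    "  this.CONSUME2(RParen);\n";

var terminalRegGlobal = /\.\s*CONSUME(\d)?\s*\(\s*(?:[a-zA-Z_$]\w*\s*\.\s*)*([a-zA-Z_$]\w*)/g;
var refRegExGlobal = /\.\s*SUBRULE(\d)?\s*\(\s*(?:[a-zA-Z_$]\w*\s*\.\s*)*([a-zA-Z_$]\w*)/g;

function collect(pattern, text) {
    var found = [];
    var matched;
    while ((matched = pattern.exec(text))) {
        // matched[1] is the optional occurrence suffix, matched[2] the referenced name
        found.push({ name: matched[2], occurrence: matched[1] ? parseInt(matched[1], 10) : 1 });
    }
    return found;
}

console.log(collect(terminalRegGlobal, ruleText));
// [ { name: 'LParen', occurrence: 1 }, { name: 'RParen', occurrence: 2 } ]
console.log(collect(refRegExGlobal, ruleText));
// [ { name: 'expression', occurrence: 1 } ]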
so little state does not yet mandate the complexity of wrapping in a class...7014 // TODO: this is confusing, might be time to create a class..7015 exports.terminalNameToConstructor = terminals;7016 // the top most range must strictly contain all the other ranges7017 // which is why we prefix the text with " " (curr Range impel is only for positive ranges)7018 var spacedImpelText = " " + impelText;7019 // TODO: why do we add whitespace twice?7020 var txtWithoutComments = removeComments(" " + spacedImpelText);7021 var textWithoutCommentsAndStrings = removeStringLiterals(txtWithoutComments);7022 var prodRanges = createRanges(textWithoutCommentsAndStrings);7023 var topRange = new range_1.Range(0, impelText.length + 2);7024 var topRule = buildTopLevel(name, topRange, prodRanges, impelText);7025 return topRule;7026}7027exports.buildTopProduction = buildTopProduction;7028function buildTopLevel(name, topRange, allRanges, orgText) {7029 var topLevelProd = new gast_public_1.gast.Rule(name, [], orgText);7030 return buildAbstractProd(topLevelProd, topRange, allRanges);7031}7032function buildProdGast(prodRange, allRanges) {7033 ;7034 ("use strict");7035 switch (prodRange.type) {7036 case ProdType.AT_LEAST_ONE:7037 return buildAtLeastOneProd(prodRange, allRanges);7038 case ProdType.AT_LEAST_ONE_SEP:7039 return buildAtLeastOneSepProd(prodRange, allRanges);7040 case ProdType.MANY_SEP:7041 return buildManySepProd(prodRange, allRanges);7042 case ProdType.MANY:7043 return buildManyProd(prodRange, allRanges);7044 case ProdType.OPTION:7045 return buildOptionProd(prodRange, allRanges);7046 case ProdType.OR:7047 return buildOrProd(prodRange, allRanges);7048 case ProdType.FLAT:7049 return buildFlatProd(prodRange, allRanges);7050 case ProdType.REF:7051 return buildRefProd(prodRange);7052 case ProdType.TERMINAL:7053 return buildTerminalProd(prodRange);7054 /* istanbul ignore next */7055 default:7056 /* istanbul ignore next */7057 throw Error("non exhaustive match");7058 }7059}7060exports.buildProdGast = buildProdGast;7061function buildRefProd(prodRange) {7062 var reResult = refRegEx.exec(prodRange.text);7063 var isImplicitOccurrenceIdx = reResult[1] === undefined;7064 var refOccurrence = isImplicitOccurrenceIdx ? 1 : parseInt(reResult[1], 10);7065 var refProdName = reResult[2];7066 var newRef = new gast_public_1.gast.NonTerminal(refProdName, undefined, refOccurrence);7067 newRef.implicitOccurrenceIndex = isImplicitOccurrenceIdx;7068 return newRef;7069}7070function buildTerminalProd(prodRange) {7071 var reResult = terminalRegEx.exec(prodRange.text);7072 var isImplicitOccurrenceIdx = reResult[1] === undefined;7073 var terminalOccurrence = isImplicitOccurrenceIdx7074 ? 17075 : parseInt(reResult[1], 10);7076 var terminalName = reResult[2];7077 var terminalType = exports.terminalNameToConstructor[terminalName];7078 if (!terminalType) {7079 throw Error("Terminal Token name: " + terminalName + " not found");7080 }7081 var newTerminal = new gast_public_1.gast.Terminal(terminalType, terminalOccurrence);7082 newTerminal.implicitOccurrenceIndex = isImplicitOccurrenceIdx;7083 return newTerminal;7084}7085function buildProdWithOccurrence(regEx, prodInstance, prodRange, allRanges) {7086 var reResult = regEx.exec(prodRange.text);7087 var isImplicitOccurrenceIdx = reResult[1] === undefined;7088 prodInstance.occurrenceInParent = isImplicitOccurrenceIdx7089 ? 
17090 : parseInt(reResult[1], 10);7091 prodInstance.implicitOccurrenceIndex = isImplicitOccurrenceIdx;7092 var nestedName = reResult[2];7093 if (!utils_1.isUndefined(nestedName)) {7094 ;7095 prodInstance.name = nestedName;7096 }7097 return buildAbstractProd(prodInstance, prodRange.range, allRanges);7098}7099function buildAtLeastOneProd(prodRange, allRanges) {7100 return buildProdWithOccurrence(atLeastOneRegEx, new gast_public_1.gast.RepetitionMandatory([]), prodRange, allRanges);7101}7102function buildAtLeastOneSepProd(prodRange, allRanges) {7103 return buildRepetitionWithSep(prodRange, allRanges, gast_public_1.gast.RepetitionMandatoryWithSeparator, atLeastOneWithSeparatorRegEx);7104}7105function buildManyProd(prodRange, allRanges) {7106 return buildProdWithOccurrence(manyRegEx, new gast_public_1.gast.Repetition([]), prodRange, allRanges);7107}7108function buildManySepProd(prodRange, allRanges) {7109 return buildRepetitionWithSep(prodRange, allRanges, gast_public_1.gast.RepetitionWithSeparator, manyWithSeparatorRegEx);7110}7111function buildRepetitionWithSep(prodRange, allRanges, repConstructor, regExp) {7112 var reResult = regExp.exec(prodRange.text);7113 var isImplicitOccurrenceIdx = reResult[1] === undefined;7114 var occurrenceIdx = isImplicitOccurrenceIdx ? 1 : parseInt(reResult[1], 10);7115 var sepName = reResult[3];7116 var separatorType = exports.terminalNameToConstructor[sepName];7117 if (!separatorType) {7118 throw Error("Separator Terminal Token name: " + sepName + " not found");7119 }7120 var repetitionInstance = new repConstructor([], separatorType, occurrenceIdx);7121 repetitionInstance.implicitOccurrenceIndex = isImplicitOccurrenceIdx;7122 var nestedName = reResult[2];7123 if (!utils_1.isUndefined(nestedName)) {7124 ;7125 repetitionInstance.name = nestedName;7126 }7127 return buildAbstractProd(repetitionInstance, prodRange.range, allRanges);7128}7129function buildOptionProd(prodRange, allRanges) {7130 return buildProdWithOccurrence(optionRegEx, new gast_public_1.gast.Option([]), prodRange, allRanges);7131}7132function buildOrProd(prodRange, allRanges) {7133 return buildProdWithOccurrence(orRegEx, new gast_public_1.gast.Alternation([]), prodRange, allRanges);7134}7135function buildFlatProd(prodRange, allRanges) {7136 var prodInstance = new gast_public_1.gast.Flat([]);7137 var reResult = orPartRegEx.exec(prodRange.text);7138 var nestedName = reResult[1];7139 if (!utils_1.isUndefined(nestedName)) {7140 ;7141 prodInstance.name = nestedName;7142 }7143 return buildAbstractProd(prodInstance, prodRange.range, allRanges);7144}7145function buildAbstractProd(prod, topLevelRange, allRanges) {7146 var secondLevelProds = getDirectlyContainedRanges(topLevelRange, allRanges);7147 var secondLevelInOrder = utils_1.sortBy(secondLevelProds, function (prodRng) {7148 return prodRng.range.start;7149 });7150 var definition = [];7151 utils_1.forEach(secondLevelInOrder, function (prodRng) {7152 definition.push(buildProdGast(prodRng, allRanges));7153 });7154 prod.definition = definition;7155 return prod;7156}7157function getDirectlyContainedRanges(y, prodRanges) {7158 return utils_1.filter(prodRanges, function (x) {7159 var isXDescendantOfY = y.strictlyContainsRange(x.range);7160 var xDoesNotHaveAnyAncestorWhichIsDecendantOfY = utils_1.every(prodRanges, function (maybeAnotherParent) {7161 var isParentOfX = maybeAnotherParent.range.strictlyContainsRange(x.range);7162 var isChildOfY = maybeAnotherParent.range.isStrictlyContainedInRange(y);7163 return !(isParentOfX && isChildOfY);7164 });7165 return 
isXDescendantOfY && xDoesNotHaveAnyAncestorWhichIsDecendantOfY;7166 });7167}7168exports.getDirectlyContainedRanges = getDirectlyContainedRanges;7169var singleLineCommentRegEx = /\/\/.*/g;7170var multiLineCommentRegEx = /\/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+\//g;7171var doubleQuoteStringLiteralRegEx = /(NAME\s*:\s*)?"([^\\"]|\\([bfnrtv"\\/]|u[0-9a-fA-F]{4}))*"/g;7172var singleQuoteStringLiteralRegEx = /(NAME\s*:\s*)?'([^\\']|\\([bfnrtv'\\/]|u[0-9a-fA-F]{4}))*'/g;7173function removeComments(text) {7174 var noSingleLine = text.replace(singleLineCommentRegEx, "");7175 var noComments = noSingleLine.replace(multiLineCommentRegEx, "");7176 return noComments;7177}7178exports.removeComments = removeComments;7179function replaceWithEmptyStringExceptNestedRules(match, nestedRuleGroup) {7180 // do not replace with empty string if a nest rule (NAME:"bamba") was detected7181 if (nestedRuleGroup !== undefined) {7182 return match;7183 }7184 return "";7185}7186function removeStringLiterals(text) {7187 var noDoubleQuotes = text.replace(doubleQuoteStringLiteralRegEx, replaceWithEmptyStringExceptNestedRules);7188 var noSingleQuotes = noDoubleQuotes.replace(singleQuoteStringLiteralRegEx, replaceWithEmptyStringExceptNestedRules);7189 return noSingleQuotes;7190}7191exports.removeStringLiterals = removeStringLiterals;7192function createRanges(text) {7193 var terminalRanges = createTerminalRanges(text);7194 var refsRanges = createRefsRanges(text);7195 var atLeastOneRanges = createAtLeastOneRanges(text);7196 var atLeastOneSepRanges = createAtLeastOneSepRanges(text);7197 var manyRanges = createManyRanges(text);7198 var manySepRanges = createManySepRanges(text);7199 var optionRanges = createOptionRanges(text);7200 var orRanges = createOrRanges(text);7201 return [].concat(terminalRanges, refsRanges, atLeastOneRanges, atLeastOneSepRanges, manyRanges, manySepRanges, optionRanges, orRanges);7202}7203exports.createRanges = createRanges;7204function createTerminalRanges(text) {7205 return createRefOrTerminalProdRangeInternal(text, ProdType.TERMINAL, terminalRegGlobal);7206}7207exports.createTerminalRanges = createTerminalRanges;7208function createRefsRanges(text) {7209 return createRefOrTerminalProdRangeInternal(text, ProdType.REF, refRegExGlobal);7210}7211exports.createRefsRanges = createRefsRanges;7212function createAtLeastOneRanges(text) {7213 return createOperatorProdRangeParenthesis(text, ProdType.AT_LEAST_ONE, atLeastOneRegExGlobal);7214}7215exports.createAtLeastOneRanges = createAtLeastOneRanges;7216function createAtLeastOneSepRanges(text) {7217 return createOperatorProdRangeParenthesis(text, ProdType.AT_LEAST_ONE_SEP, atLeastOneWithSeparatorRegExGlobal);7218}7219exports.createAtLeastOneSepRanges = createAtLeastOneSepRanges;7220function createManyRanges(text) {7221 return createOperatorProdRangeParenthesis(text, ProdType.MANY, manyRegExGlobal);7222}7223exports.createManyRanges = createManyRanges;7224function createManySepRanges(text) {7225 return createOperatorProdRangeParenthesis(text, ProdType.MANY_SEP, manyWithSeparatorRegExGlobal);7226}7227exports.createManySepRanges = createManySepRanges;7228function createOptionRanges(text) {7229 return createOperatorProdRangeParenthesis(text, ProdType.OPTION, optionRegExGlobal);7230}7231exports.createOptionRanges = createOptionRanges;7232function createOrRanges(text) {7233 var orRanges = createOperatorProdRangeParenthesis(text, ProdType.OR, orRegExGlobal);7234 // have to split up the OR cases into separate FLAT productions7235 // (A |BB | CDE) ==> or.def[0] --> FLAT(A) , 
or.def[1] --> FLAT(BB) , or.def[2] --> FLAT(CCDE)7236 var orSubPartsRanges = createOrPartRanges(orRanges);7237 return orRanges.concat(orSubPartsRanges);7238}7239exports.createOrRanges = createOrRanges;7240var findClosingCurly = utils_1.partial(findClosingOffset, "{", "}");7241var findClosingParen = utils_1.partial(findClosingOffset, "(", ")");7242function createOrPartRanges(orRanges) {7243 var orPartRanges = [];7244 utils_1.forEach(orRanges, function (orRange) {7245 var currOrParts = createOperatorProdRangeInternal(orRange.text, ProdType.FLAT, orPartRegExGlobal, findClosingCurly);7246 var currOrRangeStart = orRange.range.start;7247 // fix offsets as we are working on a subset of the text7248 utils_1.forEach(currOrParts, function (orPart) {7249 orPart.range.start += currOrRangeStart;7250 orPart.range.end += currOrRangeStart;7251 });7252 orPartRanges = orPartRanges.concat(currOrParts);7253 });7254 var uniqueOrPartRanges = utils_1.uniq(orPartRanges, function (prodRange) {7255 // using "~" as a separator for the identify function as its not a valid char in javascript7256 return (prodRange.type +7257 "~" +7258 prodRange.range.start +7259 "~" +7260 prodRange.range.end +7261 "~" +7262 prodRange.text);7263 });7264 return uniqueOrPartRanges;7265}7266exports.createOrPartRanges = createOrPartRanges;7267function createRefOrTerminalProdRangeInternal(text, prodType, pattern) {7268 var prodRanges = [];7269 var matched;7270 while ((matched = pattern.exec(text))) {7271 var start = matched.index;7272 var stop_1 = pattern.lastIndex;7273 var currRange = new range_1.Range(start, stop_1);7274 var currText = matched[0];7275 prodRanges.push({7276 range: currRange,7277 text: currText,7278 type: prodType7279 });7280 }7281 return prodRanges;7282}7283function createOperatorProdRangeParenthesis(text, prodType, pattern) {7284 return createOperatorProdRangeInternal(text, prodType, pattern, findClosingParen);7285}7286function createOperatorProdRangeInternal(text, prodType, pattern, findTerminatorOffSet) {7287 var operatorRanges = [];7288 var matched;7289 while ((matched = pattern.exec(text))) {7290 var start = matched.index;7291 // note that (start + matched[0].length) is the first character AFTER the match7292 var stop_2 = findTerminatorOffSet(start + matched[0].length, text);7293 var currRange = new range_1.Range(start, stop_2);7294 var currText = text.substr(start, stop_2 - start + 1);7295 operatorRanges.push({7296 range: currRange,7297 text: currText,7298 type: prodType7299 });7300 }7301 return operatorRanges;7302}7303function findClosingOffset(opening, closing, start, text) {7304 var parenthesisStack = [1];7305 var i = -1;7306 while (!utils_1.isEmpty(parenthesisStack) && i + start < text.length) {7307 i++;7308 var nextChar = text.charAt(start + i);7309 if (nextChar === opening) {7310 parenthesisStack.push(1);7311 }7312 else if (nextChar === closing) {7313 parenthesisStack.pop();7314 }7315 }7316 // valid termination of the search loop7317 if (utils_1.isEmpty(parenthesisStack)) {7318 return i + start;7319 }7320 else {7321 throw new Error("INVALID INPUT TEXT, UNTERMINATED PARENTHESIS");7322 }7323}7324exports.findClosingOffset = findClosingOffset;7325//# sourceMappingURL=gast_builder.js.map7326/***/ }),7327/* 25 */7328/***/ (function(module, exports, __webpack_require__) {7329"use strict";7330Object.defineProperty(exports, "__esModule", { value: true });7331var Range = /** @class */ (function () {7332 function Range(start, end) {7333 this.start = start;7334 this.end = end;7335 if (!isValidRange(start, end)) {7336 throw 
new Error("INVALID RANGE");7337 }7338 }7339 Range.prototype.contains = function (num) {7340 return this.start <= num && this.end >= num;7341 };7342 Range.prototype.containsRange = function (other) {7343 return this.start <= other.start && this.end >= other.end;7344 };7345 Range.prototype.isContainedInRange = function (other) {7346 return other.containsRange(this);7347 };7348 Range.prototype.strictlyContainsRange = function (other) {7349 return this.start < other.start && this.end > other.end;7350 };7351 Range.prototype.isStrictlyContainedInRange = function (other) {7352 return other.strictlyContainsRange(this);7353 };7354 return Range;7355}());7356exports.Range = Range;7357function isValidRange(start, end) {7358 return !(start < 0 || end < start);7359}7360exports.isValidRange = isValidRange;7361//# sourceMappingURL=range.js.map7362/***/ }),7363/* 26 */7364/***/ (function(module, exports, __webpack_require__) {7365"use strict";7366Object.defineProperty(exports, "__esModule", { value: true });7367var utils_1 = __webpack_require__(0);7368var lang_extensions_1 = __webpack_require__(3);7369var checks_1 = __webpack_require__(13);7370function defaultVisit(ctx, param) {7371 var childrenNames = utils_1.keys(ctx);7372 var childrenNamesLength = childrenNames.length;7373 for (var i = 0; i < childrenNamesLength; i++) {7374 var currChildName = childrenNames[i];7375 var currChildArray = ctx[currChildName];7376 var currChildArrayLength = currChildArray.length;7377 for (var j = 0; j < currChildArrayLength; j++) {7378 var currChild = currChildArray[j];7379 // distinction between Tokens Children and CstNode children7380 if (currChild.tokenTypeIdx === undefined) {7381 if (currChild.fullName !== undefined) {7382 this[currChild.fullName](currChild.children, param);7383 }7384 else {7385 this[currChild.name](currChild.children, param);7386 }7387 }7388 }7389 }7390 // defaultVisit does not support generic out param7391 return undefined;7392}7393exports.defaultVisit = defaultVisit;7394function createBaseSemanticVisitorConstructor(grammarName, ruleNames) {7395 var derivedConstructor = function () { };7396 // can be overwritten according to:7397 // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/7398 // name?redirectlocale=en-US&redirectslug=JavaScript%2FReference%2FGlobal_Objects%2FFunction%2Fname7399 lang_extensions_1.defineNameProp(derivedConstructor, grammarName + "BaseSemantics");7400 var semanticProto = {7401 visit: function (cstNode, param) {7402 // enables writing more concise visitor methods when CstNode has only a single child7403 if (utils_1.isArray(cstNode)) {7404 if (cstNode.length > 0) {7405 cstNode = cstNode[0];7406 }7407 else {7408 // enables passing optional CstNodes concisely.7409 return undefined;7410 }7411 }7412 if (cstNode.fullName !== undefined) {7413 return this[cstNode.fullName](cstNode.children, param);7414 }7415 else {7416 return this[cstNode.name](cstNode.children, param);7417 }7418 },7419 validateVisitor: function () {7420 var semanticDefinitionErrors = validateVisitor(this, ruleNames);7421 if (!utils_1.isEmpty(semanticDefinitionErrors)) {7422 var errorMessages = utils_1.map(semanticDefinitionErrors, function (currDefError) { return currDefError.msg; });7423 throw Error("Errors Detected in CST Visitor <" + lang_extensions_1.functionName(this.constructor) + ">:\n\t" +7424 ("" + errorMessages.join("\n\n").replace(/\n/g, "\n\t")));7425 }7426 }7427 };7428 derivedConstructor.prototype = semanticProto;7429 derivedConstructor.prototype.constructor = 
derivedConstructor;7430 derivedConstructor._RULE_NAMES = ruleNames;7431 return derivedConstructor;7432}7433exports.createBaseSemanticVisitorConstructor = createBaseSemanticVisitorConstructor;7434function createBaseVisitorConstructorWithDefaults(grammarName, ruleNames, baseConstructor) {7435 var derivedConstructor = function () { };7436 // can be overwritten according to:7437 // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/7438 // name?redirectlocale=en-US&redirectslug=JavaScript%2FReference%2FGlobal_Objects%2FFunction%2Fname7439 lang_extensions_1.defineNameProp(derivedConstructor, grammarName + "BaseSemanticsWithDefaults");7440 var withDefaultsProto = Object.create(baseConstructor.prototype);7441 utils_1.forEach(ruleNames, function (ruleName) {7442 withDefaultsProto[ruleName] = defaultVisit;7443 });7444 derivedConstructor.prototype = withDefaultsProto;7445 derivedConstructor.prototype.constructor = derivedConstructor;7446 return derivedConstructor;7447}7448exports.createBaseVisitorConstructorWithDefaults = createBaseVisitorConstructorWithDefaults;7449var CstVisitorDefinitionError;7450(function (CstVisitorDefinitionError) {7451 CstVisitorDefinitionError[CstVisitorDefinitionError["REDUNDANT_METHOD"] = 0] = "REDUNDANT_METHOD";7452 CstVisitorDefinitionError[CstVisitorDefinitionError["MISSING_METHOD"] = 1] = "MISSING_METHOD";7453})(CstVisitorDefinitionError = exports.CstVisitorDefinitionError || (exports.CstVisitorDefinitionError = {}));7454function validateVisitor(visitorInstance, ruleNames) {7455 var missingErrors = validateMissingCstMethods(visitorInstance, ruleNames);7456 var redundantErrors = validateRedundantMethods(visitorInstance, ruleNames);7457 return missingErrors.concat(redundantErrors);7458}7459exports.validateVisitor = validateVisitor;7460function validateMissingCstMethods(visitorInstance, ruleNames) {7461 var errors = utils_1.map(ruleNames, function (currRuleName) {7462 if (!utils_1.isFunction(visitorInstance[currRuleName])) {7463 return {7464 msg: "Missing visitor method: <" + currRuleName + "> on " + lang_extensions_1.functionName(visitorInstance.constructor) + " CST Visitor.",7465 type: CstVisitorDefinitionError.MISSING_METHOD,7466 methodName: currRuleName7467 };7468 }7469 });7470 return utils_1.compact(errors);7471}7472exports.validateMissingCstMethods = validateMissingCstMethods;7473var VALID_PROP_NAMES = ["constructor", "visit", "validateVisitor"];7474function validateRedundantMethods(visitorInstance, ruleNames) {7475 var errors = [];7476 for (var prop in visitorInstance) {7477 if (checks_1.validTermsPattern.test(prop) &&7478 utils_1.isFunction(visitorInstance[prop]) &&7479 !utils_1.contains(VALID_PROP_NAMES, prop) &&7480 !utils_1.contains(ruleNames, prop)) {7481 errors.push({7482 msg: "Redundant visitor method: <" + prop + "> on " + lang_extensions_1.functionName(visitorInstance.constructor) + " CST Visitor\n" +7483 "There is no Grammar Rule corresponding to this method's name.\n" +7484 ("For utility methods on visitor classes use methods names that do not match /" + checks_1.validTermsPattern.source + "/."),7485 type: CstVisitorDefinitionError.REDUNDANT_METHOD,7486 methodName: prop7487 });7488 }7489 }7490 return errors;7491}7492exports.validateRedundantMethods = validateRedundantMethods;7493//# sourceMappingURL=cst_visitor.js.map7494/***/ }),7495/* 27 */7496/***/ (function(module, exports, __webpack_require__) {7497"use strict";7498Object.defineProperty(exports, "__esModule", { value: true });7499var cache_1 = 
__webpack_require__(7);7500/**7501 * Clears the chevrotain internal cache.7502 * This should not be used in regular work flows, This is intended for7503 * unique use cases for example: online playground where the a parser with the same name is initialized with7504 * different implementations multiple times.7505 */7506function clearCache() {7507 cache_1.clearCache();7508}7509exports.clearCache = clearCache;7510//# sourceMappingURL=cache_public.js.map7511/***/ }),7512/* 28 */7513/***/ (function(module, exports, __webpack_require__) {7514"use strict";7515Object.defineProperty(exports, "__esModule", { value: true });7516var version_1 = __webpack_require__(11);7517function createSyntaxDiagramsCode(grammar, _a) {7518 var _b = _a === void 0 ? {} : _a, _c = _b.resourceBase, resourceBase = _c === void 0 ? "https://unpkg.com/chevrotain@" + version_1.VERSION + "/diagrams/" : _c, _d = _b.css, css = _d === void 0 ? "https://unpkg.com/chevrotain@" + version_1.VERSION + "/diagrams/diagrams.css" : _d;7519 var header = "\n<!-- This is a generated file -->\n<!DOCTYPE html>\n<meta charset=\"utf-8\">\n<style>\n body {\n background-color: hsl(30, 20%, 95%)\n }\n</style>\n\n";7520 var cssHtml = "\n<link rel='stylesheet' href='" + css + "'>\n";7521 var scripts = "\n<script src='" + resourceBase + "vendor/railroad-diagrams.js'></script>\n<script src='" + resourceBase + "src/diagrams_builder.js'></script>\n<script src='" + resourceBase + "src/diagrams_behavior.js'></script>\n<script src='" + resourceBase + "src/main.js'></script>\n";7522 var diagramsDiv = "\n<div id=\"diagrams\" align=\"center\"></div> \n";7523 var serializedGrammar = "\n<script>\n window.serializedGrammar = " + JSON.stringify(grammar, null, " ") + ";\n</script>\n";7524 var initLogic = "\n<script>\n var diagramsDiv = document.getElementById(\"diagrams\");\n main.drawDiagramsFromSerializedGrammar(serializedGrammar, diagramsDiv);\n</script>\n";7525 return (header + cssHtml + scripts + diagramsDiv + serializedGrammar + initLogic);7526}7527exports.createSyntaxDiagramsCode = createSyntaxDiagramsCode;7528//# sourceMappingURL=render_public.js.map7529/***/ })7530/******/ ]);...
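Everything the gast builder above produces is anchored by the small Range class that follows it: each production range records start/end offsets, and the containment checks decide which ranges nest inside which. Below is a minimal sketch of those containment semantics, assuming Range is in scope (for example imported from the internal range module shown above); the concrete offsets are illustrative only, not taken from real chevrotain output.

// Hypothetical usage of the Range class shown above.
var outer = new Range(10, 50);   // e.g. the span of an OPTION(...) production
var inner = new Range(15, 30);   // e.g. a terminal consumed inside it

outer.contains(20);              // true  -- 20 lies between start and end (inclusive)
outer.containsRange(inner);      // true  -- inner starts and ends within outer
inner.isContainedInRange(outer); // true  -- the symmetric check
outer.strictlyContainsRange(new Range(10, 50)); // false -- bounds must strictly differ

// new Range(5, 2) would throw "INVALID RANGE", because end < start fails isValidRange.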


parser.py

Source:parser.py Github


1from typing import *2from .scanner import (3 Token,4 Identifier,5 Operator,6 RealLiteral,7 IntLiteral,8 Special,9 NullToken,10 Terminate,11)12from .ast import *13from .types import TypeCheckError14import warnings15import jax.numpy16# https://mc-stan.org/docs/2_18/reference-manual/bnf-grammars.html17# https://mc-stan.org/docs/2_28/reference-manual/arithmetic-expressions.html18# define group parsing rules for operators19class PrefixOps:20 """21 A utility class that's used to identify and build prefix-operation expressions.22 """23 ops = ["!", "-"]24 precedence = {"!": 50, "-": 50}25 @staticmethod26 def check(tok: Token):27 if isinstance(tok, Operator) and tok.value in PrefixOps.ops:28 return True29 return False30 @staticmethod31 def generate(expr: Expr, tok: Operator):32 match tok.value:33 case "-":34 return PrefixNegation(expr, line_index=tok.line_index, column_index=tok.column_index)35class PostfixOps:36 ops = ["'"]37 @staticmethod38 def check(tok: Token):39 if isinstance(tok, Operator) and tok.value in PostfixOps.ops:40 return True41 return False42class InfixOps:43 """44 A utility class that's used to identify and build binary operation expressions.45 Currently supported operations are:46 `ops.Sum`, `ops.Diff`, `ops.Mul`, `ops.Pow`, `ops.Mod`, `ops.Div`47 """48 ops = ["+", "-", "*", "^", "/", "%", "<", ">"]49 precedence = {"+": 10, "-": 10, "*": 30, "/": 30, "^": 40, "%": 30, "<": 5, ">": 5}50 @staticmethod51 def check(tok: Type[Token]):52 if isinstance(tok, Operator) and tok.value in InfixOps.ops:53 return True54 return False55 @staticmethod56 def generate(left: Expr, right: Expr, operator: Type[Token]):57 if operator.value == "+":58 return Sum(left=left, right=right, line_index=operator.line_index, column_index=operator.column_index)59 elif operator.value == "-":60 return Diff(left=left, right=right, line_index=operator.line_index, column_index=operator.column_index)61 elif operator.value == "*":62 return Mul(left=left, right=right, line_index=operator.line_index, column_index=operator.column_index)63 elif operator.value == "/":64 return Div(left=left, right=right, line_index=operator.line_index, column_index=operator.column_index)65 elif operator.value == "^":66 return Pow(base=left, exponent=right, line_index=operator.line_index, column_index=operator.column_index)67 elif operator.value == "%":68 return Mod(left=left, right=right, line_index=operator.line_index, column_index=operator.column_index)69 else:70 raise Exception(f"InfixOps: Unknown operator type {operator.value}")71# group parsing rules for statements72class AssignmentOps:73 """74 A utility class that's used to identify and build assignments in statements.75 Currently supports the following assignment types:76 `ops.Assignment`77 """78 ops = ["="]79 @staticmethod80 def check(tok: Type[Token]):81 if isinstance(tok, Operator) and tok.value in AssignmentOps.ops:82 return True83 return False84 @staticmethod85 def generate(lhs: Expr, rhs: Expr, operator: Operator):86 # check() has been ran for operator87 if operator.value == "=":88 return Assignment(lhs=lhs, rhs=rhs, line_index=operator.line_index, column_index=operator.column_index)89class UnaryFunctions:90 """91 A utility class that's used to identify and build unary functions.92 """93 names = ["exp", "log", "abs", "floor", "ceil", "round", "sin", "cos", "tan", "arcsin", "arccos", "arctan", "logit", "inverse_logit"]94 precedence = {95 "exp": 100,96 "log": 100,97 "abs": 100,98 "floor": 100,99 "ceil": 100,100 "round": 100,101 "sin": 100,102 "cos": 100,103 "tan": 100,104 
"arcsin": 100,105 "arccos": 100,106 "arctan": 100,107 "logit": 100,108 "inverse_logit": 100,109 }110 @staticmethod111 def check(tok: Type[Token]):112 if isinstance(tok, Identifier) and tok.value in UnaryFunctions.names:113 return True114 return False115 @staticmethod116 def generate(subexpr: Expr, func_type: Identifier):117 if func_type.value == "exp":118 return Exp(subexpr=subexpr, line_index=func_type.line_index, column_index=func_type.column_index)119 elif func_type.value == "log":120 return Log(subexpr=subexpr, line_index=func_type.line_index, column_index=func_type.column_index)121 elif func_type.value == "abs":122 return Abs(subexpr=subexpr, line_index=func_type.line_index, column_index=func_type.column_index)123 elif func_type.value == "floor":124 return Floor(subexpr=subexpr, line_index=func_type.line_index, column_index=func_type.column_index)125 elif func_type.value == "ceil":126 return Ceil(subexpr=subexpr, line_index=func_type.line_index, column_index=func_type.column_index)127 elif func_type.value == "round":128 return Round(subexpr=subexpr, line_index=func_type.line_index, column_index=func_type.column_index)129 elif func_type.value == "sin":130 return Sin(subexpr=subexpr, line_index=func_type.line_index, column_index=func_type.column_index)131 elif func_type.value == "cos":132 return Cos(subexpr=subexpr, line_index=func_type.line_index, column_index=func_type.column_index)133 elif func_type.value == "tan":134 return Tan(subexpr=subexpr, line_index=func_type.line_index, column_index=func_type.column_index)135 elif func_type.value == "arcsin":136 return Arcsin(subexpr=subexpr, line_index=func_type.line_index, column_index=func_type.column_index)137 elif func_type.value == "arccos":138 return Arccos(subexpr=subexpr, line_index=func_type.line_index, column_index=func_type.column_index)139 elif func_type.value == "arctan":140 return Arctan(subexpr=subexpr, line_index=func_type.line_index, column_index=func_type.column_index)141 elif func_type.value == "logit":142 return Logit(subexpr=subexpr, line_index=func_type.line_index, column_index=func_type.column_index)143 elif func_type.value == "inverse_logit":144 return InverseLogit(subexpr=subexpr, line_index=func_type.line_index, column_index=func_type.column_index)145class BinaryFunctions:146 """147 A utility class that's used to identify and build binary functions.148 """149 names = ["shift"]150 precedence = {151 "shift": 100,152 }153 @staticmethod154 def check(tok: Type[Token]):155 if isinstance(tok, Identifier) and tok.value in BinaryFunctions.names:156 return True157 return False158 @staticmethod159 def generate(arg1: Expr, arg2: Expr, func_type: Identifier):160 return Shift(subscript_column=arg1, shift_expr=arg2, line_index=func_type.line_index, column_index=func_type.column_index)161class Distributions:162 """163 A utility class that's used to identify and build distributions.164 Currently supported distributions are:165 `ops.Normal`, `ops.BernoulliLogit`, `ops.LogNormal`, `ops.Cauchy`, `ops.Exponential`166 """167 names = ["normal", "bernoulli_logit", "log_normal", "cauchy", "exponential"]168 @staticmethod169 def check(tok: Type[Token]):170 if isinstance(tok, Identifier) and tok.value in Distributions.names:171 return True172 return False173 @staticmethod174 def generate(lhs: Expr, expressions: List[Expr], dist_type: Identifier):175 if dist_type.value == "normal":176 if len(expressions) != 2:177 raise Exception(f"normal distribution needs 2 parameters, but got {len(expressions)}!")178 return Normal(179 variate=lhs, 
mean=expressions[0], std=expressions[1], line_index=dist_type.line_index, column_index=dist_type.column_index180 )181 elif dist_type.value == "bernoulli_logit":182 if len(expressions) != 1:183 raise Exception(f"bernoulli_logit distribution needs 1 parameter, but got {len(expressions)}!")184 return BernoulliLogit(variate=lhs, logit_p=expressions[0], line_index=dist_type.line_index, column_index=dist_type.column_index)185 elif dist_type.value == "log_normal":186 if len(expressions) != 2:187 raise Exception(f"log_normal distribution needs 2 parameters, but got {len(expressions)}!")188 return LogNormal(189 variate=lhs, mean=expressions[0], std=expressions[1], line_index=dist_type.line_index, column_index=dist_type.column_index190 )191 elif dist_type.value == "cauchy":192 if len(expressions) != 2:193 raise Exception(f"cauchy distribution needs 2 parameters, but got {len(expressions)}!")194 return Cauchy(195 variate=lhs,196 location=expressions[0],197 scale=expressions[1],198 line_index=dist_type.line_index,199 column_index=dist_type.column_index,200 )201 elif dist_type.value == "exponential":202 if len(expressions) != 1:203 raise Exception(f"exponential distribution needs 1 parameter, but got {len(expressions)}!")204 return Exponential(variate=lhs, scale=expressions[0], line_index=dist_type.line_index, column_index=dist_type.column_index)205class ParseError(Exception):206 def __init__(self, message, code_string: str, line_num: int, column_num: int):207 code_string = code_string.split("\n")[line_num]208 exception_message = f"An error occured while parsing the following line({line_num}:{column_num}):\n{code_string}\n{' ' * column_num + '^'}\n{message}"209 super().__init__(exception_message)210class Parser:211 """212 The parser for rat is a modified Pratt parser.213 Since rat programs are defined within the context of data, the parser needs to know214 the column names of the data.215 """216 def __init__(self, tokens: List[Token], data_names: List[str], model_string: str = ""):217 """218 Initialize the parser219 :param tokens: A list of `scanner.Token`. This should be the output format of `scanner.scanner`220 :param data_names: A list of data column names221 :param model_string: Optional. The original model code string. If supplied, used to generate detailed errors.222 """223 self.out_tree = []224 self.tokens = tokens225 self.data_names = data_names226 self.model_string = model_string227 def peek(self, k=0) -> Type[Token]:228 """229 k-token lookahead. Returns `scanner.NullToken` if there are no tokens in the token stack.230 """231 if k >= len(self.tokens):232 return NullToken()233 return self.tokens[k]234 def remove(self, index=0):235 self.tokens.pop(index)236 def expect_token(237 self,238 token_types: Union[Type[Token], List[Type[Token]]],239 token_value: Union[None, str, List[str]] = None,240 remove: bool = False,241 lookahead: int = 0,242 ):243 """244 Checks if the next token in the token stack is of designated type and value. If not, raise an Exception.245 :param token_types: A list of `scanner.Token` types or a single `scanner.Token` type that's allowed.246 :param token_value: A single or a list of allowed token value strings247 :param remove: Boolean, whether to remove the token after checking or not. Defaults to False248 :param lookahead: lookahead. 
Defaults to 0(immediate token)249 :return: None250 """251 next_token = self.peek(lookahead)252 if not token_value:253 token_value = [next_token.value]254 if isinstance(token_value, str):255 token_value = [token_value]256 if not isinstance(token_types, tuple):257 token_types = (token_types,)258 for token_type in token_types:259 if isinstance(next_token, token_type) and next_token.value in token_value:260 if remove:261 self.remove()262 return True263 raise ParseError(264 f"Expected token type(s) {[x.__name__ for x in token_types]} with value in {token_value}, but received {next_token.__class__.__name__} with value '{next_token.value}'!",265 self.model_string,266 next_token.line_index,267 next_token.column_index,268 )269 def expressions(self, entry_token_value, is_subscript=False) -> List[Expr]:270 """271 expressions are used to evaluate repeated, comma-separated expressions in the form "expr, expr, expr"272 It's primarily used to evaluate subscripts or function arguments. In the case it's evaluating subscripts, it273 will also return the shift amounts of each subscript.274 :param entry_token_value: A single character which denotes the boundary token that starts the expression275 sequence. For example, "myFunc(expr1, expr2, expr3)" would mean the 3-expression sequence is present between the276 parantheses. So the entry token would be "(" and exit token ")".277 For subscripts, it would be something like "my_variable[sub_1, shift(sub_2, 1)]. That would mean entry token278 "[" and exit token "]".279 :return: list of expressions280 """281 if entry_token_value == "[":282 exit_value = "]"283 elif entry_token_value == "(":284 exit_value = ")"285 else:286 raise Exception(f"expressions() received invalid entry token value with value {entry_token_value}, but expected '[' or ']'")287 expressions = []288 while True:289 token = self.peek()290 if isinstance(token, Special):291 self.expect_token(Special, (exit_value, ","))292 if token.value == exit_value:293 break294 elif token.value == ",":295 self.remove() # character ,296 continue297 else:298 expression = self.expression(is_subscript=is_subscript)299 expressions.append(expression)300 return expressions301 def parse_nud(self, is_lhs=False, is_subscript=False):302 token = self.peek()303 if isinstance(token, RealLiteral): # if just a real number, return it304 exp = RealConstant(value=float(token.value), line_index=token.line_index, column_index=token.column_index)305 self.remove() # real306 return exp307 elif isinstance(token, IntLiteral): # if just an integer, return it308 exp = IntegerConstant(value=int(token.value), line_index=token.line_index, column_index=token.column_index)309 self.remove() # integer310 return exp311 elif PrefixOps.check(token): # prefixOp expression312 self.expect_token(Operator, PrefixOps.ops) # operator313 self.remove()314 next_expression = self.expression(PrefixOps.precedence[token.value], is_lhs=is_lhs, is_subscript=is_subscript)315 try:316 exp = PrefixOps.generate(next_expression, token)317 except TypeCheckError as e:318 raise ParseError(str(e), self.model_string, token.line_index, token.column_index)319 return exp320 elif UnaryFunctions.check(token): # unaryFunction '(' expression ')'321 self.remove() # functionName322 self.expect_token(Special, "(")323 self.remove() # (324 argument = self.expression(is_lhs=is_lhs, is_subscript=is_subscript)325 self.expect_token(Special, ")")326 self.remove() # )327 try:328 exp = UnaryFunctions.generate(argument, token)329 except TypeCheckError as e:330 raise ParseError(str(e), 
self.model_string, token.line_index, token.column_index)331 return exp332 elif BinaryFunctions.check(token): # binaryFunction '(' expression, expression ')'333 self.remove() # function name334 self.expect_token(Special, "(")335 self.remove() # (336 arguments = self.expressions("(", is_subscript=is_subscript)337 self.expect_token(Special, ")")338 self.remove() # )339 try:340 exp = BinaryFunctions.generate(arguments[0], arguments[1], token)341 except TypeCheckError as e:342 raise ParseError(str(e), self.model_string, token.line_index, token.column_index)343 except Exception as e:344 raise ParseError(str(e), self.model_string, token.line_index, token.column_index)345 return exp346 elif isinstance(token, Identifier): # parse data and param347 if token.value in self.data_names:348 if not is_subscript:349 exp = Data(name=token.value, line_index=token.line_index, column_index=token.column_index)350 else:351 exp = SubscriptColumn(name=token.value, line_index=token.line_index, column_index=token.column_index)352 self.remove() # identifier353 elif token.value in Distributions.names:354 raise ParseError("A distribution has been found in an expressions", self.model_string, token.line_index, token.column_index)355 else:356 if not is_subscript:357 exp = self.parse_param(is_lhs=is_lhs)358 else:359 exp = SubscriptColumn(name=token.value, line_index=token.line_index, column_index=token.column_index)360 self.remove() # token identifier(subscript)361 next_token = self.peek()362 if isinstance(next_token, Special) and next_token.value == "[":363 # identifier '[' subscript_expressions ']'364 self.remove() # [365 subscript_expressions = self.expressions("[", is_subscript=True) # list of expressions366 # The data types in the parsed expressions are being used as subscripts367 self.expect_token(Special, "]")368 self.remove() # ]369 try:370 subscript_names, shift_amounts = [], []371 for subscript_expr in subscript_expressions:372 match subscript_expr:373 case Shift():374 subscript_names.append(subscript_expr.subscript_column)375 shift_amounts.append(subscript_expr.shift_expr)376 case SubscriptColumn():377 subscript_names.append(subscript_expr)378 shift_amounts.append(IntegerConstant(value=0))379 case _:380 raise ParseError(381 f"Found unknown expression class {subscript_expr.__class__.__name__} when parsing subscripts",382 self.model_string,383 subscript_expr.line_index,384 subscript_expr.column_index,385 )386 exp.subscript = Subscript(387 names=tuple(subscript_names),388 shifts=tuple(shift_amounts),389 line_index=next_token.line_index,390 column_index=next_token.column_index,391 )392 except TypeCheckError as e:393 raise ParseError(str(e), self.model_string, next_token.line_index, next_token.column_index)394 next_token = self.peek()395 if isinstance(next_token, Operator) and next_token.value == "'":396 self.remove()397 exp.prime = True398 return exp399 elif isinstance(token, Special) and token.value == "(": # ( expression )400 self.remove() # (401 exp = self.expression(is_lhs=is_lhs, is_subscript=is_subscript)402 self.expect_token(Special, ")")403 self.remove() # )404 return exp405 else:406 raise ParseError(407 f"{token.value} can't be in the beginning of a construct!", self.model_string, token.line_index, token.column_index408 )409 def parse_param(self, is_lhs=False):410 self.expect_token(Identifier)411 token = self.peek()412 exp = Param(name=token.value, line_index=token.line_index, column_index=token.column_index)413 self.remove() # identifier414 # check for constraints param<lower = 0.0, upper = 1.0>415 # 
3-token lookahead: "<" + "lower" or "upper"416 lookahead_1 = self.peek() # <417 lookahead_2 = self.peek(1) # lower, upper418 if lookahead_1.value == "<" and lookahead_2.value in (419 "lower",420 "upper",421 ):422 if not is_lhs:423 raise ParseError(424 "Constraints for parameters/variables are only allowed on LHS.",425 self.model_string,426 lookahead_1.line_index,427 lookahead_1.column_index,428 )429 self.remove() # <430 # the problem is that ">" is considered as an operator, but in the case of constraints, it is431 # not an operator, but a delimeter denoting the end of the constraint region.432 # Therefore, we need to find the matching ">" and change it from operator type to special, so433 # the expression parser does not think of it as a "greater than" operator. This goes away from434 # the ll(k) approach and therefore is a very hacky way to fix the issue.435 n_openbrackets = 0436 for idx in range(len(self.tokens)):437 if self.peek(idx).value == "<":438 n_openbrackets += 1439 if self.peek(idx).value == ">":440 if n_openbrackets == 0:441 # switch from Operator to Special442 self.tokens[idx] = Special(">", self.peek(idx).line_index, self.peek(idx).column_index)443 break444 else:445 n_openbrackets -= 1446 # now actually parse the constraints447 lower = RealConstant(value=float("-inf"))448 upper = RealConstant(value=float("inf"))449 for _ in range(2):450 # loop at max 2 times, once for lower, once for upper451 if lookahead_2.value == "lower":452 self.remove() # "lower"453 self.expect_token(Operator, token_value="=")454 self.remove() # =455 lower = self.expression()456 elif lookahead_2.value == "upper":457 self.remove() # "upper"458 self.expect_token(Operator, token_value="=")459 self.remove() # =460 upper = self.expression()461 lookahead_1 = self.peek()462 # can be either ",", which means loop again, or ">", which breaks463 lookahead_2 = self.peek(1)464 # either "lower", or "upper" if lookahead_1 == ","465 if lookahead_1.value == ",":466 self.remove() # ,467 elif lookahead_1.value == ">":468 self.remove() # >469 break470 else:471 raise ParseError(472 f"Found unknown token with value {lookahead_1.value} when evaluating constraints",473 self.model_string,474 lookahead_1.line_index,475 lookahead_1.column_index,476 )477 # the for loop takes of the portion "<lower= ... >478 # this means the constraint part of been processed and479 # removed from the token queue at this point480 exp.lower = lower481 exp.upper = upper482 return exp483 def expression(self, min_precedence=0, is_lhs=False, is_subscript=False):484 """485 This function is used to evaluate an expression. 
Please refer to the BNF grammer to see what types of486 rules are being applied.487 :param min_precedence: Minimum precedence value to evaluate488 :return: An `ops.Expr` object.489 """490 left = self.parse_nud(is_lhs=is_lhs, is_subscript=is_subscript)491 while True:492 token = self.peek()493 if isinstance(token, Special) and token.value in (";", ",", ">", ")", "]"):494 break495 elif isinstance(token, NullToken) or isinstance(token, Terminate):496 break497 elif isinstance(token, Special) and token.value == "(": # '(' expression ')'498 self.remove() # (499 next_expression = self.expression(is_lhs=is_lhs, is_subscript=is_subscript)500 self.expect_token(Special, ")") # )501 self.remove() # )502 exp = next_expression # expression503 elif PostfixOps.check(token): # expression infixOps expression504 if PostfixOps.precedence[token.value] <= min_precedence:505 break506 self.expect_token(Operator, PostfixOps.ops)507 self.remove() # op508 exp = PostfixOps.generate(left, token)509 elif InfixOps.check(token): # expression infixOps expression510 if InfixOps.precedence[token.value] <= min_precedence:511 break512 self.expect_token(Operator, InfixOps.ops)513 self.remove() # op514 rhs = self.expression(min_precedence=InfixOps.precedence[token.value], is_lhs=is_lhs, is_subscript=is_subscript)515 try:516 exp = InfixOps.generate(left, rhs, token)517 except TypeCheckError as e:518 raise ParseError(str(e), self.model_string, token.line_index, token.column_index)519 elif isinstance(token, Identifier):520 if UnaryFunctions.check(token): # unaryFunction '(' expression ')'521 if UnaryFunctions.precedence[token.value] <= min_precedence:522 break523 self.remove() # functionName524 self.expect_token(Special, "(")525 self.remove() # (526 argument = self.expression(is_lhs=is_lhs, is_subscript=is_subscript)527 self.expect_token(Special, ")")528 self.remove() # )529 try:530 exp = UnaryFunctions.generate(argument, token)531 except TypeCheckError as e:532 raise ParseError(str(e), self.model_string, token.line_index, token.column_index)533 elif BinaryFunctions.check(token):534 if BinaryFunctions.precedence[token.value] <= min_precedence:535 break536 self.remove() # function name537 self.expect_token(Special, "(")538 self.remove() # (539 arguments = self.expressions(entry_token_value="(", is_subscript=is_subscript)540 self.expect_token(Special, ")")541 self.remove() # )542 try:543 exp = BinaryFunctions.generate(arguments[0], arguments[1], token)544 except TypeCheckError as e:545 raise ParseError(str(e), self.model_string, token.line_index, token.column_index)546 else:547 raise ParseError(f"Unknown token '{token.value}'", self.model_string, token.line_index, token.column_index)548 else:549 raise ParseError(f"Unknown token '{token.value}'", self.model_string, token.line_index, token.column_index)550 left = exp551 return left552 def statement(self):553 """554 Evaluates a single statement. Statements in rat are either assignments or sampling statements. They will get555 resolved into an `ops.Assignment` or an `ops.Distr` object.556 :return:557 """558 return_statement = None559 token = self.peek()560 if Distributions.check(token):561 raise ParseError("Cannot assign to a distribution.", self.model_string, token.line_index, token.column_index)562 # Step 1. 
evaluate lhs, assume it's expression563 lhs = self.parse_nud(is_lhs=True)564 if isinstance(lhs, Expr):565 op = self.peek()566 if AssignmentOps.check(op):567 self.remove() # assignment operator568 rhs = self.expression()569 try:570 return_statement = AssignmentOps.generate(lhs, rhs, op)571 except TypeCheckError as e:572 raise ParseError(str(e), self.model_string, op.line_index, op.column_index)573 elif isinstance(op, Special) and op.value == "~":574 # distribution declaration575 self.expect_token(Special, "~")576 self.remove() # ~577 distribution = self.peek()578 self.expect_token(Identifier, Distributions.names)579 self.remove() # distribution580 self.expect_token(Special, "(")581 self.remove() # (582 expressions = self.expressions("(") # list of expression583 self.expect_token(Special, ")")584 self.remove() # )585 try:586 return_statement = Distributions.generate(lhs, expressions, distribution)587 except TypeCheckError as e:588 raise ParseError(str(e), self.model_string, distribution.line_index, distribution.column_index)589 else:590 if op.value == "<":591 raise ParseError(592 f"Constraints must be present in front of subscripts", self.model_string, op.line_index, op.column_index593 )594 else:595 raise ParseError(f"Unknown operator '{op.value}' in statement", self.model_string, op.line_index, op.column_index)596 if return_statement is not None:597 return return_statement...
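The parse_param method above is the clearest lookahead_1 example in this snippet: peek() and peek(1) give one- and two-token lookahead, so the parser can tell a constraint opener ("<" followed by "lower" or "upper") apart from an ordinary less-than operator without consuming anything. Below is a minimal JavaScript sketch of the same peek(k) pattern; the token shape and names are stand-ins for illustration, not the rat parser's actual classes.

// Sketch of k-token lookahead over a token array, mirroring Parser.peek(k).
function makeParser(tokens) {
  return {
    peek(k = 0) {
      // Return a null token instead of undefined when we run past the end.
      return k >= tokens.length ? { type: "NULL", value: "" } : tokens[k];
    },
    remove() {
      tokens.shift();
    },
    hasConstraint() {
      // Two-token lookahead: "<" followed by "lower" or "upper" means a
      // constraint region, not a less-than comparison.
      const lookahead_1 = this.peek();
      const lookahead_2 = this.peek(1);
      return lookahead_1.value === "<" &&
             (lookahead_2.value === "lower" || lookahead_2.value === "upper");
    }
  };
}

// Usage: tokens for "theta<lower=0.0>" after the identifier has been consumed.
const p = makeParser([
  { type: "Operator", value: "<" },
  { type: "Identifier", value: "lower" },
  { type: "Operator", value: "=" },
  { type: "RealLiteral", value: "0.0" },
  { type: "Special", value: ">" }
]);
console.log(p.hasConstraint()); // true -- neither peek() consumed a token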


index.js

Source:index.js Github


1const JisonLex = require("jison-lex"),2 path = require("path").win32,3 fs = require("fs"),4 escapeRegexpString = require("escape-string-regexp");5const grammar = fs.readFileSync(require.resolve("./comspec.l")).toString(),6 lexer = new JisonLex(grammar);7// ;;;;;;;;;;;;;;;;;;;;;;;8// ;; Utility functions ;;9// ;;;;;;;;;;;;;;;;;;;;;;;10//11function stringify_tokens(tokens) {12 return tokens.map(t => t.text).join("");13}14function has (obj, key) {15 return obj.hasOwnProperty(key);16}17function hasAll (obj, keys) {18 return keys.every(k => obj.hasOwnProperty(k));19}20function hasAny (obj, keys) {21 return keys.some(k => obj.hasOwnProperty(k));22}23// ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;24// ;; ;;25// ;; TYPE DEFINITIONS FOR DOCUMENTATION ;;26// ;; ;;27// ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;28//29// Token30// =====31/**32 * @typedef {Object} Token33 * @property {string} name - Name of the token (LITERAL, SET, ESCAPE, ...).34 * @property {string} match - Match object for the matched token.35 * @property {string} line - Line number upon which the token was found.36 * @property {string} text - Similar to 'match'.37 * @property {number} len - Length (in chars) of the matched token.38 * @property {Object} loc - Location of match (first/last line, first/last col).39 */40//41// Identified Command42// ==================43/**44 * @typedef {Object} IdentifiedCommand45 * @property {string} command - The name of the command ("" if unknown).46 * @property {number} offset - The offset of where the command identifier47 * ends in the tokens array.48 */49/**50 * Command dispatch table maps commands we can handle to functions51 * which do the command handling. Generally, when we're talking about52 * "handling a command", we mean altering the parsed token stream so53 * that the tokens are correctly altered based upon the command54 * handling them. For example, we may have been passed something55 * like:56 *57 * cmd "set foo=bar"58 *59 * The token stream for this command will look something like:60 *61 * LIT(c), LIT(m), LIT(d), STR("), STRLIT(s), STRLIT(e), ...62 *63 * If we remove the leading "cmd " from the string, and strip the64 * surrounding double quotes from the command and re-tokenise it,65 * we'll get a new tokenised sequence which we can handle.66 *67 */68const cmd_dispatch = {69 cmd: FILTER_handle_cmd70};71/**72 * Provides specific command clean-up for the 'cmd.exe' command.73 *74 * @param {IdentifiedCommand} ident - The identified command object.75 * @param {Tokens|Array} tokens - The array of tokens from parsing the cmd.76 *77 * @returns {Tokens|Array}78 */79function FILTER_handle_cmd (ident, tokens) {80 tokens = Array.prototype.slice.call(tokens);81 if (ident.offset >= tokens.length) {82 return {83 tokens: tokens,84 switches: {},85 finished: true86 };87 }88 let cmd = tokens89 .slice(ident.offset)90 .map(t => t.text)91 .join("")92 .replace(/^\s+/, "");93 // A note about argument parsing94 // =============================95 //96 // The CMD.EXE help page says that the syntax of the 'cmd.exe'97 // command is:98 //99 // CMD [charset] [options] [/C command]100 // CMD [charset] [options] [/K command]101 //102 // For example:103 //104 // CMD /V:on "set foo=bar& calc.exe"105 //106 //107 // The important part is the location of the first dquote, which108 // tells us where the COMMAND part of the line begins. 
We capture109 // the location of this char (if exists), and use its string110 // offset as the point at which we stop looking for command111 // switches.112 //113 const first_dquote_offset = cmd.split("").findIndex(chr => chr === '"'),114 switch_re = /\/([A-Z])([:][^\s]+)?(?:$|\s)/ig;115 let match = undefined,116 last_match_offset = undefined,117 switches = {118 delayed_expansion: false119 };120 const switch_lookup = {121 "c": "run_then_terminate",122 "C": "run_then_terminate",123 "v": "delayed_expansion",124 "V": "delayed_expansion",125 "e": "cmd_extensions",126 "E": "cmd_extensions",127 "f": "path_autocomplete",128 "F": "path_autocomplete"129 };130 while ((match = switch_re.exec(cmd))) {131 let wholematch = match[0],132 _switch = match[1],133 _value = "",134 match_end_offset = match[0].length + match.index;135 last_match_offset = match_end_offset;136 if (match[2] !== undefined) {137 _value = match[2].replace(/^:/, "");138 }139 if (/^[efv]$/i.test(_switch)) {140 _switch = switch_lookup[_switch];141 switch (_value.toLowerCase()) {142 case "off":143 _value = false;144 break;145 default:146 _value = true;147 }148 }149 else if (has(switch_lookup, _switch)) {150 _switch = switch_lookup[_switch];151 }152 switches[_switch] = _value;153 if (match_end_offset && (match_end_offset > first_dquote_offset)) {154 break;155 }156 }157 // Now we've finished parsing the command arguments, we can strip158 // the args leaving only the next part of the command string.159 if (last_match_offset !== undefined) {160 cmd = cmd.substr(last_match_offset);161 }162 // If the remaining command part starts and ends with a double163 // quote, we strip them.164 cmd = cmd.replace(/^\"|\"$/g, "");165 return {166 tokens: tokenise(cmd),167 switches: switches,168 finished: false169 };170}171/**172 * Parses a given command string in to individual commands, before173 * applying expansion and de-obfuscation filters to each command.174 *175 * @param {string} cmdstr - The original command string to be176 * de-obfuscated.177 *178*/179function parse_cmdstr (cmdstr, options) {180 const DEFAULTS = {181 delayed_expansion: false,182 expand_inline: false,183 vars: {}184 };185 options = options || {};186 options = Object.assign({}, DEFAULTS, options);187 let collector = { vars: {}, switches: {}, output: [] };188 cmdstr = expand_environment_variables(cmdstr, options.vars);189 (function parse_cmdstr_rec (cmdstr, switches) {190 switches = switches || {};191 split_command(cmdstr).forEach(cmd => {192 let result = interpret_command(cmd);193 collector.vars = Object.assign(collector.vars, result.vars);194 if (result.ident.finished) {195 collector.output.push(stringify_tokens(result.ident.tokens));196 }197 else if (result.ident.command === "cmd") {198 //199 // NOTES ON DELAYED EXPANSION200 // ==========================201 //202 // We have support for delayed expansion. Tests on203 // Win7 and Win10 hosts show that delayed expansion is204 // only enabled for the current CMD context, for205 // example, given the following command:206 //207 // cmd /V "set foo=bar& echo !foo!"208 //209 // The output will be "echo bar" because delayed210 // expansion is set. However, it does not cascade in211 // to lower-down CMD instances, for example:212 //213 // cmd /V "cmd \"set foo=bar& echo !foo!\""214 //215 // This will produce "echo !foo!" 
because we created a216 // sub-cmd context, and the default was applied.217 //218 let new_cmd = stringify_tokens(result.ident.tokens);219 if (new_cmd.toLowerCase() !== "cmd") { // infinite loop protection.220 parse_cmdstr_rec(221 stringify_tokens(result.ident.tokens),222 result.ident.switches223 );224 }225 }226 else {227 let delayed_exp = Object.assign({}, options, switches).delayed_expansion;228 // We do not want to expand percentage vars as229 // that time has passed.230 cmd = expand_environment_variables(231 result.clean,232 collector.vars,233 {234 expand_percent_vars: options.expand_inline,235 delayed_expansion: delayed_exp236 }237 );238 collector.output.push(cmd);239 }240 });241 }(cmdstr));242 return collector.output;243}244/**245 * Given an array of Token objects, attempts to identify the command246 * being run. If a command is found, an IdentifiedCommand object is247 * returned which will contain both the command name and the offset248 * from where abouts in the tokens array the command string ends. If249 * the command cannot be found, returns an empty name ("") and -1 for250 * the offset.251 *252 * For best results, this command should be called AFTER all filtering253 * has taken place, thus ensuring the command is in the least254 * obfuscated state BEFORE attempting command identification.255 *256 * @param {Token|Array} tokens - The command string to analyse.257 * @returns {IdentifiedCommand}258 */259function try_identify_command (tokens) {260 tokens = Array.prototype.slice.call(tokens);261 let identified_command = {262 command : "",263 switches: {},264 offset : -1265 };266 /*267 * Double-Quoted commands268 * ======================269 *270 * For example, matches something similar to:271 *272 * "C:\Windows\System32\cmd.exe"273 * ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^274 */275 if (tokens[0].name === "STRING_DQUOTE_BEGIN") {276 let dquote_end_index = tokens.findIndex(277 (t, i) => i > 0 && t.name === "STRING_DQUOTE_END"278 );279 if (!dquote_end_index) {280 // We can't do much if the CMD doesn't have an ending281 // DQUOTE. Bad command.282 return identified_command;283 }284 let cmd = tokens285 .splice(0, dquote_end_index)286 .map(tok => tok.text)287 .join("")288 .replace(/^\"|\"$/g, "");289 identified_command.command = path.basename(cmd).replace(/\.exe$/i, "");290 identified_command.offset = dquote_end_index + 1;291 }292 else if (tokens[0].name === "SET") {293 identified_command.command = "set";294 identified_command.offset = 1;295 }296 else {297 let end_index = tokens.findIndex(t => (t.text === " " || t.name === "SEMICOLON"));298 end_index = (end_index < 0) ? 
tokens.length : end_index;299 let cmd = tokens300 .splice(0, end_index)301 .map(tok => tok.text)302 .join("");303 if (/[\\/]/.test(cmd) || /^[a-z]:/i.test(cmd)) {304 // If the path contains path separators, or some drive305 // identifier such as 'C:', then clean-up the path and306 // return the command.307 identified_command.command = path.basename(cmd).replace(/\.exe$/i, "");308 identified_command.offset = end_index + 1;309 }310 else if (cmd) {311 identified_command.command = cmd.replace(/\.exe$/i, "");312 identified_command.offset = end_index;313 }314 }315 return identified_command;316}317/**318 * Given an array of Token objects, attempts to remove all non-quoted319 * contiguous whitespace LITERALS, leaving a single space between each320 * word boundary.321 *322 * @param {Token|Array} tokens - An array of tokens.323 * @returns {Token|Array}324 */325function FILTER_strip_excessive_whitespace (tokens) {326 for (let i = 0; i < tokens.length; i++) {327 let token = tokens[i],328 lookahead = tokens[i + 1];329 if (token.name === "LITERAL" && token.text === " ") {330 if (i === 0) {331 tokens.splice(0,1);332 i = -1;333 }334 else if (i === (tokens.length - 1) && token.text === " ") {335 tokens.splice(i, 1);336 i = -1;337 }338 else if (lookahead && lookahead.name === "LITERAL" && lookahead.text === " ") {339 tokens.splice(i, 1);340 i = -1;341 }342 }343 }344 return tokens;345}346/**347 * Given a command string, attempts to slurp all LITERAL,348 * non-whitespace tokens surrounding a string inside that string. For349 * example:350 *351 * c"alc".exe --[slurped]--> "calc.exe"352 *353 * @param {Token|Array} tokens - An array of tokens.354 * @returns {Token|Array}355 */356function FILTER_slurp_literals_into_strings (tokens) {357 for (let i = 0; i < tokens.length; i++) {358 let token = tokens[i],359 lookahead = tokens[i + 1],360 lookbehind = tokens[i - 1];361 if (token.name === "STRING_DQUOTE_BEGIN") {362 if (lookbehind && lookbehind.name === "LITERAL" && lookbehind.text !== " ") {363 tokens[i - 1] = token;364 tokens[i] = lookbehind;365 tokens[i].name = "STRING_DQUOTE_CHAR";366 i = 0;367 }368 }369 else if (token.name === "STRING_DQUOTE_END") {370 if (lookahead && lookahead.name === "LITERAL" && lookahead.text !== " ") {371 tokens[i + 1] = token;372 tokens[i] = lookahead;373 tokens[i].name = "STRING_DQUOTE_CHAR";374 }375 }376 }377 // We need to clean-up the tokens. 
Consider the following input:378 //379 // h"t"t"p"380 //381 // The way the algorithm works, we'll end up with our tokens being382 // ordered:383 //384 // "htt""p"385 //386 for (i = 0; i < tokens.length; i++) {387 let token = tokens[i],388 lookahead_1 = tokens[i + 1],389 lookahead_2 = tokens[i + 2];390 if (token.name === "STRING_DQUOTE_END") {391 if (lookahead_1 && lookahead_1.name === "STRING_DQUOTE_BEGIN") {392 tokens.splice(i, 2);393 }394 }395 }396 return tokens;397}398/**399 * Given an array of Tokens, attempts to remove all empty strings ("")400 * from the list, returning a new list of tokens with empty string401 * tokens removed.402 *403 * @param {Token|Array} tokens - An array of tokens.404 * @returns {Token|Array}405 */406function FILTER_strip_empty_strings (tokens) {407 let out_tokens = [],408 skip_token = false;409 for (let i = 0; i < tokens.length; i++) {410 let token = tokens[i],411 lookahead = tokens[i + 1];412 if (skip_token) {413 out_tokens.pop();414 skip_token = false;415 continue;416 }417 out_tokens.push(token);418 switch (token.name) {419 case "STRING_DQUOTE_BEGIN":420 if (lookahead && lookahead.name === "STRING_DQUOTE_END") {421 skip_token = true;422 }423 break;424 }425 }426 return out_tokens;427}428/**429 * Given an array of Tokens, attempts to remove all unnecessary commas430 * from the tokenised sequence.431 *432 * @param {Token|Array} tokens - An array of tokens.433 * @returns {Token|Array}434 */435function FILTER_strip_commas (tokens) {436 return tokens.filter(token => token.name !== "COMMA");437}438/**439 * Given an array of Tokens, attempts to fix-up all tokens which were440 * previously escaped tokens.441 *442 * @param {Token|Array} tokens - An array of tokens.443 * @returns {Token|Array}444 */445function FILTER_apply_escapes (tokens) {446 let filtered = tokens447 .filter(tok => tok.name !== "ESCAPE")448 .map((tok, i, tokens) => {449 if (tok.name === "ESCAPED_LITERAL") tokens[i].name = "LITERAL";450 return tokens[i];451 });452 return filtered;453}454/**455 * Given a command string, attempts to partially interpret the456 * command, returning an object which can be used to present the457 * command in an easy-to-understand way.458 *459 * @param {string} cmdstr - The command to run/parse.460 * @returns {Object}461 */462function interpret_command (cmdstr) {463 let clean_cmdstr = cmdstr.replace(/^\s+|\s+$/, "");464 // Parse the command string in to an array of Token objects.465 let tokens = tokenise(clean_cmdstr),466 ident = try_identify_command(tokens);467 if (cmd_dispatch.hasOwnProperty(ident.command)) {468 let handled = cmd_dispatch[ident.command](ident, tokens);469 ident.tokens = handled.tokens;470 ident.switches = handled.switches;471 ident.finished = handled.finished;472 }473 let flags = {474 in_set_cmd : false,475 capturing_env_var_name : false,476 capturing_env_var_value : false477 };478 let env_vars = {},479 env_var_name = "",480 env_var_value = "";481 // The `outbuf` var holds a cleaned-up version of the command with482 // all obfuscation removed.483 let outbuf = [];484 // When TRUE, the parser skips the next token. 
Used in cases485 // where we want to ignore "".486 let skip = false;487 for (let i = 0; i < tokens.length; i++) {488 if (skip) {489 outbuf.pop();490 skip = false;491 continue;492 }493 let token = tokens[i],494 lookahead = tokens[i + 1];495 outbuf.push(token.text);496 switch (token.name) {497 case "LITERAL":498 if (flags.in_set_cmd) {499 if (flags.capturing_env_var_name) {500 env_var_name += token.text;501 }502 else if (flags.capturing_env_var_value) {503 env_var_value += token.text;504 }505 }506 break;507 case "ESCAPED":508 break;509 case "SET":510 flags.capturing_env_var_name = true;511 flags.in_set_cmd = true;512 break;513 case "SET_ASSIGNMENT":514 flags.capturing_env_var_name = false;515 flags.capturing_env_var_value = true;516 break;517 case "SET_DQUOTE_CHAR":518 if (flags.capturing_env_var_name) {519 env_var_name += token.text;520 }521 else if (flags.capturing_env_var_value) {522 env_var_value += token.text;523 }524 break;525 case "SET_DQUOTE_BEGIN":526 case "SET_DQUOTE_END":527 // TODO: may need to add another flag here...528 break;529 case "STRING_DQUOTE_BEGIN":530 if (lookahead.name === "STRING_DQUOTE_END") {531 skip = true;532 }533 break;534 default:535 //console.log("UNKNOWN TOK>", token.name, token.text);536 }537 }538 if (env_var_name.length && env_var_value.length) {539 if (/^%[^%]+[^%]%$/.test(env_var_name)) {540 // Special handling for the case where someone sets:541 //542 // SET %foo%=bar543 //544 // In this case, '%foo%' is treated as 'foo'. This is545 // different from something like:546 //547 // SET %%foo%%=bar548 //549 // which Windows treats as '%%foo%%' which is !== '%foo%'.550 //551 env_var_name = env_var_name.replace(/^%|%$/g, "");552 }553 env_vars[env_var_name] = {554 first: env_var_value,555 curr: env_var_value556 };557 }558 return {559 ident: ident,560 clean: outbuf.join(""),561 vars: env_vars562 };563}564/**565 * Given a command string, attempts to split the string, returning an566 * array of individual command strings.567 *568 * @param {string} command - a CMD.EXE command.569 * @returns {Tokens|Array} Each command is an element in the array.570 */571function split_command (command_str) {572 let tokens = tokenise(command_str, { filter: false }),573 index = 0,574 commands = [""];575 tokens.forEach(tok => {576 if (/^(?:CALL|COND_CALL|SEMICOLON)$/.test(tok.name)) {577 index++;578 commands[index] = "";579 }580 else {581 commands[index] += tok.text;582 }583 });584 return commands585 .map(cmd => cmd.replace(/^\s*|\s*$/g, "")) // Remove leading and trailing whitespace586 .filter(cmd => ! 
/^\s*$/.test(cmd));587}588/**589 * Given a command string, attempts to split the string in to an array590 * of Token objects.591 *592 * @param {string} cmdstr - The command string to split in to tokens.593 * @param {string} [options] - Set .filter T|F to enable/disable filtering.594 * @returns {Token|Array} Token objects, one-per-token.595 */596function tokenise (cmdstr, options) {597 options = options || {};598 options = Object.assign({}, { escapes_as_literals: false, filter: true }, options);599 lexer.setInput(cmdstr);600 let tokens = [];601 while (true) {602 let token = lexer.lex();603 if (token === "EOF") break;604 if (options.escapes_as_literals) {605 if (token.name === "ESCAPE") {606 token.name = "LITERAL";607 }608 else if (token.name === "ESCAPED_LITERAL") {609 if (token.text === "=") {610 token.name = "SET_ASSIGNMENT";611 }612 else {613 token.name = "LITERAL";614 }615 }616 }617 tokens.push(token);618 }619 if (options.filter) {620 tokens = FILTER_apply_escapes(tokens);621 tokens = FILTER_strip_empty_strings(tokens);622 tokens = FILTER_slurp_literals_into_strings(tokens);623 tokens = FILTER_strip_excessive_whitespace(tokens);624 //tokens = FILTER_strip_commas(tokens);625 }626 let cleancmd = stringify_tokens(tokens);627 if (cmdstr !== cleancmd) {628 return tokenise(cleancmd, { escapes_as_literals: true });629 }630 return tokens;631}632/**633 * Attempts to perform a find/replace with variable expansion against634 * a given DOS command with values read from an optional variable635 * key/value object. BATCH implements some syntactic-sugar to support636 * finding and replacing characters within an environment variable:637 *638 * @example639 * // Replace all 'a' chars with 'b' in var 'foo':640 * "%foo:a=b%"641 *642 * @param {string} cmdstr - DOS command we wish to deobfuscate.643 * @param {Object} [vars] - An object mapping var names to values.644 *645 * @returns {string} An expanded form of `cmdstr` with all variable646 * find/replace operations performed.647 */648function substr_replace (cmdstr, vars) {649 let find_replace_re = /%([^:]*):([^\s]+)=([^\s]+)?%/ig,650 got_match;651 while ((got_match = find_replace_re.exec(cmdstr))) {652 let wholematch = got_match[0],653 findstr = got_match[2],654 replstr = got_match[3] || "",655 varname = got_match[1].toLowerCase(),656 varvalue = vars[varname];657 if (vars.hasOwnProperty(varname) === false) {658 continue;659 }660 let replaced_varvalue = varvalue.first.split(findstr).join(replstr);661 cmdstr = cmdstr.split(wholematch).join(replaced_varvalue);662 }663 return cmdstr;664}665/**666 * Given a command string an an object mapping varname => varvalue,667 * attempts to apply the range of text manipulations supported by the668 * BATCH language. 
The following features are supported:669 *670 * - expansion :: %foo% expands to the valueOf(%foo%).671 * - substrings :: %foo:~5%, %foo:0,3%, %foo:~-3%672 * -673 *674 */675function expand_environment_variables (cmdstr, vars, options) {676 options = options || {};677 const defaults = {678 expand_percent_vars: true,679 delayed_expansion: false680 };681 options = Object.assign({}, defaults, options);682 const default_vars = {683 appdata: {684 first: `C:\\Users\\whoami\\AppData\\Roaming`,685 curr: `C:\\Users\\whoami\\AppData\\Roaming`686 },687 comspec: {688 first: `C:\\Windows\\System32\\cmd.exe`,689 curr: `C:\\Windows\\System32\\cmd.exe`690 }691 };692 if (vars) {693 Object.keys(vars).forEach(varname => {694 const varvalue = vars[varname];695 if (typeof varvalue === "string") {696 vars[varname] = {697 first: varvalue,698 curr: varvalue699 };700 }701 });702 }703 vars = Object.assign(default_vars, vars);704 // Expand Variables705 // ================706 //707 // Take all instances of '%foo%' and replace with the value found708 // within the 'vars' dict.709 //710 let cmd = cmdstr;711 if (options.expand_percent_vars) {712 Object.keys(vars).forEach(varname => {713 cmd = cmd.replace(714 new RegExp(escapeRegexpString(`%${varname}%`), "gi"), vars[varname].first715 );716 });717 }718 // Delayed Expansion719 // =================720 //721 // Instead of '%foo%', delayed expansion works with '!foo!', and722 // reads from '.curr' instead of '.first'.723 //724 if (options.delayed_expansion) {725 Object.keys(vars).forEach(varname => {726 cmd = cmd.replace(727 new RegExp(escapeRegexpString(`!${varname}!`), "gi"), // find728 vars[varname].curr // replace729 );730 });731 }732 // Apply Find/Replace733 // ==================734 //735 // Searches the variable for all instances of STR, replacing with736 // REP, for example:737 //738 // %foo:STR=REP%739 // %foo:cat=dog%740 //741 cmd = substr_replace(cmd, vars);742 // Substring handling743 // ==================744 //745 // There are a few different ways we can apply substrings. 
Assume746 // %foo% = "abcdef".747 //748 // - %foo:~3% => def749 // - %foo:~0,3% => abc750 // - %foo:~-3% => def751 // - %foo:~1,3% => bcd752 //753 let substr_re = /%([^:]*):\s*~\s*([+-]?\d+)(?:,([+-]?\d+))?%/ig,754 replacements = [],755 substr_match;756 while ((substr_match = substr_re.exec(cmd))) {757 let var_name = substr_match[1].toLowerCase(),758 var_value = vars[var_name],759 substr_start = substr_match[2],760 substr_end = substr_match[3];761 if (substr_start !== undefined) {762 substr_start = parseInt(substr_start, 10);763 }764 if (substr_end !== undefined) {765 substr_end = parseInt(substr_end, 10);766 }767 if (var_value === undefined) {768 continue;769 }770 else {771 var_value = var_value.first;772 }773 let replace = {774 find: substr_match[0]775 };776 let rev = s => s.split("").reverse().join("");777 if ((substr_start !== undefined) && (substr_end === undefined)) {778 if (substr_start == 0) {779 // Special case -- when the substr pattern is780 // something like:781 //782 // %FOO:~0%783 //784 // Windows expands this to the full variable value.785 replace.replace = var_value;786 replacements.push(replace);787 continue;788 }789 else if (substr_start < 0) {790 // Negative substr values start from the last char and791 // substr forwards.792 let rev_var_value = rev(var_value);793 var_value = rev(rev_var_value.substr(0, (substr_start * -1)));794 replace.replace = var_value;795 replacements.push(replace);796 continue;797 }798 replace.replace = var_value.substring(substr_start, substr_end);799 }800 else if ((substr_start !== undefined) && (substr_end !== undefined)) {801 if (substr_start < 0 && substr_end < 0) {802 substr_start = (substr_start * -1);803 substr_end = (substr_end * -1);804 let tmpstart = Math.min(substr_start, substr_end),805 tmpend = Math.max(substr_start, substr_end);806 replace.replace = rev(rev(var_value).split("").slice(tmpstart, tmpend).join(""));807 }808 else if (substr_start < 0 && substr_end > 0) {809 /*810 * Handles cases such as: %foo:~-10,3%.811 */812 let substr_offset = (substr_end + substr_start) * -1;813 replace.replace = rev((rev(var_value).substr(substr_offset, substr_end)));814 }815 else if (substr_end < 0 && substr_start === 0) {816 replace.replace = rev(rev(var_value).substr(substr_end * -1));817 }818 else if (substr_start === 0) {819 replace.replace = var_value.substring(0, substr_end);820 }821 else if (substr_start > 0) {822 replace.replace = var_value.substring(substr_start, substr_end + substr_start);823 }824 }825 replacements.push(replace);826 }827 replacements.forEach(r => {828 cmd = cmd.replace(new RegExp(escapeRegexpString(r.find), "gi"), r.replace);829 });830 return cmd;831}832module.exports = {833 filter: {834 widen_strings: FILTER_slurp_literals_into_strings,835 strip_escapes: FILTER_apply_escapes,836 strip_whitespace: FILTER_strip_excessive_whitespace,837 strip_empty_strings: FILTER_strip_empty_strings,838 strip_commas: FILTER_strip_commas,839 // Command handlers840 handle_CMD: FILTER_handle_cmd,841 },842 try_identify_command: try_identify_command,843 tokenise: tokenise,844 split_command: split_command,845 parse: parse_cmdstr,846 expand_variables: expand_environment_variables...


enums.py

Source:enums.py Github


1"""2Contains enumerations that are used throughout the codebase. Also sets up some3static enumeration data (like spell rarity, spawn spell -> unit spawned mapping4etc.5"""6import enum7@enum.unique8class Owner(enum.IntEnum):9 LEFT_PLAYER = 110 RIGHT_PLAYER = 211 @property12 def side(self):13 return "LeftPlayer" if self == Owner.LEFT_PLAYER else "RightPlayer"14@enum.unique15class Rarity(enum.IntEnum):16 UNDEFINED = 017 COMMON = 118 RARE = 219 EPIC = 320 LEGENDARY = 421 MYTHIC = 522@enum.unique23class CastingStrategy(enum.IntEnum):24 # Cast at any point on the map (think Meteor)25 ENTIRE_MAP = 026 # Cast on any one lane27 SINGLE_LANE = 128 # Cast on controlled area of the map - default minion casting strategy29 # For each lane, cast as far as your furthest minions on any lane, up to30 # first enemy minion on that lane, capped by half-width of lane.31 CONTROLLED_AREA = 232 # Cast position does not matter.33 DOES_NOT_MATTER = 334@enum.unique35class Spell(enum.IntEnum):36 # Spawn spells37 SPAWNUNIT_SWORDSMAN = 338 SPAWNUNIT_ARCHERS = 600139 SPAWNUNIT_REAPER = 2340 SPAWNUNIT_UNDEADHORDE = 641 SPAWNUNIT_TREANT = 600542 SPAWNUNIT_FIREIMP = 1200143 SPAWNUNIT_BRUTE = 944 SPAWNUNIT_CHARGER = 1201545 SPAWNUNIT_WATERELEMENTAL = 602546 SPAWNUNIT_EXECUTIONER = 610647 SPAWNUNIT_SILVERRANGER = 1200248 SPAWNUNIT_ALCHEMIST = 610749 SPAWNUNIT_COMMANDER = 407150 SPAWNUNIT_GOBLINMARKSMAN = 406351 SPAWNUNIT_JUGGERNAUT = 601552 SPAWNUNIT_PRIMALSPIRIT = 611153 SPAWNUNIT_RAVENOUSSCOURGE = 1202054 SPAWNUNIT_ROLLINGROCKS = 1201055 SPAWNUNIT_SHADOWHUNTRESS = 611656 SPAWNUNIT_SHIELDBEARER = 406657 SPAWNUNIT_STONEELEMENTAL = 601358 SPAWNUNIT_UNDEADARMY = 600359 SPAWNUNIT_VALKYRIE = 602060 SPAWNUNIT_VIPER = 1200861 SPAWNUNIT_WISPMOTHER = 10662 # Standard spells63 METEOR = 31464 RAGE = 31765 # Custom spells66 DRAW_CARD = 20067 NOOP_1S = -168 NOOP_2S = -269 NOOP_3S = -370 NOOP_4S = -471 NOOP_5S = -572 NOOP_6S = -673 NOOP_7S = -774 NOOP_8S = -875 @property76 def rarity(self):77 return self._rarities[self]78 @property79 def casting_strategy(self):80 if self in self._custom_casting_strategies:81 return self._custom_casting_strategies[self]82 if self.is_spawn:83 # default casting strategy for minions84 return CastingStrategy.CONTROLLED_AREA85 else:86 # default casting strategy for spells87 return CastingStrategy.ENTIRE_MAP88 @property89 def is_spawn(self):90 """Spawn spells can be drawn from deck, put in hand and they spawn minions."""91 return self.name.startswith("SPAWN")92 @property93 def is_noop(self):94 return int(self) < 095 @property96 def units_spawned(self):97 if self in self._units_spawned:98 return self._units_spawned[self]99 else:100 return None101Spell._rarities = {102 Spell.SPAWNUNIT_SWORDSMAN: Rarity.COMMON,103 Spell.SPAWNUNIT_ARCHERS: Rarity.COMMON,104 Spell.SPAWNUNIT_SHIELDBEARER: Rarity.COMMON,105 Spell.SPAWNUNIT_UNDEADHORDE: Rarity.COMMON,106 Spell.SPAWNUNIT_WATERELEMENTAL: Rarity.COMMON,107 Spell.SPAWNUNIT_RAVENOUSSCOURGE: Rarity.COMMON,108 Spell.SPAWNUNIT_ROLLINGROCKS: Rarity.COMMON,109 Spell.SPAWNUNIT_UNDEADARMY: Rarity.COMMON,110 Spell.SPAWNUNIT_FIREIMP: Rarity.COMMON,111 Spell.SPAWNUNIT_REAPER: Rarity.RARE,112 Spell.SPAWNUNIT_BRUTE: Rarity.RARE,113 Spell.SPAWNUNIT_EXECUTIONER: Rarity.RARE,114 Spell.SPAWNUNIT_SILVERRANGER: Rarity.RARE,115 Spell.SPAWNUNIT_ALCHEMIST: Rarity.RARE,116 Spell.SPAWNUNIT_COMMANDER: Rarity.RARE,117 Spell.SPAWNUNIT_GOBLINMARKSMAN: Rarity.RARE,118 Spell.SPAWNUNIT_VALKYRIE: Rarity.RARE,119 Spell.SPAWNUNIT_VIPER: Rarity.RARE,120 Spell.SPAWNUNIT_CHARGER: Rarity.EPIC,121 
Spell.SPAWNUNIT_TREANT: Rarity.EPIC,122 Spell.SPAWNUNIT_JUGGERNAUT: Rarity.EPIC,123 Spell.SPAWNUNIT_PRIMALSPIRIT: Rarity.EPIC,124 Spell.SPAWNUNIT_SHADOWHUNTRESS: Rarity.EPIC,125 Spell.SPAWNUNIT_STONEELEMENTAL: Rarity.EPIC,126 Spell.SPAWNUNIT_WISPMOTHER: Rarity.EPIC,127 Spell.METEOR: Rarity.COMMON,128 Spell.RAGE: Rarity.COMMON,129 Spell.DRAW_CARD: Rarity.UNDEFINED,130 Spell.NOOP_1S: Rarity.UNDEFINED,131 Spell.NOOP_2S: Rarity.UNDEFINED,132 Spell.NOOP_3S: Rarity.UNDEFINED,133 Spell.NOOP_4S: Rarity.UNDEFINED,134 Spell.NOOP_5S: Rarity.UNDEFINED,135 Spell.NOOP_6S: Rarity.UNDEFINED,136 Spell.NOOP_7S: Rarity.UNDEFINED,137 Spell.NOOP_8S: Rarity.UNDEFINED,138}139for spell in Spell:140 if spell not in Spell._rarities:141 raise ValueError("UNDEFINED rarity for %s" % spell)142Spell._custom_casting_strategies = {143 Spell.RAGE: CastingStrategy.SINGLE_LANE,144 Spell.DRAW_CARD: CastingStrategy.DOES_NOT_MATTER,145 Spell.NOOP_1S: CastingStrategy.DOES_NOT_MATTER,146 Spell.NOOP_2S: CastingStrategy.DOES_NOT_MATTER,147 Spell.NOOP_3S: CastingStrategy.DOES_NOT_MATTER,148 Spell.NOOP_4S: CastingStrategy.DOES_NOT_MATTER,149 Spell.NOOP_5S: CastingStrategy.DOES_NOT_MATTER,150 Spell.NOOP_6S: CastingStrategy.DOES_NOT_MATTER,151 Spell.NOOP_7S: CastingStrategy.DOES_NOT_MATTER,152 Spell.NOOP_8S: CastingStrategy.DOES_NOT_MATTER,153}154@enum.unique155class Unit(enum.IntEnum):156 SWORDSMAN = 6157 ARCHER = 79158 REAPER = 36159 SKELETON = 11160 TREANT = 83161 FIREIMP = 12001162 BRUTE = 38163 CHARGER = 12012164 WATERELEMENTAL = 100165 EXECUTIONER = 105166 SILVERRANGER = 12002167 ALCHEMIST = 107168 COMMANDER = 44169 GOBLINMARKSMAN = 39170 JUGGERNAUT = 94171 PRIMALSPIRIT = 102172 RAVENOUSSCOURGE = 12017173 ROLLINGROCKS = 12010174 SHADOWHUNTRESS = 117175 SHIELDBEARER = 42176 STONEELEMENTAL = 92177 VALKYRIE = 99178 VIPER = 12008179 WISPMOTHER = 56180 # STONEELEMENTAL spawns this on deathrattle181 STONEELEMENTALSPAWN = 136182Spell._units_spawned = {183 Spell.SPAWNUNIT_SWORDSMAN: Unit.SWORDSMAN,184 Spell.SPAWNUNIT_ARCHERS: Unit.ARCHER,185 Spell.SPAWNUNIT_SHIELDBEARER: Unit.SHIELDBEARER,186 Spell.SPAWNUNIT_UNDEADHORDE: Unit.SKELETON,187 Spell.SPAWNUNIT_WATERELEMENTAL: Unit.WATERELEMENTAL,188 Spell.SPAWNUNIT_RAVENOUSSCOURGE: Unit.RAVENOUSSCOURGE,189 Spell.SPAWNUNIT_ROLLINGROCKS: Unit.ROLLINGROCKS,190 Spell.SPAWNUNIT_UNDEADARMY: Unit.SKELETON,191 Spell.SPAWNUNIT_FIREIMP: Unit.FIREIMP,192 Spell.SPAWNUNIT_REAPER: Unit.REAPER,193 Spell.SPAWNUNIT_BRUTE: Unit.BRUTE,194 Spell.SPAWNUNIT_EXECUTIONER: Unit.EXECUTIONER,195 Spell.SPAWNUNIT_SILVERRANGER: Unit.SILVERRANGER,196 Spell.SPAWNUNIT_ALCHEMIST: Unit.ALCHEMIST,197 Spell.SPAWNUNIT_COMMANDER: Unit.COMMANDER,198 Spell.SPAWNUNIT_GOBLINMARKSMAN: Unit.GOBLINMARKSMAN,199 Spell.SPAWNUNIT_VALKYRIE: Unit.VALKYRIE,200 Spell.SPAWNUNIT_VIPER: Unit.VIPER,201 Spell.SPAWNUNIT_CHARGER: Unit.CHARGER,202 Spell.SPAWNUNIT_TREANT: Unit.TREANT,203 Spell.SPAWNUNIT_JUGGERNAUT: Unit.JUGGERNAUT,204 Spell.SPAWNUNIT_PRIMALSPIRIT: Unit.PRIMALSPIRIT,205 Spell.SPAWNUNIT_SHADOWHUNTRESS: Unit.SHADOWHUNTRESS,206 Spell.SPAWNUNIT_STONEELEMENTAL: [Unit.STONEELEMENTAL, Unit.STONEELEMENTALSPAWN],207 Spell.SPAWNUNIT_WISPMOTHER: Unit.WISPMOTHER,208}209@enum.unique210class Brain(enum.IntEnum):211 """Brains represent kind and difficulty of AI opponents."""212 UNDEFINED_DIFFICULTY = 0213 UTILITY_1 = 1214 UTILITY_2 = 2215 UTILITY_3 = 3216 UTILITY_4 = 4217 UTILITY_5 = 5218 UTILITY_6 = 6219 UTILITY_7 = 7220 UTILITY_8 = 8221 UTILITY_9 = 9222 LOOKAHEAD_1 = 14223 LOOKAHEAD_2 = 15224 LOOKAHEAD_3 = 16225 LOOKAHEAD_4 = 17226 LOOKAHEAD_5 = 18227 
LOOKAHEAD_6 = 19228 LOOKAHEAD_7 = 20229 LOOKAHEAD_8 = 21230 LOOKAHEAD_9 = 22231 RANDOM_1 = 27232 RANDOM_2 = 28233 RANDOM_3 = 29234 RANDOM_4 = 30235 RANDOM_5 = 31236 RANDOM_6 = 32237 RANDOM_7 = 33238 RANDOM_8 = 34239 RANDOM_9 = 35240 DUMMY = 40241 @classmethod242 def utility_brains(cls):243 return list(map(cls, range(cls.UTILITY_1, cls.UTILITY_9 + 1)))244 @classmethod245 def random_brains(cls):246 return list(map(cls, range(cls.RANDOM_1, cls.RANDOM_9 + 1)))247 @classmethod248 def lookahead_brains(cls):...


search.py

Source:search.py Github


1import random, os2def pick(data):3 return random.choice(data)4search_grid = {5 "LRPPO" : [3e-5, 1e-4],#1e-3, 3e-4, 1e-5],6 "LRC" : [3e-5, 1e-4],#1e-3, 3e-4, 1e-5],7 "LRA" : [1e-4],#1e-3, 3e-4, 1e-5],8 "UPDATE_COUNT" : [10, 20],9 "DISCOUNT" : [.982, .97],10 "NORMALIZE" : [True],11 "STEPS_PER_EPOCH" : [1000, 500],#2000, 1000],12 "MINI_BATCH_SIZE" : [64, 32],13 "PPO_EPOCHS_PER_UPDATE" : [3, 5],14 "PPO_DELAY_LEARN" : [1000],15 "RPOLYAK":[False, True],16 "KL_MIN":[.1, .001],17 "HINDSIGHT_ACTION":[0., .3],18 "TD3_GAE":[False],19 "CRITIC_EMPHATIZE_RECENT":[False, True, False],20 "PPO_GAE_N":[10, 3],21 "TD3_GAE_N":[1],22 "PPO_HER":[False],#, False, True],23 "D2RL":[True],24 "PPO_NORM_IN":[True, False],25 "PPO_TRAIN_ACTOR":[False],26 "PPO_TRAIN_CRITIC":[False, True, False],27 "GOAL1_SIZE":[4, 40],28 "HRL":[True],29 "ACHIEVED_PUSH_N":[2],30 "ADVANTAGE":[False],31 "ADV_NORM":[False],32 "REW_DELTA":[1., 1., 0.],33 "REW_SCALE":[.1, .1, 1.],34 "KL_DELTA":[0., 0., 1.],35 "KL_SCALE":[1.],36 "LOOKAHEAD_1":[.2],37 "LOOKAHEAD_K":[.2],38 "SEPERATE_CRITICS":[False],39 "PPO_HER_RATIO":[.0],#.33],40 "BLIND":[True, False, True],41 "TD3_PPO_GRADS":[True],#, False, True],#42 "TD3_PPO_CLIP":[True],#, False, True],#43 "ELU":[True, False],44}45lparams = [46 "LRPPO",47 "LRC",48 "LRA",49 "UPDATE_COUNT",50 "DISCOUNT",51 "NORMALIZE",52 "STEPS_PER_EPOCH",53 "MINI_BATCH_SIZE",54 "PPO_EPOCHS_PER_UPDATE",55 "PPO_DELAY_LEARN",56 "RPOLYAK",57 "KL_MIN",58 "HINDSIGHT_ACTION",59 "TD3_GAE",60 "CRITIC_EMPHATIZE_RECENT",61 "PPO_GAE_N",62 "TD3_GAE_N",63 "PPO_HER",64 "D2RL",65 "PPO_NORM_IN",66 "PPO_TRAIN_ACTOR",67 "PPO_TRAIN_CRITIC",68 "GOAL1_SIZE",69 "HRL",70 "ACHIEVED_PUSH_N",71 "ADVANTAGE",72 "ADV_NORM",73 "REW_DELTA",74 "REW_SCALE",75 "KL_DELTA",76 "KL_SCALE",77 "LOOKAHEAD_1",78 "LOOKAHEAD_K",79 "SEPERATE_CRITICS",80 "PPO_HER_RATIO",81 "BLIND",82 "TD3_PPO_GRADS",83 "TD3_PPO_CLIP",84 "ELU",85]86config = """ENV = "mujoco-pusher"87LRPPO = {}88LRC = {}89LRA = {}90UPDATE_COUNT = {}91SEED = 092MAX_TIMESTEPS = 1e693# TD394EXPL_NOISE = 0.195BATCH_SIZE = 25696DISCOUNT = {}97TAU = 0.00598POLICY_NOISE = 0.299NOISE_CLIP = 0.5100POLICY_FREQ = 2101# TD3 + BC102ALPHA = 2.5103NORMALIZE = {}104# OPEN AI TD3 BASELINE TRAINING105EPOCHS = 60106REPLAY_SIZE = 1e7107START_STEPS = 10000108UPDATE_AFTER = 1000109UPDATE_EVERY = 50*2110#UPDATE_COUNT = 20#40111EVAL_FREQ = 50 * UPDATE_EVERY112# HER113HER_PER_EP = 10114HER_RATIO = .75115# PPO116STEPS_PER_EPOCH = {}117MINI_BATCH_SIZE = {}118PPO_EPOCHS_PER_UPDATE = {}119PPO_DELAY_LEARN = {}120RPOLYAK={}121KL_MIN = {}122HINDSIGHT_ACTION={}123TD3_GAE = {}124CRITIC_EMPHATIZE_RECENT = {}125PPO_GAE_N = {}126TD3_GAE_N = {}127PPO_HER = {}128D2RL = {}129PPO_NORM_IN = {}130PPO_TRAIN_ACTOR = {}131PPO_TRAIN_CRITIC = {}132#DLPPOH133TIMEFEAT = False#True#134LEAK2LL = True#False#135# AUXILARY136CLIP_Q = False#True137PIL2_GV = True138PANDA = "panda" in ENV139ERGOJR = "ergojr" in ENV140MUJOCO = not PANDA and not ERGOJR141assert MUJOCO + PANDA + ERGOJR == 1142BACKLASH = False143PUSHER = "usher" in ENV144GOAL_SIZE = 3145if ERGOJR: # no gripper, velo per joint ( #of joints == action_size )146 ACTION_SIZE = 3 + (not PUSHER) * 1#3147 LL_STATE_SIZE = GOAL_SIZE * 2 + ACTION_SIZE * 2 + TIMEFEAT148 STATE_SIZE = GOAL_SIZE + LL_STATE_SIZE + 3*GOAL_SIZE*PUSHER149else: # arm pos, arm prev pos, arm velo, gripper pos + velo + velp150 ACTION_SIZE = 3 + MUJOCO151 LL_STATE_SIZE = GOAL_SIZE * 3 + 4 * MUJOCO + TIMEFEAT152 STATE_SIZE = 2*GOAL_SIZE + LL_STATE_SIZE + 6*GOAL_SIZE*PUSHER# velp + gripper, object velp for pusher153GOAL0_SIZE = 
GOAL_SIZE154GOAL1_SIZE = {}155HRL = {}156ACHIEVED_PUSH_N = {}157ADVANTAGE = {}158ADV_NORM = {}159REW_DELTA = {}160REW_SCALE = {}161KL_DELTA = {}162KL_SCALE = {}163LOOKAHEAD_1 = {}164LOOKAHEAD_K = {}165SEPERATE_CRITICS = {}166PPO_HER_RATIO = {}167BLIND = {}168TD3_PPO_GRADS = {}169TD3_PPO_CLIP = {}170ELU = {}171"""172gae = None173values = []174for param in lparams:175 value = pick(search_grid[param])176 if "PPO_GAE_N" in param:177 gae = value178 if "GAE_N" in param:179 value = gae180 print(param, value)181 values.append(value)182cfg = config.format(*values)183#print(cfg)184with open("config.py", "w") as f:185 f.write(cfg)186import uuid, subprocess187fname = "test_"+str(uuid.uuid4())188print("test start : ", fname)189import time190start = time.time()191with open(fname + ".log", 'w') as out:192 return_code = subprocess.call(["python3", "duh5_search.py"], stdout=out)...


greedy_lookahead.py

Source:greedy_lookahead.py Github


1"""2greedy_lookahead.py3Minor Programming: Programming Theory4By: Pauline van Lieshout, Jari Hoffman and Stans Paulussen5This file contains the Greedy with lookahead algorithm, which searches for the best6three connections from which the first connection is eventually added to the track.7"""8from code.classes.track import Track9from code.algorithms.greedy import Greedy10class Greedy_Lookahead(Greedy):11 """12 For each track, choosing the first connection of the best three connections found and adding it to the track. 13 """14 def pick_first_connection(self):15 """16 Picks the first connection based on the best three connections possible.17 """18 self.best_connection = []19 stations = list(self.grid.stations.values())20 # add a first station to the track 21 for station in stations:22 self.track = Track(f"greedy_track_{self.count}", self.grid)23 self.track.add_station(self.grid, station.name)24 lookahead_1 = station.connections25 # calculate quality of all connections and save the best connection26 for la1 in lookahead_1: 27 next_station = stations[int(la1)].name28 self.track.add_station(self.grid, next_station)29 lookahead_2 = stations[int(la1)].get_connections()30 31 for la2 in lookahead_2:32 # if adding the connection exceeds the track's max time length 33 if self.track.add_station(self.grid, la2[0].name) is False:34 break35 36 quality = self.grid.get_quality()37 self.track.remove_last_station()38 # checks if the quality of the track is the best one yet and remembers it39 if quality > self.best_score:40 self.best_score = quality 41 self.best_connection = [station.name, stations[int(la1)].name, la2[0].name]42 self.track.remove_last_station()43 44 # if adding another track does not lead to a better quality, stop algorithm45 if self.best_connection == []:46 return False47 48 # add best connection to the track49 self.track = Track(f"greedy_track_{self.count}", self.grid)50 self.track.add_station(self.grid, self.best_connection[0])51 self.count += 152 return station 53 def pick_next_station(self, station):54 """55 Picks the next station based on the three connections that produce the best score. 56 """57 self.best_score = 058 stations = self.grid.stations59 # all connections of the last added added station 60 lookahead_1 = self.grid.get_station(self.best_connection[1]).connections61 for la1 in lookahead_1.values():62 next_station = la1[0].name63 # if adding the connection exceeds the tracks max time length 64 if self.track.add_station(self.grid, next_station) is False:65 break66 lookahead_2 = self.grid.get_station(la1[0].name).connections67 # keeps adding stations untill the time limit is reached68 for la2 in lookahead_2:69 la2 = stations.get(la2)70 if self.track.add_station(self.grid, la2.name) is False:71 break72 73 quality = self.grid.get_quality()74 75 self.track.remove_last_station()76 # if quality improves, add first station to the track77 if quality > self.best_score:78 self.best_score = quality 79 self.best_connection = [la2.name, la1[0].name]80 ...


main.js

Source:main.js Github


const agl_processor = window['net.akehurst.language-agl-processor'];
const Agl = agl_processor.net.akehurst.language.agl.processor.Agl;
const AutomatonKind = agl_processor.net.akehurst.language.api.processor.AutomatonKind_api;
function parse() {
    try {
        const grammarStr = document.getElementById('grammar').value;
        const sentenceStr = document.getElementById('sentence').value;
        const p = Agl.processorFromString(grammarStr);
        const tree = p.parse(sentenceStr, AutomatonKind.LOOKAHEAD_1);
        document.getElementById('result').value = tree.toStringAllWithIndent(" ");
    } catch (ex) {
        document.getElementById('result').value = 'ERROR: ' + ex;
    }
...


test_do_stats.py

Source:test_do_stats.py Github


from envs.nick_2048 import Nick2048
from do_stats import STRATEGIES, IMPLEMENTATIONS
from strategies.random import try_random

def test_strategies():
    TO_TEST = [
        "only_go_right",
        "random",
        "down_left",
        "fixed_action_order",
        "greedy",
        "greedy_fixed_order",
        "down_left_greedy",
        "max_space_then_greedy",
        "lookahead_1",
        "lookahead_2",
    ]
    for strat_name in TO_TEST:
        STRATEGIES[strat_name](Nick2048, 2)

def test_implementations():
    for impl_name, implementation in IMPLEMENTATIONS.items():
...


Using AI Code Generation


var gherkin = require('gherkin');
var parser = new gherkin.Parser(new gherkin.AstBuilder());
var lexer = new gherkin.Lexer(gherkin.GherkinDialectProvider());
var tokenMatcher = new gherkin.TokenMatcher(gherkin.GherkinDialectProvider(), 'en');
var tokenScanner = new gherkin.TokenScanner('test.feature');
var feature = parser.parse(lexer.scan(tokenScanner));
var rule = feature.children[0];
var token = tokenScanner.peek();
var tokenType = token.type;
var tokenMatched = tokenMatcher.match_BackgroundLine(tokenType);
var tokenMatched = tokenMatcher.match_ScenarioLine(tokenType);
var tokenMatched = tokenMatcher.match_ScenarioOutlineLine(tokenType);
var tokenMatched = tokenMatcher.match_ExamplesLine(tokenType);
var tokenMatched = tokenMatcher.match_StepLine(tokenType);
var tokenMatched = tokenMatcher.match_TagLine(tokenType);
var tokenMatched = tokenMatcher.match_Comment(tokenType);
var tokenMatched = tokenMatcher.match_Empty(tokenType);
var tokenMatched = tokenMatcher.match_DocStringSeparator(tokenType);
var tokenMatched = tokenMatcher.match_TableRow(tokenType);
var tokenMatched = tokenMatcher.match_Language(tokenType);
var tokenMatched = tokenMatcher.match_EOF(tokenType);
var tokenMatched = tokenMatcher.match_Other(tokenType);
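The calls above boil down to one idea: peek at the next token without consuming it, then ask the token matcher what that token would be. Exact constructor signatures, and whether the match_* functions accept a token or just its type, differ between gherkin releases, so the sketch below reuses the names from the snippet purely as assumptions and passes the peeked token itself.

// Hedged sketch: classify the upcoming line via one token of lookahead.
function classifyNextLine(tokenScanner, tokenMatcher) {
  const token = tokenScanner.peek();            // look ahead without consuming
  if (tokenMatcher.match_TagLine(token)) return "tag";
  if (tokenMatcher.match_ScenarioLine(token)) return "scenario";
  if (tokenMatcher.match_StepLine(token)) return "step";
  if (tokenMatcher.match_Comment(token)) return "comment";
  return "other";
}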


Using AI Code Generation


var Gherkin = require('gherkin');
var gherkin = new Gherkin.Parser();
var fs = require('fs');
var file = fs.readFileSync('test.feature', 'utf8');
var feature = gherkin.parse(file);
var scenarios = feature.feature.children;
console.log(scenarios);
var steps = scenarios[0].steps;
console.log(steps);
var step = steps[0];
var step_text = step.text;
console.log(step_text);
var keyword = step.keyword;
console.log(keyword);
var match = Gherkin.TokenMatcher.match_1(step, Gherkin.TokenType.StepLine);
console.log(match);

[ { type: 'Scenario',
    location: { line: 3, column: 3 },
    [ { type: 'Step',
        location: { line: 4, column: 5 },
        docString: null } ] } ]
[ { type: 'Step',
    location: { line: 4, column: 5 },
    docString: null } ]


Using AI Code Generation


var gherkin = require('gherkin');
var parser = new gherkin.Parser();
var lexer = new gherkin.Lexer();
    Given I have 1 cukes in my belly';
var tokens = lexer.lex(gherkinSource);
var ast = parser.parse(tokens);
console.log(ast.feature.children[0].steps[0].text);


Using AI Code Generation


var parser = new Parser();
var ast = parser.parse("Feature: test");
var feature = ast.feature;
var featureKeyword = feature.keyword;
var featureName = feature.name;
var featureDescription = feature.description;
var featureChildren = feature.children;
var scenario = featureChildren[0];
var scenarioKeyword = scenario.keyword;
var scenarioName = scenario.name;
var scenarioDescription = scenario.description;
var scenarioSteps = scenario.steps;
var step = scenarioSteps[0];
var stepKeyword = step.keyword;
var stepText = step.text;
var stepArgument = step.argument;

featureKeyword = "Feature";
featureName = "test";
featureDescription = null;
featureChildren = [Object];
scenarioKeyword = "Scenario";
scenarioName = null;
scenarioDescription = null;
scenarioSteps = [Object];
stepKeyword = null;
stepText = null;
stepArgument = null;


Using AI Code Generation


var Gherkin = require('gherkin');
var parser = new Gherkin.Parser();
var lexer = new Gherkin.Lexer();
var line = "Scenario: test";
var token = lexer.scan(line).next();
var isComment = parser.lookahead_1(token);
console.log("isComment: " + isComment);

var Gherkin = require('gherkin');
var parser = new Gherkin.Parser();
var lexer = new Gherkin.Lexer();
var line = "#Scenario: test";
var token = lexer.scan(line).next();
var isComment = parser.lookahead_1(token);
console.log("isComment: " + isComment);

var Gherkin = require('gherkin');
var parser = new Gherkin.Parser();
var lexer = new Gherkin.Lexer();
var line = "";
var token = lexer.scan(line).next();
var isComment = parser.lookahead_1(token);
console.log("isComment: " + isComment);

var Gherkin = require('gherkin');
var parser = new Gherkin.Parser();
var lexer = new Gherkin.Lexer();
var line = " ";
var token = lexer.scan(line).next();
var isComment = parser.lookahead_1(token);
console.log("isComment: " + isComment);

var Gherkin = require('gherkin');
var parser = new Gherkin.Parser();
...
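Each block above repeats the same four lines with a different input, so the behaviour is easier to compare when the calls are folded into one helper. Note that lookahead_1 is an internal helper of the generated Gherkin parser rather than a documented public method, and the Parser/Lexer constructors used here are taken verbatim from the snippet, so treat this purely as an illustrative rearrangement.

var Gherkin = require('gherkin');

function checkLookahead(line) {
  var parser = new Gherkin.Parser();
  var lexer = new Gherkin.Lexer();
  var token = lexer.scan(line).next();
  return parser.lookahead_1(token); // the snippet above treats the result as an "is comment / empty?" flag
}

["Scenario: test", "#Scenario: test", "", " "].forEach(function (line) {
  console.log(JSON.stringify(line) + " -> " + checkLookahead(line));
});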


Cucumber Tutorial:

LambdaTest offers a detailed Cucumber testing tutorial, explaining its features, importance, best practices, and more to help you get started with running your automation testing scripts.

Cucumber Tutorial Chapters:

Here are the detailed Cucumber testing chapters to help you get started:

  • Importance of Cucumber - Learn why Cucumber is important in Selenium automation testing during the development phase to identify bugs and errors.
  • Setting Up Cucumber in Eclipse and IntelliJ - Learn how to set up Cucumber in Eclipse and IntelliJ.
  • Running First Cucumber.js Test Script - Once you have set up Cucumber in Eclipse or IntelliJ, this chapter helps you get started with Selenium Cucumber testing in no time.
  • Annotations in Cucumber - To handle multiple feature files and the many scenarios inside each one, you need a way to control how those scenarios execute. This chapter covers a handful of Cucumber annotations, from tags to Cucumber hooks and more, that ease framework maintenance.
  • Automation Testing With Cucumber And Nightwatch JS - Learn how to build a robust BDD framework setup for performing Selenium automation testing by integrating Cucumber into the Nightwatch.js framework.
  • Automation Testing With Selenium, Cucumber & TestNG - Learn how to perform Selenium automation testing by integrating Cucumber with the TestNG framework.
  • Integrate Cucumber With Jenkins - Integrating Cucumber with Jenkins lets you schedule test executions remotely and benefit from Jenkins' CI capabilities. This detailed chapter walks you through the integration.
  • Cucumber Best Practices For Selenium Automation - Take a deep dive into the advanced use cases, such as creating a feature file, separating feature files, and more for Cucumber testing.

