[Pkg-javascript-commits] [node-ebnf-parser] 01/08: Import Upstream version 0.1.10

Praveen Arimbrathodiyil praveen at moszumanska.debian.org
Sun Oct 9 14:34:36 UTC 2016


This is an automated email from the git hooks/post-receive script.

praveen pushed a commit to branch master
in repository node-ebnf-parser.

commit fad9ff43a0efa3528f48009bcc4f83a5a1513066
Author: Praveen Arimbrathodiyil <praveen at debian.org>
Date:   Sun Oct 9 19:16:46 2016 +0530

    Import Upstream version 0.1.10
---
 .gitignore          |   7 +
 .npmignore          |   0
 Makefile            |  24 ++
 README.md           | 218 ++++++++++++++++++
 bnf.l               |  58 +++++
 bnf.y               | 216 ++++++++++++++++++
 ebnf-parser.js      |  41 ++++
 ebnf-transform.js   | 135 +++++++++++
 ebnf.y              |  66 ++++++
 package.json        |  27 +++
 tests/all-tests.js  |   9 +
 tests/bnf.js        |  91 ++++++++
 tests/bnf_parse.js  | 221 ++++++++++++++++++
 tests/ebnf.js       | 105 +++++++++
 tests/ebnf_parse.js |  38 ++++
 transform-parser.js | 629 ++++++++++++++++++++++++++++++++++++++++++++++++++++
 16 files changed, 1885 insertions(+)

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..6482f85
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,7 @@
+parser.js
+node_modules/
+
+# Editor bak files
+*~
+*.bak
+*.orig
diff --git a/.npmignore b/.npmignore
new file mode 100644
index 0000000..e69de29
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..b732fab
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,24 @@
+
+all: install build test
+
+install:
+	npm install
+
+build:
+	node ./node_modules/.bin/jison bnf.y bnf.l
+	mv bnf.js parser.js
+
+	node ./node_modules/.bin/jison ebnf.y
+	mv ebnf.js transform-parser.js
+
+test:
+	node tests/all-tests.js
+
+
+
+
+clean:
+
+superclean: clean
+	-find . -type d -name 'node_modules' -exec rm -rf "{}" \;
+
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..ea2f316
--- /dev/null
+++ b/README.md
@@ -0,0 +1,218 @@
+# ebnf-parser
+
+A parser for BNF and EBNF grammars used by jison.
+
+## install
+
+    npm install ebnf-parser
+
+
+## build
+
+To build the parser yourself, clone the git repo then run:
+
+    make
+
+This will generate `parser.js`, which is required by `ebnf-parser.js`.
+
+## usage
+
+The parser translates a string grammar or JSON grammar into a JSON grammar that jison can use (EBNF is transformed into BNF).
+
+    var ebnfParser = require('ebnf-parser');
+
+    // parse a bnf or ebnf string grammar
+    ebnfParser.parse("%start ... %");
+
+    // transform an ebnf JSON grammar
+    ebnfParser.transform({"ebnf": ...});
+
+
+## example grammar
+
+The parser can parse its own BNF grammar, shown below:
+
+    %start spec
+
+    /* grammar for parsing jison grammar files */
+
+    %{
+    var transform = require('./ebnf-transform').transform;
+    var ebnf = false;
+    %}
+
+    %%
+
+    spec
+        : declaration_list '%%' grammar optional_end_block EOF
+            {$$ = $1; return extend($$, $3);}
+        | declaration_list '%%' grammar '%%' CODE EOF
+            {$$ = $1; yy.addDeclaration($$,{include:$5}); return extend($$, $3);}
+        ;
+
+    optional_end_block
+        :
+        | '%%'
+        ;
+
+    declaration_list
+        : declaration_list declaration
+            {$$ = $1; yy.addDeclaration($$, $2);}
+        |
+            {$$ = {};}
+        ;
+
+    declaration
+        : START id
+            {$$ = {start: $2};}
+        | LEX_BLOCK
+            {$$ = {lex: $1};}
+        | operator
+            {$$ = {operator: $1};}
+        | ACTION
+            {$$ = {include: $1};}
+        ;
+
+    operator
+        : associativity token_list
+            {$$ = [$1]; $$.push.apply($$, $2);}
+        ;
+
+    associativity
+        : LEFT
+            {$$ = 'left';}
+        | RIGHT
+            {$$ = 'right';}
+        | NONASSOC
+            {$$ = 'nonassoc';}
+        ;
+
+    token_list
+        : token_list symbol
+            {$$ = $1; $$.push($2);}
+        | symbol
+            {$$ = [$1];}
+        ;
+
+    grammar
+        : production_list
+            {$$ = $1;}
+        ;
+
+    production_list
+        : production_list production
+            {$$ = $1;
+              if($2[0] in $$) $$[$2[0]] = $$[$2[0]].concat($2[1]);
+              else  $$[$2[0]] = $2[1];}
+        | production
+            {$$ = {}; $$[$1[0]] = $1[1];}
+        ;
+
+    production
+        : id ':' handle_list ';'
+            {$$ = [$1, $3];}
+        ;
+
+    handle_list
+        : handle_list '|' handle_action
+            {$$ = $1; $$.push($3);}
+        | handle_action
+            {$$ = [$1];}
+        ;
+
+    handle_action
+        : handle prec action
+            {$$ = [($1.length ? $1.join(' ') : '')];
+                if($3) $$.push($3);
+                if($2) $$.push($2);
+                if ($$.length === 1) $$ = $$[0];
+            }
+        ;
+
+    handle
+        : handle expression_suffix
+            {$$ = $1; $$.push($2)}
+        |
+            {$$ = [];}
+        ;
+
+    handle_sublist
+        : handle_sublist '|' handle
+            {$$ = $1; $$.push($3.join(' '));}
+        | handle
+            {$$ = [$1.join(' ')];}
+        ;
+
+    expression_suffix
+        : expression suffix
+            {$$ = $expression + $suffix; }
+        ;
+
+    expression
+        : ID
+            {$$ = $1; }
+        | STRING
+            {$$ = ebnf ? "'"+$1+"'" : $1; }
+        | '(' handle_sublist ')'
+            {$$ = '(' + $handle_sublist.join(' | ') + ')'; }
+        ;
+
+    suffix
+        : {$$ = ''}
+        | '*'
+        | '?'
+        | '+'
+        ;
+
+    prec
+        : PREC symbol
+            {$$ = {prec: $2};}
+        |
+            {$$ = null;}
+        ;
+
+    symbol
+        : id
+            {$$ = $1;}
+        | STRING
+            {$$ = yytext;}
+        ;
+
+    id
+        : ID
+            {$$ = yytext;}
+        ;
+
+    action
+        : '{' action_body '}'
+            {$$ = $2;}
+        | ACTION
+            {$$ = $1;}
+        | ARROW_ACTION
+            {$$ = '$$ ='+$1+';';}
+        |
+            {$$ = '';}
+        ;
+
+    action_body
+        :
+            {$$ = '';}
+        | ACTION_BODY
+            {$$ = yytext;}
+        | action_body '{' action_body '}' ACTION_BODY
+            {$$ = $1+$2+$3+$4+$5;}
+        | action_body '{' action_body '}'
+            {$$ = $1+$2+$3+$4;}
+        ;
+
+    %%
+
+    // transform ebnf to bnf if necessary
+    function extend (json, grammar) {
+        json.bnf = ebnf ? transform(grammar) : grammar;
+        return json;
+    }
+
+## license
+
+MIT
diff --git a/bnf.l b/bnf.l
new file mode 100644
index 0000000..2433772
--- /dev/null
+++ b/bnf.l
@@ -0,0 +1,58 @@
+id                [a-zA-Z][a-zA-Z0-9_-]*
+
+
+%x action code
+%s bnf ebnf
+
+%%
+
+<bnf,ebnf>"%%"          this.pushState('code');return '%%';
+
+<ebnf>"("               return '(';
+<ebnf>")"               return ')';
+<ebnf>"*"               return '*';
+<ebnf>"?"               return '?';
+<ebnf>"+"               return '+';
+
+\s+                     /* skip whitespace */
+"//".*                  /* skip comment */
+"/*"(.|\n|\r)*?"*/"     /* skip comment */
+"["{id}"]"              yytext = yytext.substr(1, yyleng-2); return 'ALIAS';
+{id}                    return 'ID';
+'"'[^"]+'"'             yytext = yytext.substr(1, yyleng-2); return 'STRING';
+"'"[^']+"'"             yytext = yytext.substr(1, yyleng-2); return 'STRING';
+":"                     return ':';
+";"                     return ';';
+"|"                     return '|';
+"%%"                    this.pushState(ebnf ? 'ebnf' : 'bnf'); return '%%';
+"%ebnf"                 if (!yy.options) yy.options = {}; ebnf = yy.options.ebnf = true;
+"%prec"                 return 'PREC';
+"%start"                return 'START';
+"%left"                 return 'LEFT';
+"%right"                return 'RIGHT';
+"%nonassoc"             return 'NONASSOC';
+"%parse-param"          return 'PARSE_PARAM';
+"%options"              return 'OPTIONS';
+"%lex"[\w\W]*?"/lex"    return 'LEX_BLOCK';
+"%"[a-zA-Z]+[^\r\n]*    /* ignore unrecognized decl */
+"<"[a-zA-Z]*">"         /* ignore type */
+"{{"[\w\W]*?"}}"        yytext = yytext.substr(2, yyleng-4); return 'ACTION';
+"%{"(.|\r|\n)*?"%}"     yytext = yytext.substr(2, yytext.length-4); return 'ACTION';
+"{"                     yy.depth = 0; this.pushState('action'); return '{';
+"->".*                  yytext = yytext.substr(2, yyleng-2); return 'ARROW_ACTION';
+.                       /* ignore bad characters */
+<*><<EOF>>              return 'EOF';
+
+<action>"/*"(.|\n|\r)*?"*/"           return 'ACTION_BODY';
+<action>"//".*                        return 'ACTION_BODY';
+<action>"/"[^ /]*?['"{}'][^ ]*?"/"    return 'ACTION_BODY'; // regexp with braces or quotes (and no spaces)
+<action>\"("\\\\"|'\"'|[^"])*\"       return 'ACTION_BODY';
+<action>"'"("\\\\"|"\'"|[^'])*"'"     return 'ACTION_BODY';
+<action>[/"'][^{}/"']+                return 'ACTION_BODY';
+<action>[^{}/"']+                     return 'ACTION_BODY';
+<action>"{"                           yy.depth++; return '{';
+<action>"}"                           if (yy.depth==0) this.begin(ebnf ? 'ebnf' : 'bnf'); else yy.depth--; return '}';
+
+<code>(.|\n|\r)+         return 'CODE';
+
+%%
diff --git a/bnf.y b/bnf.y
new file mode 100644
index 0000000..c5f45ee
--- /dev/null
+++ b/bnf.y
@@ -0,0 +1,216 @@
+%start spec
+
+/* grammar for parsing jison grammar files */
+
+%{
+var transform = require('./ebnf-transform').transform;
+var ebnf = false;
+%}
+
+%%
+
+spec
+    : declaration_list '%%' grammar optional_end_block EOF
+        {
+          $$ = $1;
+          return extend($$, $3);
+        }
+    | declaration_list '%%' grammar '%%' CODE EOF
+        {
+          $$ = $1;
+          yy.addDeclaration($$, { include: $5 });
+          return extend($$, $3);
+        }
+    ;
+
+optional_end_block
+    :
+    | '%%'
+    ;
+
+declaration_list
+    : declaration_list declaration
+        {$$ = $1; yy.addDeclaration($$, $2);}
+    |
+        {$$ = {};}
+    ;
+
+declaration
+    : START id
+        {$$ = {start: $2};}
+    | LEX_BLOCK
+        {$$ = {lex: $1};}
+    | operator
+        {$$ = {operator: $1};}
+    | ACTION
+        {$$ = {include: $1};}
+    | parse_param
+        {$$ = {parseParam: $1};}
+    | options
+        {$$ = {options: $1};}
+    ;
+
+options
+    : OPTIONS token_list
+        {$$ = $2;}
+    ;
+
+parse_param
+    : PARSE_PARAM token_list
+        {$$ = $2;}
+    ;
+
+operator
+    : associativity token_list
+        {$$ = [$1]; $$.push.apply($$, $2);}
+    ;
+
+associativity
+    : LEFT
+        {$$ = 'left';}
+    | RIGHT
+        {$$ = 'right';}
+    | NONASSOC
+        {$$ = 'nonassoc';}
+    ;
+
+token_list
+    : token_list symbol
+        {$$ = $1; $$.push($2);}
+    | symbol
+        {$$ = [$1];}
+    ;
+
+grammar
+    : production_list
+        {$$ = $1;}
+    ;
+
+production_list
+    : production_list production
+        {
+            $$ = $1;
+            if ($2[0] in $$) 
+                $$[$2[0]] = $$[$2[0]].concat($2[1]);
+            else
+                $$[$2[0]] = $2[1];
+        }
+    | production
+        {$$ = {}; $$[$1[0]] = $1[1];}
+    ;
+
+production
+    : id ':' handle_list ';'
+        {$$ = [$1, $3];}
+    ;
+
+handle_list
+    : handle_list '|' handle_action
+        {$$ = $1; $$.push($3);}
+    | handle_action
+        {$$ = [$1];}
+    ;
+
+handle_action
+    : handle prec action
+        {
+            $$ = [($1.length ? $1.join(' ') : '')];
+            if($3) $$.push($3);
+            if($2) $$.push($2);
+            if ($$.length === 1) $$ = $$[0];
+        }
+    ;
+
+handle
+    : handle expression_suffix
+        {$$ = $1; $$.push($2)}
+    |
+        {$$ = [];}
+    ;
+
+handle_sublist
+    : handle_sublist '|' handle
+        {$$ = $1; $$.push($3.join(' '));}
+    | handle
+        {$$ = [$1.join(' ')];}
+    ;
+
+expression_suffix
+    : expression suffix ALIAS
+        {$$ = $expression + $suffix + "[" + $ALIAS + "]"; }
+    | expression suffix
+        {$$ = $expression + $suffix; }
+    ;
+
+expression
+    : ID
+        {$$ = $1; }
+    | STRING
+        {$$ = ebnf ? "'" + $1 + "'" : $1; }
+    | '(' handle_sublist ')'
+        {$$ = '(' + $handle_sublist.join(' | ') + ')'; }
+    ;
+
+suffix
+    : {$$ = ''}
+    | '*'
+    | '?'
+    | '+'
+    ;
+
+prec
+    : PREC symbol
+        {$$ = {prec: $2};}
+    |
+        {$$ = null;}
+    ;
+
+symbol
+    : id
+        {$$ = $1;}
+    | STRING
+        {$$ = yytext;}
+    ;
+
+id
+    : ID
+        {$$ = yytext;}
+    ;
+
+action
+    : '{' action_body '}'
+        {$$ = $2;}
+    | ACTION
+        {$$ = $1;}
+    | ARROW_ACTION
+        {$$ = '$$ =' + $1 + ';';}
+    |
+        {$$ = '';}
+    ;
+
+action_body
+    :
+        {$$ = '';}
+    | action_comments_body
+        {$$ = $1;}
+    | action_body '{' action_body '}' action_comments_body
+        {$$ = $1 + $2 + $3 + $4 + $5;}
+    | action_body '{' action_body '}'
+        {$$ = $1 + $2 + $3 + $4;}
+    ;
+
+action_comments_body
+    : ACTION_BODY
+        { $$ = yytext; }
+    | action_comments_body ACTION_BODY
+        { $$ = $1+$2; }
+    ;
+
+%%
+
+// transform ebnf to bnf if necessary
+function extend (json, grammar) {
+    json.bnf = ebnf ? transform(grammar) : grammar;
+    return json;
+}
+
diff --git a/ebnf-parser.js b/ebnf-parser.js
new file mode 100644
index 0000000..55a0b8f
--- /dev/null
+++ b/ebnf-parser.js
@@ -0,0 +1,41 @@
+var bnf = require("./parser").parser,
+    ebnf = require("./ebnf-transform"),
+    jisonlex = require("lex-parser");
+
+exports.parse = function parse (grammar) { return bnf.parse(grammar); };
+exports.transform = ebnf.transform;
+
+// adds a declaration to the grammar
+bnf.yy.addDeclaration = function (grammar, decl) {
+    if (decl.start) {
+        grammar.start = decl.start;
+
+    } else if (decl.lex) {
+        grammar.lex = parseLex(decl.lex);
+
+    } else if (decl.operator) {
+        if (!grammar.operators) grammar.operators = [];
+        grammar.operators.push(decl.operator);
+
+    } else if (decl.parseParam) {
+        if (!grammar.parseParams) grammar.parseParams = [];
+        grammar.parseParams = grammar.parseParams.concat(decl.parseParam);
+
+    } else if (decl.include) {
+        if (!grammar.moduleInclude) grammar.moduleInclude = '';
+        grammar.moduleInclude += decl.include;
+
+    } else if (decl.options) {
+        if (!grammar.options) grammar.options = {};
+        for (var i=0; i < decl.options.length; i++) {
+            grammar.options[decl.options[i]] = true;
+        }
+    }
+
+};
+
+// parse an embedded lex section
+var parseLex = function (text) {
+    return jisonlex.parse(text.replace(/(?:^%lex)|(?:\/lex$)/g, ''));
+};
+
diff --git a/ebnf-transform.js b/ebnf-transform.js
new file mode 100644
index 0000000..d1b8af7
--- /dev/null
+++ b/ebnf-transform.js
@@ -0,0 +1,135 @@
+var EBNF = (function(){
+    var parser = require('./transform-parser.js');
+
+    var transformExpression = function(e, opts, emit) {
+        var type = e[0], value = e[1], name = false;
+
+        if (type === 'xalias') {
+            type = e[1];
+            value = e[2]
+            name = e[3];
+            if (type) {
+                e = e.slice(1,2);
+            } else {
+                e = value;
+                type = e[0];
+                value = e[1];
+            }
+        }
+
+        if (type === 'symbol') {
+            var n;
+            if (e[1][0] === '\\') n = e[1][1];
+            else if (e[1][0] === '\'') n = e[1].substring(1, e[1].length-1);
+            else n = e[1];
+            emit(n + (name ? "["+name+"]" : ""));
+        } else if (type === "+") {
+            if (!name) {
+                name = opts.production + "_repetition_plus" + opts.repid++;
+            }
+            emit(name);
+
+            opts = optsForProduction(name, opts.grammar);
+            var list = transformExpressionList([value], opts);
+            opts.grammar[name] = [
+                [list, "$$ = [$1];"],
+                [
+                    name + " " + list,
+                    "$1.push($2);"
+                ]
+            ];
+        } else if (type === "*") {
+            if (!name) {
+                name = opts.production + "_repetition" + opts.repid++;
+            }
+            emit(name);
+
+            opts = optsForProduction(name, opts.grammar);
+            opts.grammar[name] = [
+                ["", "$$ = [];"],
+                [
+                    name + " " + transformExpressionList([value], opts),
+                    "$1.push($2);"
+                ]
+            ];
+        } else if (type ==="?") {
+            if (!name) {
+                name = opts.production + "_option" + opts.optid++;
+            }
+            emit(name);
+
+            opts = optsForProduction(name, opts.grammar);
+            opts.grammar[name] = [
+                "", transformExpressionList([value], opts)
+            ];
+        } else if (type === "()") {
+            if (value.length == 1) {
+                emit(transformExpressionList(value[0], opts));
+            } else {
+                if (!name) {
+                    name = opts.production + "_group" + opts.groupid++;
+                }
+                emit(name);
+
+                opts = optsForProduction(name, opts.grammar);
+                opts.grammar[name] = value.map(function(handle) {
+                    return transformExpressionList(handle, opts);
+                });
+            }
+        }
+    };
+
+    var transformExpressionList = function(list, opts) {
+        return list.reduce (function (tot, e) {
+            transformExpression (e, opts, function (i) { tot.push(i); });
+            return tot;
+        }, []).
+        join(" ");
+    };
+
+    var optsForProduction = function(id, grammar) {
+        return {
+            production: id,
+            repid: 0,
+            groupid: 0,
+            optid: 0,
+            grammar: grammar
+        };
+    };
+
+    var transformProduction = function(id, production, grammar) {
+        var transform_opts = optsForProduction(id, grammar);
+        return production.map(function (handle) {
+            var action = null, opts = null;
+            if (typeof(handle) !== 'string')
+                action = handle[1],
+                opts = handle[2],
+                handle = handle[0];
+            var expressions = parser.parse(handle);
+
+            handle = transformExpressionList(expressions, transform_opts);
+
+            var ret = [handle];
+            if (action) ret.push(action);
+            if (opts) ret.push(opts);
+            if (ret.length == 1) return ret[0];
+            else return ret;
+        });
+    };
+
+    var transformGrammar = function(grammar) {
+        Object.keys(grammar).forEach(function(id) {
+            grammar[id] = transformProduction(id, grammar[id], grammar);
+        });
+    };
+
+    return {
+        transform: function (ebnf) {
+            transformGrammar(ebnf);
+            return ebnf;
+        }
+    };
+})();
+
+exports.transform = EBNF.transform;
+
diff --git a/ebnf.y b/ebnf.y
new file mode 100644
index 0000000..e5ccfd3
--- /dev/null
+++ b/ebnf.y
@@ -0,0 +1,66 @@
+/* EBNF grammar spec */
+
+%lex
+
+id                        [a-zA-Z][a-zA-Z0-9_-]*
+
+%%
+\s+             /* skip whitespace */
+{id}           return 'symbol';
+"["{id}"]"     yytext = yytext.substr(1, yyleng-2); return 'ALIAS';
+"'"[^']*"'"    return 'symbol';
+"."            return 'symbol';
+
+bar            return 'bar';
+"("            return '(';
+")"            return ')';
+"*"            return '*';
+"?"            return '?';
+"|"            return '|';
+"+"            return '+';
+<<EOF>>        return 'EOF';
+/lex
+
+%start production
+
+%%
+
+production
+  : handle EOF
+    { return $handle; }
+  ;
+
+handle_list
+  : handle
+    { $$ = [$handle]; }
+  | handle_list '|' handle
+    { $handle_list.push($handle); }
+  ;
+
+handle
+  :
+    { $$ = []; }
+  | handle expression_suffix
+    { $handle.push($expression_suffix); }
+  ;
+
+expression_suffix
+  : expression suffix ALIAS
+    { $$ = ['xalias', $suffix, $expression, $ALIAS]; }
+  | expression suffix
+    { if ($suffix) $$ = [$suffix, $expression]; else $$ = $expression; }
+  ;
+
+expression
+  : symbol
+    { $$ = ['symbol', $symbol]; }
+  | '(' handle_list ')'
+    { $$ = ['()', $handle_list]; }
+  ;
+
+suffix
+  : 
+  | '*'
+  | '?'
+  | '+'
+  ;
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..76f52d4
--- /dev/null
+++ b/package.json
@@ -0,0 +1,27 @@
+{
+  "name": "ebnf-parser",
+  "version": "0.1.10",
+  "description": "A parser for BNF and EBNF grammars used by jison",
+  "main": "ebnf-parser.js",
+  "scripts": {
+    "test": "make test"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/zaach/ebnf-parser.git"
+  },
+  "keywords": [
+    "bnf",
+    "ebnf",
+    "grammar",
+    "parser",
+    "jison"
+  ],
+  "author": "Zach Carter",
+  "license": "MIT",
+  "devDependencies": {
+    "jison": "git://github.com/zaach/jison.git#ef2647",
+    "lex-parser": "0.1.0",
+    "test": "*"
+  }
+}
diff --git a/tests/all-tests.js b/tests/all-tests.js
new file mode 100755
index 0000000..232b542
--- /dev/null
+++ b/tests/all-tests.js
@@ -0,0 +1,9 @@
+#!/usr/bin/env narwhal
+
+//exports.testBNF = require("./bnf");
+exports.testBNFParse = require("./bnf_parse");
+exports.testEBNF = require("./ebnf");
+exports.testEBNFParse = require("./ebnf_parse");
+
+if (require.main === module)
+    require("test").run(exports);
diff --git a/tests/bnf.js b/tests/bnf.js
new file mode 100644
index 0000000..df1a1b9
--- /dev/null
+++ b/tests/bnf.js
@@ -0,0 +1,91 @@
+var Jison = require("../setup").Jison,
+    Lexer = require("../setup").Lexer,
+    assert = require("assert");
+
+exports["test BNF parser"] = function () {
+    var grammar = {
+        "lex": {
+            "rules": [
+              ["\\s+", "/* skip whitespace */"],
+              ["[a-zA-Z][a-zA-Z0-9_-]*", "return 'ID';"],
+              ["\"[^\"]+\"", "yytext = yytext.substr(1, yyleng-2); return 'STRING';"],
+              ["'[^']+'", "yytext = yytext.substr(1, yyleng-2); return 'STRING';"],
+              [":", "return ':';"],
+              [";", "return ';';"],
+              ["\\|", "return '|';"],
+              ["%%", "return '%%';"],
+              ["%prec", "return 'PREC';"],
+              ["%start", "return 'START';"],
+              ["%left", "return 'LEFT';"],
+              ["%right", "return 'RIGHT';"],
+              ["%nonassoc", "return 'NONASSOC';"],
+              ["\\{[^}]*\\}", "yytext = yytext.substr(1, yyleng-2); return 'ACTION';"],
+              [".", "/* ignore bad characters */"],
+              ["$", "return 'EOF';"]
+            ]
+        },
+        "bnf": {
+            "spec" :[[ "declaration_list %% grammar EOF", "$$ = $1; $$.bnf = $3; return $$;" ]],
+
+            "declaration_list" :[[ "declaration_list declaration", "$$ = $1; yy.addDeclaration($$, $2);" ],
+                                 [ "", "$$ = {};" ]],
+
+            "declaration" :[[ "START id", "$$ = {start: $2};" ],
+                            [ "operator", "$$ = {operator: $1};" ]],
+
+            "operator" :[[ "associativity token_list", "$$ = [$1]; $$.push.apply($$, $2);" ]],
+
+            "associativity" :[[ "LEFT", "$$ = 'left';" ],
+                              [ "RIGHT", "$$ = 'right';" ],
+                              [ "NONASSOC", "$$ = 'nonassoc';" ]],
+
+            "token_list" :[[ "token_list symbol", "$$ = $1; $$.push($2);" ],
+                           [ "symbol", "$$ = [$1];" ]],
+
+            "grammar" :[[ "production_list", "$$ = $1;" ]],
+
+            "production_list" :[[ "production_list production", "$$ = $1; $$[$2[0]] = $2[1];" ],
+                                [ "production", "$$ = {}; $$[$1[0]] = $1[1];" ]],
+
+            "production" :[[ "id : handle_list ;", "$$ = [$1, $3];" ]],
+
+            "handle_list" :[[ "handle_list | handle_action", "$$ = $1; $$.push($3);" ],
+                            [ "handle_action", "$$ = [$1];" ]],
+
+            "handle_action" :[[ "handle action prec", "$$ = [($1.length ? $1.join(' ') : '')]; if($2) $$.push($2); if($3) $$.push($3); if ($$.length === 1) $$ = $$[0];" ]],
+
+            "handle" :[[ "handle symbol", "$$ = $1; $$.push($2)" ],
+                       [ "", "$$ = [];" ]],
+
+            "prec" :[[ "PREC symbol", "$$ = {prec: $2};" ],
+                     [ "", "$$ = null;" ]],
+
+            "symbol" :[[ "id", "$$ = $1;" ],
+                       [ "STRING", "$$ = yytext;" ]],
+
+            "id" :[[ "ID", "$$ = yytext;" ]],
+
+            "action" :[[ "ACTION", "$$ = yytext;" ],
+                       [ "", "$$ = '';" ]]
+        }
+
+    };
+
+    var parser = new Jison.Parser(grammar);
+    parser.yy.addDeclaration = function (grammar, decl) {
+        if (decl.start) {
+            grammar.start = decl.start
+        }
+        if (decl.operator) {
+            if (!grammar.operators) {
+                grammar.operators = [];
+            }
+            grammar.operators.push(decl.operator);
+        }
+
+    };
+
+    var result = parser.parse('%start foo %left "+" "-" %right "*" "/" %nonassoc "=" STUFF %left UMINUS %% foo : bar baz blitz { stuff } %prec GEMINI | bar %prec UMINUS | ;\nbar: { things };\nbaz: | foo ;');
+    assert.ok(result, "parse bnf production");
+};
+
diff --git a/tests/bnf_parse.js b/tests/bnf_parse.js
new file mode 100644
index 0000000..05d21e0
--- /dev/null
+++ b/tests/bnf_parse.js
@@ -0,0 +1,221 @@
+var assert = require("assert"),
+    bnf = require("../ebnf-parser");
+
+exports["test basic grammar"] = function () {
+    var grammar = "%% test: foo bar | baz ; hello: world ;";
+    var expected = {bnf: {test: ["foo bar", "baz"], hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test multiple same rule"] = function () {
+    var grammar = "%% test: foo bar | baz ; test: world ;";
+    var expected = {bnf: {test: ["foo bar", "baz", "world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test classy grammar"] = function () {
+    var grammar = "%%\n\npgm \n: cdl MAIN LBRACE vdl el RBRACE ENDOFFILE \n; cdl \n: c cdl \n| \n;";
+    var expected = {bnf: {pgm: ["cdl MAIN LBRACE vdl el RBRACE ENDOFFILE"], cdl: ["c cdl", ""]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test advanced grammar"] = function () {
+    var grammar = "%% test: foo bar {action} | baz ; hello: world %prec UMINUS ;extra: foo %prec '-' {action} ;";
+    var expected = {bnf: {test: [["foo bar", "action" ], "baz"], hello: [[ "world", {prec:"UMINUS"} ]], extra: [[ "foo", "action", {prec: "-"} ]]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test nullable rule"] = function () {
+    var grammar = "%% test: foo bar | ; hello: world ;";
+    var expected = {bnf: {test: ["foo bar", ""], hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test nullable rule with action"] = function () {
+    var grammar = "%% test: foo bar | {action}; hello: world ;";
+    var expected = {bnf: {test: ["foo bar", [ "", "action" ]], hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test nullable rule with %{ %} delimited action"] = function () {
+    var grammar = "%% test: foo bar | %{action{}%}; hello: world ;";
+    var expected = {bnf: {test: ["foo bar", [ "", "action{}" ]], hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test nullable rule with {{ }} delimited action"] = function () {
+    var grammar = "%% test: foo bar | {{action{};}}; hello: world ;";
+    var expected = {bnf: {test: ["foo bar", [ "", "action{};" ]], hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test rule with {{ }} delimited action"] = function () {
+    var grammar = "%% test: foo bar {{ node({}, node({})); }}; hello: world ;";
+    var expected = {bnf: {test: [["foo bar"," node({}, node({})); " ]], hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test comment"] = function () {
+    var grammar = "/* comment */ %% hello: world ;";
+    var expected = {bnf: {hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test single line comment"] = function () {
+    var grammar = "//comment \n %% hello: world ;";
+    var expected = {bnf: {hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parse comment");
+};
+
+exports["test comment with nested *"] = function () {
+    var grammar = "/* comment * not done */ %% hello: /* oh hai */ world ;";
+    var expected = {bnf: {hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test token"] = function () {
+    var grammar = "%token blah\n%% test: foo bar | baz ; hello: world ;";
+    var expected = {bnf: {test: ["foo bar", "baz"], hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test token with type"] = function () {
+    var grammar = "%type <type> blah\n%% test: foo bar | baz ; hello: world ;";
+    var expected = {bnf: {test: ["foo bar", "baz"], hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test embedded lexical block"] = function () {
+    var grammar = "%lex \n%%\n'foo' return 'foo';\n'bar' {return 'bar';}\n'baz' {return 'baz';}\n'world' {return 'world';}\n/lex\
+                   %% test: foo bar | baz ; hello: world ;";
+    var expected = {
+                        lex: {
+                            rules: [
+                               ["foo\\b", "return 'foo';"],
+                               ["bar\\b", "return 'bar';"],
+                               ["baz\\b", "return 'baz';"],
+                               ["world\\b", "return 'world';"]
+                            ]
+                        },
+                        bnf: {test: ["foo bar", "baz"], hello: ["world"]}
+                    };
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test balanced braces"] = function () {
+    var grammar = "%% test: foo bar { node({}, node({foo:'bar'})); }; hello: world ;";
+    var expected = {bnf: {test: [["foo bar"," node({}, node({foo:'bar'})); " ]], hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test brace within a multi-line comment"] = function () {
+    var grammar = "%% test: foo bar { node({}, 3 / 4); /* { */ }; hello: world ;";
+    var expected = {bnf: {test: [["foo bar"," node({}, 3 / 4); /* { */ " ]], hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test brace within a single-line comment"] = function () {
+    var grammar = "%% test: foo bar { node({}); // {\n }; hello: world ;";
+    var expected = {bnf: {test: [["foo bar"," node({}); // {\n " ]], hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test brace within a string"] = function () {
+    var grammar = "%% test: foo bar { node({}, 3 / 4, '{'); /* { */ }; hello: world ;";
+    var expected = {bnf: {test: [["foo bar"," node({}, 3 / 4, '{'); /* { */ " ]], hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test brace within a string with double quotes"] = function () {
+    var grammar = "%% test: foo bar { node({}, 3 / 4, \"{\"); /* { */ }; hello: world ;";
+    var expected = {bnf: {test: [["foo bar"," node({}, 3 / 4, \"{\"); /* { */ " ]], hello: ["world"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test uneven braces and quotes within regex"] = function () {
+    var grammar = "%% test: foo bar { node({}, 3 / 4, \"{\"); /{'\"/g; 1 / 2; }; hello: world { blah / bah };";
+    var expected = {bnf: {test: [["foo bar"," node({}, 3 / 4, \"{\"); /{'\"/g; 1 / 2; " ]], hello: [["world", " blah / bah "]]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test code declaration"] = function () {
+    var grammar = "%{var foo = 'bar';%}\n%%hello: world;";
+    var expected = {bnf: {hello: ["world"]}, moduleInclude: "var foo = 'bar';"};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test remainder code"] = function () {
+    var grammar = "%%hello: world;%%var foo = 'bar';";
+    var expected = {bnf: {hello: ["world"]}, moduleInclude: "var foo = 'bar';"};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test remainder and declarations code"] = function () {
+    var grammar = "%{test;%}\n%%hello: world;%%var foo = 'bar';";
+    var expected = {bnf: {hello: ["world"]}, moduleInclude: "test;var foo = 'bar';"};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test expression action"] = function () {
+    var grammar = "%% test: foo bar -> $foo\n;";
+    var expected = {bnf: {test: [["foo bar","$$ = $foo;"]]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test quote in rule"] = function () {
+    var grammar = "%lex\n%%\n\\' return \"'\"\n/lex\n%% test: foo bar \"'\";";
+    var expected = {lex: {
+      rules: [
+        ["'", "return \"'\""]
+      ]
+    },
+    bnf: {test: ["foo bar '"]}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test windows line endings"] = function () {
+    var grammar = "%{baz\r\n%}%% test: foo bar | {\r\naction;\r\nhi};\r\nhello: world ;%%foo;\r\nbar;";
+    var expected = {bnf: {test: ["foo bar", [ "", "\r\naction;\r\nhi" ]], hello: ["world"]}, moduleInclude: 'baz\r\nfoo;\r\nbar;'};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test parse params"] = function () {
+    var grammar = "%parse-param first second\n%%hello: world;%%";
+    var expected = {bnf: {hello: ["world"]}, parseParams: ["first", "second"]};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
+
+exports["test options"] = function () {
+    var grammar = "%options one two\n%%hello: world;%%";
+    var expected = {bnf: {hello: ["world"]}, options: {one: true, two: true}};
+
+    assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
+};
diff --git a/tests/ebnf.js b/tests/ebnf.js
new file mode 100644
index 0000000..a9f2ebd
--- /dev/null
+++ b/tests/ebnf.js
@@ -0,0 +1,105 @@
+var assert = require("assert"),
+    ebnf = require("../ebnf-transform");
+var Parser = require('jison').Parser;
+
// Build a nodeunit-style test: construct a throwaway word/comma grammar
// whose single `top` rule is the EBNF expression under test, expand it
// with ebnf.transform, and assert that every sample string parses.
function testParse(top, strings) {
    return function () {
        var grammar = {
            "lex": {
                "rules": [
                    ["\\s+", ''],
                    ["[A-Za-z]+", "return 'word';"],
                    [",", "return ',';"],
                    ["$", "return 'EOF';"]
                ]
            },
            "start": "top",
            "bnf": ebnf.transform({"top": [top]})
        };
        // Accept a single sample string or an array of them.
        var samples = (typeof strings === 'string') ? [strings] : strings;
        for (var i = 0; i < samples.length; i++) {
            assert.ok(new Parser(grammar).parse(samples[i]));
        }
    };
}
+
// Counterpart to testParse: the rule is supplied under the "ebnf" key
// instead of a pre-transformed "bnf" key, and every sample string is
// expected to make the generated parser throw.
function testBadParse(top, strings) {
    return function () {
        var grammar = {
            "lex": {
                "rules": [
                    ["\\s+", ''],
                    ["[A-Za-z]+", "return 'word';"],
                    [",", "return ',';"],
                    ["$", "return 'EOF';"]
                ]
            },
            "start": "top",
            "ebnf": {"top": [top]}
        };
        // Accept a single sample string or an array of them.
        var samples = (typeof strings === 'string') ? [strings] : strings;
        samples.forEach(function (sample) {
            assert.throws(function () {
                new Parser(grammar).parse(sample);
            });
        });
    };
}
+
// Alias test: expand the EBNF rule, assert the transformed BNF equals
// `obj` exactly (including generated helper nonterminals and their
// aliases), then confirm the sample string `str` still parses.
function testAlias(top, obj, str) {
    return function () {
        var grammar = {
            "lex": {
                "rules": [
                    ["\\s+", ''],
                    ["[A-Za-z]+", "return 'word';"],
                    [",", "return ',';"],
                    ["$", "return 'EOF';"]
                ]
            },
            "start": "top",
            "bnf": ebnf.transform({"top": [top]})
        };
        assert.deepEqual(grammar.bnf, obj);
        assert.ok(new Parser(grammar).parse(str));
    };
}
+
// Table of EBNF-operator tests for ebnf.transform; each entry either checks
// the transform output directly or (via testParse/testBadParse/testAlias)
// feeds sample input strings to a parser generated from the expansion.
var tests = {
    // Plain BNF (no EBNF operators) must pass through transform unchanged.
    "test idempotent transform": function() {
        var first = {
            "nodelist": [["", "$$ = [];"], ["nodelist node", "$1.push($2);"]]
        };
        var second = ebnf.transform(JSON.parse(JSON.stringify(first)));
        assert.deepEqual(second, first);
    },
    // `*` accepts zero or more occurrences.
    "test repeat (*) on empty string": testParse("word* EOF", ""),
    "test repeat (*) on single word": testParse("word* EOF", "oneword"),
    "test repeat (*) on multiple words": testParse("word* EOF", "multiple words"),
    // `+` requires at least one occurrence, so the empty string must fail.
    "test repeat (+) on empty string": testBadParse("word+ EOF", ""),
    "test repeat (+) on single word": testParse("word+ EOF", "oneword"),
    "test repeat (+) on multiple words": testParse("word+ EOF", "multiple words"),
    // `?` accepts zero or one occurrence.
    "test option (?) on empty string": testParse("word? EOF", ""),
    "test option (?) on single word": testParse("word? EOF", "oneword"),
    // `( ... )` groups, optionally with `|` alternation inside.
    "test group () on simple phrase": testParse("(word word) EOF", "two words"),
    "test group () with multiple options on first option": testParse("((word word) | word) EOF", "hi there"),
    "test group () with multiple options on second option": testParse("((word word) | word) EOF", "hi"),
    "test complex expression ( *, ?, () )": testParse("(word (',' word)*)? EOF ", ["", "hi", "hi, there"]),
    // `[name]` after an operator names the generated helper nonterminal.
    "test named repeat (*)": testAlias("word*[bob] EOF",
        { top: [ 'bob EOF' ],
        bob: [ [ '', '$$ = [];' ], [ 'bob word', '$1.push($2);' ] ] }, "word"),
    "test named repeat (+)": testAlias("word+[bob] EOF",
        { top: [ 'bob EOF' ],
        bob: [ [ 'word', '$$ = [$1];' ], [ 'bob word', '$1.push($2);' ] ] }, "wordy word"),
    "test named group ()": testAlias("word[alice] (',' word)*[bob] EOF",
        {"top":["word[alice] bob EOF"],"bob":[["","$$ = [];"],["bob , word","$1.push($2);"]]},
        "one, two"),
    "test named option (?)": testAlias("word[alex] word?[bob] EOF", { top: [ 'word[alex] bob EOF' ], bob: [ '', 'word' ] }, "oneor two"),
    "test named complex expression (())": testAlias("word[alpha] (word[alex] (word[bob] word[carol] ',')+[david] word ',')*[enoch] EOF",
        {"top":["word[alpha] enoch EOF"],"david":[["word[bob] word[carol] ,","$$ = [$1];"],["david word[bob] word[carol] ,","$1.push($2);"]],
        "enoch":[["","$$ = [];"],["enoch word[alex] david word ,","$1.push($2);"]]},
        "one two three four, five,"
    )
};

// Publish every table entry as an individual nodeunit-style export.
for (var test in tests) {
    exports[test] = tests[test];
}
diff --git a/tests/ebnf_parse.js b/tests/ebnf_parse.js
new file mode 100644
index 0000000..36fcf48
--- /dev/null
+++ b/tests/ebnf_parse.js
@@ -0,0 +1,38 @@
+var assert = require("assert"),
+    bnf = require("../ebnf-parser"),
+    ebnf = require("../ebnf-transform");
+
// Build a nodeunit-style test: parse a grammar source that enables %ebnf
// and defines a single `top` rule, and assert the result equals what
// ebnf.transform produces for the same rule directly.
// NOTE(review): `strings` is accepted for call-site symmetry with
// tests/ebnf.js but is not used in this file.
function testParse(top, strings) {
    return function () {
        var expected = {
            "bnf": ebnf.transform({"top": [top]})
        };
        var source = "%ebnf\n%%\ntop : " + top + ";";
        assert.deepEqual(bnf.parse(source), expected);
    };
}
+
// EBNF test table for the grammar-file path: a source using the %ebnf
// declaration must yield the same BNF as calling ebnf.transform directly.
var tests = {
    // Plain BNF must pass through transform unchanged.
    "test idempotent transform": function() {
        var first = {
            "nodelist": [["", "$$ = [];"], ["nodelist node", "$1.push($2);"]]
        };
        var second = ebnf.transform(JSON.parse(JSON.stringify(first)));
        assert.deepEqual(second, first);
    },
    // `*` accepts zero or more, `+` one or more, `?` zero or one;
    // `( ... )` groups with optional `|` alternation inside.
    "test repeat (*) on empty string": testParse("word* EOF", ""),
    "test repeat (*) on single word": testParse("word* EOF", "oneword"),
    "test repeat (*) on multiple words": testParse("word* EOF", "multiple words"),
    "test repeat (+) on single word": testParse("word+ EOF", "oneword"),
    "test repeat (+) on multiple words": testParse("word+ EOF", "multiple words"),
    "test option (?) on empty string": testParse("word? EOF", ""),
    "test option (?) on single word": testParse("word? EOF", "oneword"),
    "test group () on simple phrase": testParse("(word word) EOF", "two words"),
    "test group () with multiple options on first option": testParse("((word word) | word) EOF", "hi there"),
    "test group () with multiple options on second option": testParse("((word word) | word) EOF", "hi"),
    "test complex expression ( *, ?, () )": testParse("(word (',' word)*)? EOF", ["", "hi", "hi, there"])
};

// Publish every table entry as an individual nodeunit-style export.
for (var test in tests) {
    exports[test] = tests[test];
}
diff --git a/transform-parser.js b/transform-parser.js
new file mode 100644
index 0000000..4ef7195
--- /dev/null
+++ b/transform-parser.js
@@ -0,0 +1,629 @@
+/* parser generated by jison 0.4.11 */
+/*
+  Returns a Parser object of the following structure:
+
+  Parser: {
+    yy: {}
+  }
+
+  Parser.prototype: {
+    yy: {},
+    trace: function(),
+    symbols_: {associative list: name ==> number},
+    terminals_: {associative list: number ==> name},
+    productions_: [...],
+    performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate, $$, _$),
+    table: [...],
+    defaultActions: {...},
+    parseError: function(str, hash),
+    parse: function(input),
+
+    lexer: {
+        EOF: 1,
+        parseError: function(str, hash),
+        setInput: function(input),
+        input: function(),
+        unput: function(str),
+        more: function(),
+        less: function(n),
+        pastInput: function(),
+        upcomingInput: function(),
+        showPosition: function(),
+        test_match: function(regex_match_array, rule_index),
+        next: function(),
+        lex: function(),
+        begin: function(condition),
+        popState: function(),
+        _currentRules: function(),
+        topState: function(),
+        pushState: function(condition),
+
+        options: {
+            ranges: boolean           (optional: true ==> token location info will include a .range[] member)
+            flex: boolean             (optional: true ==> flex-like lexing behaviour where the rules are tested exhaustively to find the longest match)
+            backtrack_lexer: boolean  (optional: true ==> lexer regexes are tested in order and for each matching regex the action code is invoked; the lexer terminates the scan when a token is returned by the action code)
+        },
+
+        performAction: function(yy, yy_, $avoiding_name_collisions, YY_START),
+        rules: [...],
+        conditions: {associative list: name ==> set},
+    }
+  }
+
+
+  token location info (@$, _$, etc.): {
+    first_line: n,
+    last_line: n,
+    first_column: n,
+    last_column: n,
+    range: [start_number, end_number]       (where the numbers are indexes into the input string, regular zero-based)
+  }
+
+
+  the parseError function receives a 'hash' object with these members for lexer and parser errors: {
+    text:        (matched text)
+    token:       (the produced terminal token, if any)
+    line:        (yylineno)
+  }
+  while parser (grammar) errors will also provide these members, i.e. parser errors deliver a superset of attributes: {
+    loc:         (yylloc)
+    expected:    (string describing the set of expected tokens)
+    recoverable: (boolean: TRUE when the parser has an error recovery rule available for this particular error)
+  }
+*/
+var ebnf = (function(){
+var parser = {trace: function trace() { },
+yy: {},
+symbols_: {"error":2,"production":3,"handle":4,"EOF":5,"handle_list":6,"|":7,"expression_suffix":8,"expression":9,"suffix":10,"ALIAS":11,"symbol":12,"(":13,")":14,"*":15,"?":16,"+":17,"$accept":0,"$end":1},
+terminals_: {2:"error",5:"EOF",7:"|",11:"ALIAS",12:"symbol",13:"(",14:")",15:"*",16:"?",17:"+"},
+productions_: [0,[3,2],[6,1],[6,3],[4,0],[4,2],[8,3],[8,2],[9,1],[9,3],[10,0],[10,1],[10,1],[10,1]],
+performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate /* action[1] */, $$ /* vstack */, _$ /* lstack */) {
+/* this == yyval */
+
+var $0 = $$.length - 1;
+switch (yystate) {
+case 1: return $$[$0-1]; 
+break;
+case 2: this.$ = [$$[$0]]; 
+break;
+case 3: $$[$0-2].push($$[$0]); 
+break;
+case 4: this.$ = []; 
+break;
+case 5: $$[$0-1].push($$[$0]); 
+break;
+case 6: this.$ = ['xalias', $$[$0-1], $$[$0-2], $$[$0]]; 
+break;
+case 7: if ($$[$0]) this.$ = [$$[$0], $$[$0-1]]; else this.$ = $$[$0-1]; 
+break;
+case 8: this.$ = ['symbol', $$[$0]]; 
+break;
+case 9: this.$ = ['()', $$[$0-1]]; 
+break;
+}
+},
+table: [{3:1,4:2,5:[2,4],12:[2,4],13:[2,4]},{1:[3]},{5:[1,3],8:4,9:5,12:[1,6],13:[1,7]},{1:[2,1]},{5:[2,5],7:[2,5],12:[2,5],13:[2,5],14:[2,5]},{5:[2,10],7:[2,10],10:8,11:[2,10],12:[2,10],13:[2,10],14:[2,10],15:[1,9],16:[1,10],17:[1,11]},{5:[2,8],7:[2,8],11:[2,8],12:[2,8],13:[2,8],14:[2,8],15:[2,8],16:[2,8],17:[2,8]},{4:13,6:12,7:[2,4],12:[2,4],13:[2,4],14:[2,4]},{5:[2,7],7:[2,7],11:[1,14],12:[2,7],13:[2,7],14:[2,7]},{5:[2,11],7:[2,11],11:[2,11],12:[2,11],13:[2,11],14:[2,11]},{5:[2,12],7: [...]
+defaultActions: {3:[2,1]},
// Default parser error handler: errors flagged recoverable (an error-recovery
// rule applies) are only traced; any other error aborts the parse by throwing.
parseError: function parseError(str, hash) {
    if (hash.recoverable) {
        this.trace(str);
    } else {
        throw new Error(str);
    }
},
+parse: function parse(input) {
+    var self = this, stack = [0], vstack = [null], lstack = [], table = this.table, yytext = '', yylineno = 0, yyleng = 0, recovering = 0, TERROR = 2, EOF = 1;
+    var args = lstack.slice.call(arguments, 1);
+    this.lexer.setInput(input);
+    this.lexer.yy = this.yy;
+    this.yy.lexer = this.lexer;
+    this.yy.parser = this;
+    if (typeof this.lexer.yylloc == 'undefined') {
+        this.lexer.yylloc = {};
+    }
+    var yyloc = this.lexer.yylloc;
+    lstack.push(yyloc);
+    var ranges = this.lexer.options && this.lexer.options.ranges;
+    if (typeof this.yy.parseError === 'function') {
+        this.parseError = this.yy.parseError;
+    } else {
+        this.parseError = Object.getPrototypeOf(this).parseError;
+    }
+    function popStack(n) {
+        stack.length = stack.length - 2 * n;
+        vstack.length = vstack.length - n;
+        lstack.length = lstack.length - n;
+    }
+    function lex() {
+        var token;
+        token = self.lexer.lex() || EOF;
+        if (typeof token !== 'number') {
+            token = self.symbols_[token] || token;
+        }
+        return token;
+    }
+    var symbol, preErrorSymbol, state, action, a, r, yyval = {}, p, len, newState, expected;
+    while (true) {
+        state = stack[stack.length - 1];
+        if (this.defaultActions[state]) {
+            action = this.defaultActions[state];
+        } else {
+            if (symbol === null || typeof symbol == 'undefined') {
+                symbol = lex();
+            }
+            action = table[state] && table[state][symbol];
+        }
+                    if (typeof action === 'undefined' || !action.length || !action[0]) {
+                var errStr = '';
+                expected = [];
+                for (p in table[state]) {
+                    if (this.terminals_[p] && p > TERROR) {
+                        expected.push('\'' + this.terminals_[p] + '\'');
+                    }
+                }
+                if (this.lexer.showPosition) {
+                    errStr = 'Parse error on line ' + (yylineno + 1) + ':\n' + this.lexer.showPosition() + '\nExpecting ' + expected.join(', ') + ', got \'' + (this.terminals_[symbol] || symbol) + '\'';
+                } else {
+                    errStr = 'Parse error on line ' + (yylineno + 1) + ': Unexpected ' + (symbol == EOF ? 'end of input' : '\'' + (this.terminals_[symbol] || symbol) + '\'');
+                }
+                this.parseError(errStr, {
+                    text: this.lexer.match,
+                    token: this.terminals_[symbol] || symbol,
+                    line: this.lexer.yylineno,
+                    loc: yyloc,
+                    expected: expected
+                });
+            }
+        if (action[0] instanceof Array && action.length > 1) {
+            throw new Error('Parse Error: multiple actions possible at state: ' + state + ', token: ' + symbol);
+        }
+        switch (action[0]) {
+        case 1:
+            stack.push(symbol);
+            vstack.push(this.lexer.yytext);
+            lstack.push(this.lexer.yylloc);
+            stack.push(action[1]);
+            symbol = null;
+            if (!preErrorSymbol) {
+                yyleng = this.lexer.yyleng;
+                yytext = this.lexer.yytext;
+                yylineno = this.lexer.yylineno;
+                yyloc = this.lexer.yylloc;
+                if (recovering > 0) {
+                    recovering--;
+                }
+            } else {
+                symbol = preErrorSymbol;
+                preErrorSymbol = null;
+            }
+            break;
+        case 2:
+            len = this.productions_[action[1]][1];
+            yyval.$ = vstack[vstack.length - len];
+            yyval._$ = {
+                first_line: lstack[lstack.length - (len || 1)].first_line,
+                last_line: lstack[lstack.length - 1].last_line,
+                first_column: lstack[lstack.length - (len || 1)].first_column,
+                last_column: lstack[lstack.length - 1].last_column
+            };
+            if (ranges) {
+                yyval._$.range = [
+                    lstack[lstack.length - (len || 1)].range[0],
+                    lstack[lstack.length - 1].range[1]
+                ];
+            }
+            r = this.performAction.apply(yyval, [
+                yytext,
+                yyleng,
+                yylineno,
+                this.yy,
+                action[1],
+                vstack,
+                lstack
+            ].concat(args));
+            if (typeof r !== 'undefined') {
+                return r;
+            }
+            if (len) {
+                stack = stack.slice(0, -1 * len * 2);
+                vstack = vstack.slice(0, -1 * len);
+                lstack = lstack.slice(0, -1 * len);
+            }
+            stack.push(this.productions_[action[1]][0]);
+            vstack.push(yyval.$);
+            lstack.push(yyval._$);
+            newState = table[stack[stack.length - 2]][stack[stack.length - 1]];
+            stack.push(newState);
+            break;
+        case 3:
+            return true;
+        }
+    }
+    return true;
+}};
+/* generated by jison-lex 0.2.1 */
+var lexer = (function(){
+var lexer = {
+
+EOF:1,
+
+parseError:function parseError(str, hash) {
+        if (this.yy.parser) {
+            this.yy.parser.parseError(str, hash);
+        } else {
+            throw new Error(str);
+        }
+    },
+
+// resets the lexer, sets new input
+setInput:function (input) {
+        this._input = input;
+        this._more = this._backtrack = this.done = false;
+        this.yylineno = this.yyleng = 0;
+        this.yytext = this.matched = this.match = '';
+        this.conditionStack = ['INITIAL'];
+        this.yylloc = {
+            first_line: 1,
+            first_column: 0,
+            last_line: 1,
+            last_column: 0
+        };
+        if (this.options.ranges) {
+            this.yylloc.range = [0,0];
+        }
+        this.offset = 0;
+        return this;
+    },
+
+// consumes and returns one char from the input
+input:function () {
+        var ch = this._input[0];
+        this.yytext += ch;
+        this.yyleng++;
+        this.offset++;
+        this.match += ch;
+        this.matched += ch;
+        var lines = ch.match(/(?:\r\n?|\n).*/g);
+        if (lines) {
+            this.yylineno++;
+            this.yylloc.last_line++;
+        } else {
+            this.yylloc.last_column++;
+        }
+        if (this.options.ranges) {
+            this.yylloc.range[1]++;
+        }
+
+        this._input = this._input.slice(1);
+        return ch;
+    },
+
+// unshifts one char (or a string) into the input
+unput:function (ch) {
+        var len = ch.length;
+        var lines = ch.split(/(?:\r\n?|\n)/g);
+
+        this._input = ch + this._input;
+        this.yytext = this.yytext.substr(0, this.yytext.length - len - 1);
+        //this.yyleng -= len;
+        this.offset -= len;
+        var oldLines = this.match.split(/(?:\r\n?|\n)/g);
+        this.match = this.match.substr(0, this.match.length - 1);
+        this.matched = this.matched.substr(0, this.matched.length - 1);
+
+        if (lines.length - 1) {
+            this.yylineno -= lines.length - 1;
+        }
+        var r = this.yylloc.range;
+
+        this.yylloc = {
+            first_line: this.yylloc.first_line,
+            last_line: this.yylineno + 1,
+            first_column: this.yylloc.first_column,
+            last_column: lines ?
+                (lines.length === oldLines.length ? this.yylloc.first_column : 0)
+                 + oldLines[oldLines.length - lines.length].length - lines[0].length :
+              this.yylloc.first_column - len
+        };
+
+        if (this.options.ranges) {
+            this.yylloc.range = [r[0], r[0] + this.yyleng - len];
+        }
+        this.yyleng = this.yytext.length;
+        return this;
+    },
+
+// When called from action, caches matched text and appends it on next action
+more:function () {
+        this._more = true;
+        return this;
+    },
+
+// When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead.
+reject:function () {
+        if (this.options.backtrack_lexer) {
+            this._backtrack = true;
+        } else {
+            return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), {
+                text: "",
+                token: null,
+                line: this.yylineno
+            });
+
+        }
+        return this;
+    },
+
+// retain first n characters of the match
+less:function (n) {
+        this.unput(this.match.slice(n));
+    },
+
+// displays already matched input, i.e. for error messages
+pastInput:function () {
+        var past = this.matched.substr(0, this.matched.length - this.match.length);
+        return (past.length > 20 ? '...':'') + past.substr(-20).replace(/\n/g, "");
+    },
+
+// displays upcoming input, i.e. for error messages
+upcomingInput:function () {
+        var next = this.match;
+        if (next.length < 20) {
+            next += this._input.substr(0, 20-next.length);
+        }
+        return (next.substr(0,20) + (next.length > 20 ? '...' : '')).replace(/\n/g, "");
+    },
+
+// displays the character position where the lexing error occurred, i.e. for error messages
+showPosition:function () {
+        var pre = this.pastInput();
+        var c = new Array(pre.length + 1).join("-");
+        return pre + this.upcomingInput() + "\n" + c + "^";
+    },
+
+// test the lexed token: return FALSE when not a match, otherwise return token
+test_match:function (match, indexed_rule) {
+        var token,
+            lines,
+            backup;
+
+        if (this.options.backtrack_lexer) {
+            // save context
+            backup = {
+                yylineno: this.yylineno,
+                yylloc: {
+                    first_line: this.yylloc.first_line,
+                    last_line: this.last_line,
+                    first_column: this.yylloc.first_column,
+                    last_column: this.yylloc.last_column
+                },
+                yytext: this.yytext,
+                match: this.match,
+                matches: this.matches,
+                matched: this.matched,
+                yyleng: this.yyleng,
+                offset: this.offset,
+                _more: this._more,
+                _input: this._input,
+                yy: this.yy,
+                conditionStack: this.conditionStack.slice(0),
+                done: this.done
+            };
+            if (this.options.ranges) {
+                backup.yylloc.range = this.yylloc.range.slice(0);
+            }
+        }
+
+        lines = match[0].match(/(?:\r\n?|\n).*/g);
+        if (lines) {
+            this.yylineno += lines.length;
+        }
+        this.yylloc = {
+            first_line: this.yylloc.last_line,
+            last_line: this.yylineno + 1,
+            first_column: this.yylloc.last_column,
+            last_column: lines ?
+                         lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length :
+                         this.yylloc.last_column + match[0].length
+        };
+        this.yytext += match[0];
+        this.match += match[0];
+        this.matches = match;
+        this.yyleng = this.yytext.length;
+        if (this.options.ranges) {
+            this.yylloc.range = [this.offset, this.offset += this.yyleng];
+        }
+        this._more = false;
+        this._backtrack = false;
+        this._input = this._input.slice(match[0].length);
+        this.matched += match[0];
+        token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1]);
+        if (this.done && this._input) {
+            this.done = false;
+        }
+        if (token) {
+            return token;
+        } else if (this._backtrack) {
+            // recover context
+            for (var k in backup) {
+                this[k] = backup[k];
+            }
+            return false; // rule action called reject() implying the next rule should be tested instead.
+        }
+        return false;
+    },
+
+// return next match in input
+next:function () {
+        if (this.done) {
+            return this.EOF;
+        }
+        if (!this._input) {
+            this.done = true;
+        }
+
+        var token,
+            match,
+            tempMatch,
+            index;
+        if (!this._more) {
+            this.yytext = '';
+            this.match = '';
+        }
+        var rules = this._currentRules();
+        for (var i = 0; i < rules.length; i++) {
+            tempMatch = this._input.match(this.rules[rules[i]]);
+            if (tempMatch && (!match || tempMatch[0].length > match[0].length)) {
+                match = tempMatch;
+                index = i;
+                if (this.options.backtrack_lexer) {
+                    token = this.test_match(tempMatch, rules[i]);
+                    if (token !== false) {
+                        return token;
+                    } else if (this._backtrack) {
+                        match = false;
+                        continue; // rule action called reject() implying a rule MISmatch.
+                    } else {
+                        // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
+                        return false;
+                    }
+                } else if (!this.options.flex) {
+                    break;
+                }
+            }
+        }
+        if (match) {
+            token = this.test_match(match, rules[index]);
+            if (token !== false) {
+                return token;
+            }
+            // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
+            return false;
+        }
+        if (this._input === "") {
+            return this.EOF;
+        } else {
+            return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. Unrecognized text.\n' + this.showPosition(), {
+                text: "",
+                token: null,
+                line: this.yylineno
+            });
+        }
+    },
+
// return next match that has a token
lex:function lex() {
        var r = this.next();
        if (r) {
            return r;
        } else {
            // next() matched a non-token rule (e.g. skipped whitespace):
            // recurse until a token is produced. NOTE(review): each skipped
            // match in a row adds one stack frame; generated jison code.
            return this.lex();
        }
    },
+
+// activates a new lexer condition state (pushes the new lexer condition state onto the condition stack)
+begin:function begin(condition) {
+        this.conditionStack.push(condition);
+    },
+
// pop the previously active lexer condition state off the condition stack
popState:function popState() {
        var n = this.conditionStack.length - 1;
        if (n > 0) {
            return this.conditionStack.pop();
        } else {
            // Never pop the last remaining state; return it instead so the
            // stack always keeps its bottom ('INITIAL') entry.
            return this.conditionStack[0];
        }
    },
+
+// produce the lexer rule set which is active for the currently active lexer condition state
+_currentRules:function _currentRules() {
+        if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) {
+            return this.conditions[this.conditionStack[this.conditionStack.length - 1]].rules;
+        } else {
+            return this.conditions["INITIAL"].rules;
+        }
+    },
+
+// return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available
+topState:function topState(n) {
+        n = this.conditionStack.length - 1 - Math.abs(n || 0);
+        if (n >= 0) {
+            return this.conditionStack[n];
+        } else {
+            return "INITIAL";
+        }
+    },
+
+// alias for begin(condition)
+pushState:function pushState(condition) {
+        this.begin(condition);
+    },
+
+// return the number of states currently on the stack
+stateStackSize:function stateStackSize() {
+        return this.conditionStack.length;
+    },
+options: {},
+performAction: function anonymous(yy,yy_,$avoiding_name_collisions,YY_START) {
+
+var YYSTATE=YY_START;
+switch($avoiding_name_collisions) {
+case 0:/* skip whitespace */
+break;
+case 1:return 12;
+break;
+case 2:yy_.yytext = yy_.yytext.substr(1, yy_.yyleng-2); return 11;
+break;
+case 3:return 12;
+break;
+case 4:return 12;
+break;
+case 5:return 'bar';
+break;
+case 6:return 13;
+break;
+case 7:return 14;
+break;
+case 8:return 15;
+break;
+case 9:return 16;
+break;
+case 10:return 7;
+break;
+case 11:return 17;
+break;
+case 12:return 5;
+break;
+}
+},
+rules: [/^(?:\s+)/,/^(?:([a-zA-Z][a-zA-Z0-9_-]*))/,/^(?:\[([a-zA-Z][a-zA-Z0-9_-]*)\])/,/^(?:'[^']*')/,/^(?:\.)/,/^(?:bar\b)/,/^(?:\()/,/^(?:\))/,/^(?:\*)/,/^(?:\?)/,/^(?:\|)/,/^(?:\+)/,/^(?:$)/],
+conditions: {"INITIAL":{"rules":[0,1,2,3,4,5,6,7,8,9,10,11,12],"inclusive":true}}
+};
+return lexer;
+})();
+parser.lexer = lexer;
+function Parser () {
+  this.yy = {};
+}
+Parser.prototype = parser;parser.Parser = Parser;
+return new Parser;
+})();
+
+
// CommonJS wrapper (standard jison-generated tail): expose the parser when
// loaded via require(), and support running the file directly as a CLI
// (`node transform-parser.js FILE`) through exports.main.
if (typeof require !== 'undefined' && typeof exports !== 'undefined') {
exports.parser = ebnf;
exports.Parser = ebnf.Parser;
exports.parse = function () { return ebnf.parse.apply(ebnf, arguments); };
exports.main = function commonjsMain(args) {
    // args[0] is the script path, args[1] the grammar file to parse.
    if (!args[1]) {
        console.log('Usage: '+args[0]+' FILE');
        process.exit(1);
    }
    var source = require('fs').readFileSync(require('path').normalize(args[1]), "utf8");
    return exports.parser.parse(source);
};
if (typeof module !== 'undefined' && require.main === module) {
  exports.main(process.argv.slice(1));
}
}
\ No newline at end of file

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-javascript/node-ebnf-parser.git



More information about the Pkg-javascript-commits mailing list