1
0
Fork 0
mirror of https://github.com/jashkenas/coffeescript.git synced 2022-11-09 12:23:24 -05:00

things are in motion -- bin/node_coffee is the new JS-only command line ... it can pass some of the tests

This commit is contained in:
Jeremy Ashkenas 2010-02-11 01:57:33 -05:00
parent f761c25dcd
commit 872b36c11d
83 changed files with 8312 additions and 226 deletions

7
bin/node_coffee Executable file
View file

@ -0,0 +1,7 @@
#!/usr/bin/env node
// JS-only `coffee` command line. Mix Node's sys utilities (puts, etc.)
// into the global namespace for the rest of the compiler to use.
process.mixin(require('sys'));
// Make the compiled CoffeeScript modules requirable by bare name.
require.paths.unshift('./lib/coffee_script');
// Hand control to the command-line driver.
require('command_line').run();

View file

@ -1,11 +1,44 @@
(function(){
var compiler, path;
var compiler, lexer, parser, path;
process.mixin(require('./nodes'));
lexer = new (require('./lexer').Lexer)();
parser = require('./parser').parser;
// Thin wrapper for Jison compatibility around the real lexer.
parser.lexer = {
lex: function lex() {
var token;
token = this.tokens[this.pos] || [""];
this.pos += 1;
this.yylineno = token[2];
this.yytext = token[1];
return token[0];
},
setInput: function setInput(tokens) {
this.tokens = tokens;
return this.pos = 0;
},
upcomingInput: function upcomingInput() {
return "";
},
showPosition: function showPosition() {
return this.pos;
}
};
exports.VERSION = '0.5.0';
// Compile CoffeeScript to JavaScript, using the Coffee/Jison compiler.
exports.compile = function compile(code) {
var nodes, tokens;
tokens = lexer.tokenize(code);
nodes = parser.parse(tokens);
return nodes.compile();
};
//---------- Below this line is obsolete, for the Ruby compiler. ----------------
// Executes the `coffee` Ruby program to convert from CoffeeScript to JavaScript.
path = require('path');
// The path to the CoffeeScript executable.
compiler = path.normalize(path.dirname(__filename) + '/../../bin/coffee');
// Compile a string over stdin, with global variables, for the REPL.
exports.compile = function compile(code, callback) {
exports.ruby_compile = function ruby_compile(code, callback) {
var coffee, js;
js = '';
coffee = process.createChildProcess(compiler, ['--eval', '--no-wrap', '--globals']);
@ -21,7 +54,7 @@
return coffee.close();
};
// Compile a list of CoffeeScript files on disk.
exports.compile_files = function compile_files(paths, callback) {
exports.ruby_compile_files = function ruby_compile_files(paths, callback) {
var coffee, exit_ran, js;
js = '';
coffee = process.createChildProcess(compiler, ['--print'].concat(paths));

View file

@ -0,0 +1,121 @@
(function(){
var BANNER, SWITCHES, WATCH_INTERVAL, coffee, optparse, posix;
optparse = require('./../../vendor/optparse-js/src/optparse');
posix = require('posix');
coffee = require('coffee-script');
BANNER = "coffee compiles CoffeeScript source files into JavaScript.\n\nUsage:\n coffee path/to/script.coffee";
SWITCHES = [['-i', '--interactive', 'run an interactive CoffeeScript REPL'], ['-r', '--run', 'compile and run a CoffeeScript'], ['-o', '--output [DIR]', 'set the directory for compiled JavaScript'], ['-w', '--watch', 'watch scripts for changes, and recompile'], ['-p', '--print', 'print the compiled JavaScript to stdout'], ['-l', '--lint', 'pipe the compiled JavaScript through JSLint'], ['-e', '--eval', 'compile a cli scriptlet or read from stdin'], ['-t', '--tokens', 'print the tokens that the lexer produces'], ['-n', '--no-wrap', 'raw output, no function safety wrapper'], ['-g', '--globals', 'attach all top-level variables as globals'], ['-v', '--version', 'display CoffeeScript version'], ['-h', '--help', 'display this help message']];
WATCH_INTERVAL = 0.5;
// The CommandLine handles all of the functionality of the `coffee` utility.
exports.run = function run() {
this.parse_options();
this.compile_scripts();
return this;
};
// The "--help" usage message.
exports.usage = function usage() {
puts('\n' + this.option_parser.toString() + '\n');
return process.exit(0);
};
// The "--version" message.
exports.version = function version() {
puts("CoffeeScript version " + coffee.VERSION);
return process.exit(0);
};
// Compile a single source file to JavaScript.
exports.compile = function compile(script, source) {
var options;
source = source || 'error';
options = {
};
if (this.options.no_wrap) {
options.no_wrap = true;
}
if (this.options.globals) {
options.globals = true;
}
try {
return CoffeeScript.compile(script, options);
} catch (error) {
process.stdio.writeError(source + ': ' + error.toString());
if (!(this.options.watch)) {
process.exit(1);
}
return null;
}
};
// Compiles the source CoffeeScript, returning the desired JavaScript, tokens,
// or JSLint results.
exports.compile_scripts = function compile_scripts() {
var source;
if (!((source = this.sources.shift()))) {
return null;
}
return posix.cat(source).addCallback((function(__this) {
var __func = function(code) {
var js;
js = coffee.compile(code);
if (this.options.run) {
return eval(js);
}
if (this.options.print) {
return puts(js);
}
return exports.compile_scripts();
};
return (function() {
return __func.apply(__this, arguments);
});
})(this));
};
// Use OptionParser for all the options.
exports.parse_options = function parse_options() {
var oparser, opts, paths;
opts = (this.options = {
});
oparser = (this.option_parser = new optparse.OptionParser(SWITCHES));
oparser.add = oparser['on'];
oparser.add('interactive', function() {
return opts.interactive = true;
});
oparser.add('run', function() {
return opts.run = true;
});
oparser.add('output', function(dir) {
return opts.output = dir;
});
oparser.add('watch', function() {
return opts.watch = true;
});
oparser.add('print', function() {
return opts.print = true;
});
oparser.add('lint', function() {
return opts.lint = true;
});
oparser.add('eval', function() {
return opts.eval = true;
});
oparser.add('tokens', function() {
return opts.tokens = true;
});
oparser.add('help', (function(__this) {
var __func = function() {
return this.usage();
};
return (function() {
return __func.apply(__this, arguments);
});
})(this));
oparser.add('version', (function(__this) {
var __func = function() {
return this.version();
};
return (function() {
return __func.apply(__this, arguments);
});
})(this));
paths = oparser.parse(process.ARGV);
return this.sources = paths.slice(2, paths.length);
};
})();

View file

@ -202,7 +202,7 @@ Usage:
opts.on('-n', '--no-wrap', 'raw output, no function safety wrapper') do |n|
@options[:no_wrap] = true
end
opts.on('-g', '--globals', 'attach all top-level variable as globals') do |n|
opts.on('-g', '--globals', 'attach all top-level variables as globals') do |n|
@options[:globals] = true
end
opts.on_tail('--narwhal', 'use Narwhal instead of Node.js') do |n|

View file

@ -171,17 +171,17 @@
}), o("Expression != Expression", function() {
return new OpNode('!=', $1, $3);
}), o("Expression IS Expression", function() {
return new OpNode('IS', $1, $3);
return new OpNode('is', $1, $3);
}), o("Expression ISNT Expression", function() {
return new OpNode('ISNT', $1, $3);
return new OpNode('isnt', $1, $3);
}), o("Expression && Expression", function() {
return new OpNode('&&', $1, $3);
}), o("Expression || Expression", function() {
return new OpNode('||', $1, $3);
}), o("Expression AND Expression", function() {
return new OpNode('AND', $1, $3);
return new OpNode('and', $1, $3);
}), o("Expression OR Expression", function() {
return new OpNode('OR', $1, $3);
return new OpNode('or', $1, $3);
}), o("Expression ? Expression", function() {
return new OpNode('?', $1, $3);
}), o("Expression -= Expression", function() {
@ -201,9 +201,9 @@
}), o("Expression ?= Expression", function() {
return new OpNode('?=', $1, $3);
}), o("Expression INSTANCEOF Expression", function() {
return new OpNode('INSTANCEOF', $1, $3);
return new OpNode('instanceof', $1, $3);
}), o("Expression IN Expression", function() {
return new OpNode('IN', $1, $3);
return new OpNode('in', $1, $3);
})
],
// The existence operator.
@ -562,7 +562,7 @@
debug: false
});
// Save the parser to a file.
puts(parser.generate());
// puts parser.generate()
posix = require('posix');
posix.open('parser.js', process.O_CREAT | process.O_WRONLY, 0755).addCallback(function(fd) {
return posix.write(fd, parser.generate());

View file

@ -2,148 +2,6 @@
var AccessorNode, ArrayNode, AssignNode, CallNode, ClosureNode, CodeNode, CommentNode, ExistenceNode, Expressions, ExtendsNode, ForNode, IDENTIFIER, IfNode, IndexNode, LiteralNode, Node, ObjectNode, OpNode, ParentheticalNode, PushNode, RangeNode, ReturnNode, SliceNode, SplatNode, TAB, TRAILING_WHITESPACE, ThisNode, ThrowNode, TryNode, ValueNode, WhileNode, any, compact, del, dup, flatten, inherit, merge, statement;
var __hasProp = Object.prototype.hasOwnProperty;
process.mixin(require('./scope'));
// The abstract base class for all CoffeeScript nodes.
// All nodes implement a "compile_node" method, which performs the
// code generation for that node. To compile a node, call the "compile"
// method, which wraps "compile_node" in some extra smarts, to know when the
// generated code should be wrapped up in a closure. An options hash is passed
// and cloned throughout, containing messages from higher in the AST,
// information about the current scope, and indentation level.
exports.Expressions = function Expressions() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.LiteralNode = function LiteralNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.ReturnNode = function ReturnNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.CommentNode = function CommentNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.CallNode = function CallNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.ExtendsNode = function ExtendsNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.ValueNode = function ValueNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.AccessorNode = function AccessorNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.IndexNode = function IndexNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.RangeNode = function RangeNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.SliceNode = function SliceNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.ThisNode = function ThisNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.AssignNode = function AssignNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.OpNode = function OpNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.CodeNode = function CodeNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.SplatNode = function SplatNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.ObjectNode = function ObjectNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.ArrayNode = function ArrayNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.PushNode = function PushNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.ClosureNode = function ClosureNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.WhileNode = function WhileNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.ForNode = function ForNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.TryNode = function TryNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.ThrowNode = function ThrowNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.ExistenceNode = function ExistenceNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.ParentheticalNode = function ParentheticalNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
exports.IfNode = function IfNode() {
var arguments = Array.prototype.slice.call(arguments, 0);
this.name = this.constructor.name;
return this.values = arguments;
};
// Some helper functions
// Tabs are two spaces for pretty printing.
TAB = ' ';
@ -261,11 +119,6 @@
}
return klass;
};
// # Provide a quick implementation of a children method.
// children: (klass, attrs...) ->
// klass::children: ->
// nodes: this[attr] for attr in attrs
// compact flatten nodes
// Mark a node as a statement, or a statement only.
statement = function statement(klass, only) {
klass.prototype.is_statement = function is_statement() {
@ -528,7 +381,7 @@
var __a, __b, baseline, code, only, part, parts, prop, props, soaked, temp;
soaked = false;
only = del(o, 'only_first');
props = only ? this.properties.slice(0, this.properties.length) : this.properties;
props = only ? this.properties.slice(0, this.properties.length - 1) : this.properties;
baseline = this.base.compile(o);
parts = [baseline];
__a = props;
@ -931,11 +784,12 @@
return assigns.join("\n");
},
compile_splice: function compile_splice(o) {
var from, name, plus, range, to;
var from, l, name, plus, range, to;
name = this.variable.compile(merge(o, {
only_first: true
}));
range = this.variable.properties.last.range;
l = this.variable.properties.length;
range = this.variable.properties[l - 1].range;
plus = range.exclusive ? '' : ' + 1';
from = range.from.compile(o);
to = range.to.compile(o) + ' - ' + from + plus;

View file

@ -161,17 +161,17 @@ case 75:this.$ = new OpNode('==', $$[$0-3+1-1], $$[$0-3+3-1]);
break;
case 76:this.$ = new OpNode('!=', $$[$0-3+1-1], $$[$0-3+3-1]);
break;
case 77:this.$ = new OpNode('IS', $$[$0-3+1-1], $$[$0-3+3-1]);
case 77:this.$ = new OpNode('is', $$[$0-3+1-1], $$[$0-3+3-1]);
break;
case 78:this.$ = new OpNode('ISNT', $$[$0-3+1-1], $$[$0-3+3-1]);
case 78:this.$ = new OpNode('isnt', $$[$0-3+1-1], $$[$0-3+3-1]);
break;
case 79:this.$ = new OpNode('&&', $$[$0-3+1-1], $$[$0-3+3-1]);
break;
case 80:this.$ = new OpNode('||', $$[$0-3+1-1], $$[$0-3+3-1]);
break;
case 81:this.$ = new OpNode('AND', $$[$0-3+1-1], $$[$0-3+3-1]);
case 81:this.$ = new OpNode('and', $$[$0-3+1-1], $$[$0-3+3-1]);
break;
case 82:this.$ = new OpNode('OR', $$[$0-3+1-1], $$[$0-3+3-1]);
case 82:this.$ = new OpNode('or', $$[$0-3+1-1], $$[$0-3+3-1]);
break;
case 83:this.$ = new OpNode('?', $$[$0-3+1-1], $$[$0-3+3-1]);
break;
@ -191,9 +191,9 @@ case 90:this.$ = new OpNode('&&=', $$[$0-3+1-1], $$[$0-3+3-1]);
break;
case 91:this.$ = new OpNode('?=', $$[$0-3+1-1], $$[$0-3+3-1]);
break;
case 92:this.$ = new OpNode('INSTANCEOF', $$[$0-3+1-1], $$[$0-3+3-1]);
case 92:this.$ = new OpNode('instanceof', $$[$0-3+1-1], $$[$0-3+3-1]);
break;
case 93:this.$ = new OpNode('IN', $$[$0-3+1-1], $$[$0-3+3-1]);
case 93:this.$ = new OpNode('in', $$[$0-3+1-1], $$[$0-3+3-1]);
break;
case 94:this.$ = new ExistenceNode($$[$0-2+1-1]);
break;

View file

@ -11,7 +11,7 @@
};
// The main REPL function. Called every time a line of code is entered.
readline = function readline(code) {
return coffee.compile(code, run);
return coffee.ruby_compile(code, run);
};
// Attempt to evaluate the command. If there's an exception, print it.
run = function run(js) {

View file

@ -5,7 +5,7 @@
coffee = require('./coffee-script');
paths = process.ARGV;
paths = paths.slice(2, paths.length);
paths.length ? coffee.compile_files(paths, function(js) {
paths.length ? coffee.ruby_compile_files(paths, function(js) {
return eval(js);
}) : require('./repl');
})();

View file

@ -1,12 +1,41 @@
process.mixin require './nodes'
lexer: new (require('./lexer').Lexer)()
parser: require('./parser').parser
# Thin wrapper for Jison compatibility around the real lexer.
parser.lexer: {
lex: ->
token: @tokens[@pos] or [""]
@pos += 1
this.yylineno: token[2]
this.yytext: token[1]
token[0]
setInput: (tokens) ->
@tokens: tokens
@pos: 0
upcomingInput: -> ""
showPosition: -> @pos
}
exports.VERSION: '0.5.0'
# Compile CoffeeScript to JavaScript, using the Coffee/Jison compiler.
exports.compile: (code) ->
tokens: lexer.tokenize code
nodes: parser.parse tokens
nodes.compile()
#---------- Below this line is obsolete, for the Ruby compiler. ----------------
# Executes the `coffee` Ruby program to convert from CoffeeScript to JavaScript.
path: require('path')
# The path to the CoffeeScript executable.
compiler: path.normalize(path.dirname(__filename) + '/../../bin/coffee')
# Compile a string over stdin, with global variables, for the REPL.
exports.compile: (code, callback) ->
exports.ruby_compile: (code, callback) ->
js: ''
coffee: process.createChildProcess compiler, ['--eval', '--no-wrap', '--globals']
@ -21,7 +50,7 @@ exports.compile: (code, callback) ->
# Compile a list of CoffeeScript files on disk.
exports.compile_files: (paths, callback) ->
exports.ruby_compile_files: (paths, callback) ->
js: ''
coffee: process.createChildProcess compiler, ['--print'].concat(paths)

88
src/command_line.coffee Normal file
View file

@ -0,0 +1,88 @@
optparse: require('./../../vendor/optparse-js/src/optparse')
posix: require 'posix'
coffee: require 'coffee-script'

# Usage text printed by the "--help" message.
BANNER: '''
coffee compiles CoffeeScript source files into JavaScript.

Usage:
 coffee path/to/script.coffee
'''

# Every command-line flag the `coffee` utility understands.
SWITCHES: [
  ['-i', '--interactive', 'run an interactive CoffeeScript REPL']
  ['-r', '--run', 'compile and run a CoffeeScript']
  ['-o', '--output [DIR]', 'set the directory for compiled JavaScript']
  ['-w', '--watch', 'watch scripts for changes, and recompile']
  ['-p', '--print', 'print the compiled JavaScript to stdout']
  ['-l', '--lint', 'pipe the compiled JavaScript through JSLint']
  ['-e', '--eval', 'compile a cli scriptlet or read from stdin']
  ['-t', '--tokens', 'print the tokens that the lexer produces']
  ['-n', '--no-wrap', 'raw output, no function safety wrapper']
  ['-g', '--globals', 'attach all top-level variables as globals']
  ['-v', '--version', 'display CoffeeScript version']
  ['-h', '--help', 'display this help message']
]

# Poll interval (seconds) intended for "--watch" mode -- no watcher is
# wired up in this module yet.
WATCH_INTERVAL: 0.5

# The CommandLine handles all of the functionality of the `coffee` utility.
exports.run: ->
  @parse_options()
  @compile_scripts()
  this

# The "--help" usage message.
exports.usage: ->
  puts '\n' + @option_parser.toString() + '\n'
  process.exit 0

# The "--version" message.
exports.version: ->
  puts "CoffeeScript version " + coffee.VERSION
  process.exit 0

# Compile a single source file to JavaScript, honoring the --no-wrap and
# --globals options. Returns null after a compile error (exiting the
# process unless we're in --watch mode).
exports.compile: (script, source) ->
  source ||= 'error'
  options: {}
  options.no_wrap: true if @options.no_wrap
  options.globals: true if @options.globals
  try
    # Fix: the compiler module is bound to the local `coffee` variable
    # (required above); `CoffeeScript` was an undefined global here.
    coffee.compile(script, options)
  catch error
    process.stdio.writeError(source + ': ' + error.toString())
    process.exit 1 unless @options.watch
    null

# Compiles the source CoffeeScript, returning the desired JavaScript, tokens,
# or JSLint results.
exports.compile_scripts: ->
  return unless source: @sources.shift()
  posix.cat(source).addCallback (code) =>
    js: coffee.compile code
    return eval js if @options.run
    return puts js if @options.print
    exports.compile_scripts()

# Use OptionParser for all the options.
exports.parse_options: ->
  opts: @options: {}
  oparser: @option_parser: new optparse.OptionParser SWITCHES
  oparser.add: oparser['on']
  oparser.add 'interactive', -> opts.interactive: true
  oparser.add 'run', -> opts.run: true
  oparser.add 'output', (dir) -> opts.output: dir
  oparser.add 'watch', -> opts.watch: true
  oparser.add 'print', -> opts.print: true
  oparser.add 'lint', -> opts.lint: true
  oparser.add 'eval', -> opts.eval: true
  oparser.add 'tokens', -> opts.tokens: true
  oparser.add 'help', => @usage()
  oparser.add 'version', => @version()
  paths: oparser.parse(process.ARGV)
  @sources: paths[2...paths.length]

View file

@ -170,13 +170,13 @@ grammar: {
o "Expression == Expression", -> new OpNode('==', $1, $3)
o "Expression != Expression", -> new OpNode('!=', $1, $3)
o "Expression IS Expression", -> new OpNode('IS', $1, $3)
o "Expression ISNT Expression", -> new OpNode('ISNT', $1, $3)
o "Expression IS Expression", -> new OpNode('is', $1, $3)
o "Expression ISNT Expression", -> new OpNode('isnt', $1, $3)
o "Expression && Expression", -> new OpNode('&&', $1, $3)
o "Expression || Expression", -> new OpNode('||', $1, $3)
o "Expression AND Expression", -> new OpNode('AND', $1, $3)
o "Expression OR Expression", -> new OpNode('OR', $1, $3)
o "Expression AND Expression", -> new OpNode('and', $1, $3)
o "Expression OR Expression", -> new OpNode('or', $1, $3)
o "Expression ? Expression", -> new OpNode('?', $1, $3)
o "Expression -= Expression", -> new OpNode('-=', $1, $3)
@ -188,8 +188,8 @@ grammar: {
o "Expression &&= Expression", -> new OpNode('&&=', $1, $3)
o "Expression ?= Expression", -> new OpNode('?=', $1, $3)
o "Expression INSTANCEOF Expression", -> new OpNode('INSTANCEOF', $1, $3)
o "Expression IN Expression", -> new OpNode('IN', $1, $3)
o "Expression INSTANCEOF Expression", -> new OpNode('instanceof', $1, $3)
o "Expression IN Expression", -> new OpNode('in', $1, $3)
]
# The existence operator.
@ -461,7 +461,7 @@ tokens: tokens.join(" ")
parser: new Parser({tokens: tokens, bnf: bnf, operators: operators, startSymbol: 'Root'}, {debug: false})
# Save the parser to a file.
puts parser.generate()
# puts parser.generate()
posix: require 'posix'
posix.open('parser.js', process.O_CREAT | process.O_WRONLY, 0755).addCallback (fd) ->
posix.write(fd, parser.generate())

View file

@ -1,41 +1,5 @@
process.mixin require './scope'
# The abstract base class for all CoffeeScript nodes.
# All nodes implement a "compile_node" method, which performs the
# code generation for that node. To compile a node, call the "compile"
# method, which wraps "compile_node" in some extra smarts, to know when the
# generated code should be wrapped up in a closure. An options hash is passed
# and cloned throughout, containing messages from higher in the AST,
# information about the current scope, and indentation level.
exports.Expressions : -> @name: this.constructor.name; @values: arguments
exports.LiteralNode : -> @name: this.constructor.name; @values: arguments
exports.ReturnNode : -> @name: this.constructor.name; @values: arguments
exports.CommentNode : -> @name: this.constructor.name; @values: arguments
exports.CallNode : -> @name: this.constructor.name; @values: arguments
exports.ExtendsNode : -> @name: this.constructor.name; @values: arguments
exports.ValueNode : -> @name: this.constructor.name; @values: arguments
exports.AccessorNode : -> @name: this.constructor.name; @values: arguments
exports.IndexNode : -> @name: this.constructor.name; @values: arguments
exports.RangeNode : -> @name: this.constructor.name; @values: arguments
exports.SliceNode : -> @name: this.constructor.name; @values: arguments
exports.ThisNode : -> @name: this.constructor.name; @values: arguments
exports.AssignNode : -> @name: this.constructor.name; @values: arguments
exports.OpNode : -> @name: this.constructor.name; @values: arguments
exports.CodeNode : -> @name: this.constructor.name; @values: arguments
exports.SplatNode : -> @name: this.constructor.name; @values: arguments
exports.ObjectNode : -> @name: this.constructor.name; @values: arguments
exports.ArrayNode : -> @name: this.constructor.name; @values: arguments
exports.PushNode : -> @name: this.constructor.name; @values: arguments
exports.ClosureNode : -> @name: this.constructor.name; @values: arguments
exports.WhileNode : -> @name: this.constructor.name; @values: arguments
exports.ForNode : -> @name: this.constructor.name; @values: arguments
exports.TryNode : -> @name: this.constructor.name; @values: arguments
exports.ThrowNode : -> @name: this.constructor.name; @values: arguments
exports.ExistenceNode : -> @name: this.constructor.name; @values: arguments
exports.ParentheticalNode : -> @name: this.constructor.name; @values: arguments
exports.IfNode : -> @name: this.constructor.name; @values: arguments
# Some helper functions
# Tabs are two spaces for pretty printing.
@ -92,12 +56,6 @@ inherit: (parent, props) ->
(klass.prototype[name]: prop) for name, prop of props
klass
# # Provide a quick implementation of a children method.
# children: (klass, attrs...) ->
# klass::children: ->
# nodes: this[attr] for attr in attrs
# compact flatten nodes
# Mark a node as a statement, or a statement only.
statement: (klass, only) ->
klass::is_statement: -> true
@ -308,7 +266,7 @@ ValueNode: exports.ValueNode: inherit Node, {
compile_node: (o) ->
soaked: false
only: del(o, 'only_first')
props: if only then @properties[0...@properties.length] else @properties
props: if only then @properties[0...@properties.length - 1] else @properties
baseline: @base.compile o
parts: [baseline]
@ -647,7 +605,8 @@ AssignNode: exports.AssignNode: inherit Node, {
compile_splice: (o) ->
name: @variable.compile(merge(o, {only_first: true}))
range: @variable.properties.last.range
l: @variable.properties.length
range: @variable.properties[l - 1].range
plus: if range.exclusive then '' else ' + 1'
from: range.from.compile(o)
to: range.to.compile(o) + ' - ' + from + plus

View file

@ -9,7 +9,7 @@ prompt: 'coffee> '
quit: -> process.stdio.close()
# The main REPL function. Called every time a line of code is entered.
readline: (code) -> coffee.compile code, run
readline: (code) -> coffee.ruby_compile code, run
# Attempt to evaluate the command. If there's an exception, print it.
run: (js) ->

View file

@ -7,6 +7,6 @@ paths: process.ARGV
paths: paths[2...paths.length]
if paths.length
coffee.compile_files paths, (js) -> eval(js)
coffee.ruby_compile_files paths, (js) -> eval(js)
else
require './repl'

31
vendor/jison/Jakefile vendored Normal file
View file

@ -0,0 +1,31 @@
#!/usr/bin/env narwhal
// Jake build script for the vendored Jison parser generator; runs under
// Narwhal, not Node. NOTE(review): FILE is required but never used below.
var FILE = require("file"),
OS = require("os"),
jake = require("jake");
// "build" regenerates both of Jison's bundled parsers.
jake.task("build", ["build:bnf", "build:lex"]);
// Rebuild the BNF grammar parser and install it into lib/.
jake.task("build:bnf", function () {
OS.system(['./bin/jison', 'src/bnf.jison', 'src/bnf.jisonlex']);
OS.system(['mv', 'bnf.js', 'lib/jison/util/bnf-parser.js']);
});
// Rebuild the lex-spec parser and install it into lib/.
jake.task("build:lex", function () {
OS.system(['./bin/jison', 'src/jisonlex.jison', 'src/jisonlex.jisonlex']);
OS.system(['mv', 'jisonlex.js', 'lib/jison/util/lex-parser.js']);
});
// Run the full test suite, or one of the targeted sub-suites below.
jake.task("test", function () {
OS.system(['narwhal', 'tests/all-tests.js']);
});
jake.task("test:parser", function () {
OS.system(['narwhal', 'tests/parser/parser-tests.js']);
});
jake.task("test:lexer", function () {
OS.system(['narwhal', 'tests/lexer/lexer-tests.js']);
});
jake.task("test:grammar", function () {
OS.system(['narwhal', 'tests/grammar/grammar-tests.js']);
});

347
vendor/jison/README.md vendored Normal file
View file

@ -0,0 +1,347 @@
Jison
=====
An API for creating parsers in JavaScript
-----------------------------------------
Jison generates bottom-up parsers in JavaScript. Its API is similar to Bison's, hence the name. It supports many of Bison's major features, plus some of its own. If you are new to parser generators such as Bison, and Context-free Grammars in general, a [good introduction][1] is found in the Bison manual. If you already know Bison, Jison should be easy to pick up.
A brief warning before proceeding: **the API is ridiculously unstable** right now. The goal is to mirror Bison where it makes sense, but we're not even there yet. Also, optimization has not been a main focus as of yet.
Briefly, Jison takes a JSON encoded grammar specification and outputs a JavaScript file capable of parsing the language described by that grammar specification. You can then use the generated script to parse inputs and accept, reject, or perform actions based on the input.
Installation
------------
**Prerequisite**: To run Jison from the command line, you'll need to have [Narwhal][2] installed and available from your `PATH`.
Clone the github repository:
git clone git://github.com/zaach/jison.git
Usage from the command line
-----------------------
Now you're ready to generate some parsers:
cd jison
narwhal bin/jison examples/calculator.jison examples/calculator.jisonlex
This will generate a `calculator.js` file in your current working directory. This file can be used to parse an input file, like so:
echo "2^32 / 1024" > testcalc
narwhal calculator.js testcalc
This will print out `4194304`.
Usage from a CommonJS module
--------------------------
You can generate parsers programmatically from JavaScript as well. Assuming Jison is in your commonjs environment's load path:
// mygenerator.js
var Parser = require("jison").Parser;
var grammar = {
"lex": {
"rules": [
["\\s+", "/* skip whitespace */"],
["[a-f0-9]+", "return 'HEX';"]
]
},
"bnf": {
"hex_strings" :[ "hex_strings HEX",
"HEX" ]
}
};
var parser = new Parser(grammar);
// generate source, ready to be written to disk
var parserSource = parser.generate();
// you can also use the parser directly from memory
// returns true
parser.parse("adfe34bc e82a");
// throws lexical error
parser.parse("adfe34bc zxg");
Using the generated parser
--------------------------
So, you have generated your parser through the command line or JavaScript API and have saved it to disk. Now it can be put to use.
As demonstrated before, the parser can be used from the command line:
narwhal calculator.js testcalc
Though, more ideally, the parser will be a dependency of another module. You can require it from another module like so:
// mymodule.js
var parser = require("./calculator").parser;
function exec (input) {
return parser.parse(input);
}
var twenty = exec("4 * 5");
Or more succinctly:
// mymodule.js
function exec (input) {
return require("./calculator").parse(input);
}
var twenty = exec("4 * 5");
Using the parser in a web page
----------------------------
The generated parser script may be included in a web page without any need for a CommonJS loading environment. It's as simple as pointing to it via a script tag:
<script src="calc.js"></script>
When you generate the parser, you can specify the variable name it will be declared as:
// mygenerator.js
var parserSource = generator.generate({moduleName: "calc"});
// then write parserSource to a file called, say, calc.js
Whatever `moduleName` you specified will be the variable you can access the parser from in your web page:
<!-- mypage.html -->
...
<script src="calc.js"></script>
<script>
calc.parse("42 / 0");
</script>
...
The moduleName you specify can also include a namespace, e.g:
// mygenerator.js
var parserSource = parser.generate({moduleName: "myCalculator.parser"});
And could be used like so:
<!-- mypage.html -->
...
<script>
var myCalculator = {};
</script>
<script src="calc.js"></script>
<script>
myCalculator.parser.parse("42 / 0");
</script>
...
Or something like that -- you get the picture.
A demo of the calculator script used in a web page is [here](http://zaach.github.com/jison/demo/calc.html) and the source of the page and the narwhal script to generate the parser are [here](http://gist.github.com/265842).
Specifying a language
---------------------
The process of parsing a language involves two phases: **lexical analysis** (tokenizing) and **parsing**, which the Lex/Yacc and Flex/Bison combinations are famous for. Jison lets you specify a parser much like you would using Bison/Flex, with separate files for tokenization rules and for the language grammar.
For example, here is the calculator parser:
calc.jisonlex, tokenization rules
%%
\s+ {/* skip whitespace */}
[0-9]+("."[0-9]+)?\b {return 'NUMBER';}
"*" {return '*';}
"/" {return '/';}
"-" {return '-';}
"+" {return '+';}
"^" {return '^';}
"(" {return '(';}
")" {return ')';}
"PI" {return 'PI';}
"E" {return 'E';}
<<EOF>> {return 'EOF';}
and calc.jison, language grammar
/* description: Grammar for a parser that parses and executes mathematical expressions. */
%left '+' '-'
%left '*' '/'
%left '^'
%left UMINUS
%%
S
: e EOF
{print($1); return $1;}
;
e
: e '+' e
{$$ = $1+$3;}
| e '-' e
{$$ = $1-$3;}
| e '*' e
{$$ = $1*$3;}
| e '/' e
{$$ = $1/$3;}
| e '^' e
{$$ = Math.pow($1, $3);}
| '-' e
{$$ = -$2;} %prec UMINUS
| '(' e ')'
{$$ = $2;}
| NUMBER
{$$ = Number(yytext);}
| E
{$$ = Math.E;}
| PI
{$$ = Math.PI;}
;
which compiles down to this JSON:
{
"lex": {
"rules": [
["\\s+", "/* skip whitespace */"],
["[0-9]+(?:\\.[0-9]+)?\\b", "return 'NUMBER';"],
["\\*", "return '*';"],
["\\/", "return '/';"],
["-", "return '-';"],
["\\+", "return '+';"],
["\\^", "return '^';"],
["\\(", "return '(';"],
["\\)", "return ')';"],
["PI\\b", "return 'PI';"],
["E\\b", "return 'E';"],
["$", "return 'EOF';"]
]
},
"operators": [
["left", "+", "-"],
["left", "*", "/"],
["left", "^"],
["left", "UMINUS"]
],
"bnf": {
"S" :[[ "e EOF", "print($1); return $1;" ]],
"e" :[[ "e + e", "$$ = $1+$3;" ],
[ "e - e", "$$ = $1-$3;" ],
[ "e * e", "$$ = $1*$3;" ],
[ "e / e", "$$ = $1/$3;" ],
[ "e ^ e", "$$ = Math.pow($1, $3);" ],
[ "- e", "$$ = -$2;", {"prec": "UMINUS"} ],
[ "( e )", "$$ = $2;" ],
[ "NUMBER", "$$ = Number(yytext);" ],
[ "E", "$$ = Math.E;" ],
[ "PI", "$$ = Math.PI;" ]]
}
}
Jison accepts both the Bison/Flex style formats, or the raw JSON format, e.g:
narwhal bin/jison examples/calculator.jison examples/calculator.jisonlex
or
narwhal bin/jison examples/calculator.json
More examples can be found in the `examples/` and `tests/parser/` directories.
Sharing scope
------------
In Bison, code is expected to be lexically defined within the scope of the semantic actions. E.g., chunks of code may be included in the generated parser source, which are available from semantic actions.
Jison is more modular. Instead of pulling code into the generated module, the generated module is expected to be required and used by other modules. This means that if you want to expose functionality to the semantic actions, you can't rely on it being available through lexical scoping. Instead, the parser has a `yy` property which is exposed to actions as the `yy` free variable. Any functionality attached to this property is available in both lexical and semantic actions through the `yy` free variable.
An example from orderly.js:
var parser = require("./orderly/parse").parser;
// set parser's shared scope
parser.yy = require("./orderly/scope");
// returns the JSON object
var parse = exports.parse = function (input) {
return parser.parse(input);
};
...
The `scope` module contains logic for building data structures, which is used within the semantic actions.
*TODO: More on this.*
Lexical Analysis
----------------
Jison includes a rather rudimentary lexer, though **any module that supports the basic lexer API could be used** in its place. Jison's lexer uses the `lex` key of the JSON grammar spec, where the rules for matching a token are defined along with the action to execute on a match. Usually, the action will return the token which is used by the Jison parser. A custom lexer could be used instead with its own methods of tokenizing.
*TODO: More on this.*
Parsing algorithms
------------------
Like Bison, Jison can recognize languages described by LALR(1) grammars, though it also has modes for LR(0), SLR(1), and LR(1). It also has a special mode for generating LL(1) parse tables (requested by my professor,) and could be extended to generate a recursive descent parser for LL(k) languages in the future. But, for now, Jison is geared toward bottom-up parsing.
*LR(1) mode is currently not practical for use with anything other than toy grammars, but that is entirely a consequence of the algorithm used, and may change in the future.*
Real world example
------------------
I wrote a parser for [Orderly][3] using Jison. Some benefits I found were:
- If modeled after the normative language grammar, it is guaranteed to recognize the correct language.
- Adding new syntax is straight forward.
- It was much faster to develop than if I were to attempt implementing a (top-down) parser from scratch. But for others not used to grammar specifications, this might not be the case.
Contributors
------------
- Zach Carter
- Jarred Ligatti
- Manuel E. Bermúdez
License
-------
> Copyright (c) 2009 Zachary Carter
>
> Permission is hereby granted, free of
> charge, to any person obtaining a
> copy of this software and associated
> documentation files (the "Software"),
> to deal in the Software without
> restriction, including without
> limitation the rights to use, copy,
> modify, merge, publish, distribute,
> sublicense, and/or sell copies of the
> Software, and to permit persons to
> whom the Software is furnished to do
> so, subject to the following
> conditions:
>
> The above copyright notice and this
> permission notice shall be included
> in all copies or substantial portions
> of the Software.
>
> THE SOFTWARE IS PROVIDED "AS IS",
> WITHOUT WARRANTY OF ANY KIND, EXPRESS
> OR IMPLIED, INCLUDING BUT NOT LIMITED
> TO THE WARRANTIES OF MERCHANTABILITY,
> FITNESS FOR A PARTICULAR PURPOSE AND
> NONINFRINGEMENT. IN NO EVENT SHALL THE
> AUTHORS OR COPYRIGHT HOLDERS BE
> LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
> LIABILITY, WHETHER IN AN ACTION OF
> CONTRACT, TORT OR OTHERWISE, ARISING
> FROM, OUT OF OR IN CONNECTION WITH THE
> SOFTWARE OR THE USE OR OTHER DEALINGS
> IN THE SOFTWARE.
[1]: http://dinosaur.compilertools.net/bison/bison_4.html
[2]: http://github.com/280north/narwhal
[3]: http://github.com/zaach/orderly.js

3
vendor/jison/bin/jison vendored Executable file
View file

@ -0,0 +1,3 @@
#!/usr/bin/env narwhal
require('jison').main(system.args);

3
vendor/jison/bin/json2jison vendored Executable file
View file

@ -0,0 +1,3 @@
#!/usr/bin/env narwhal
require('jison/json2jison').main(system.args);

415
vendor/jison/examples/ansic.jison vendored Normal file
View file

@ -0,0 +1,415 @@
%token IDENTIFIER CONSTANT STRING_LITERAL SIZEOF
%token PTR_OP INC_OP DEC_OP LEFT_OP RIGHT_OP LE_OP GE_OP EQ_OP NE_OP
%token AND_OP OR_OP MUL_ASSIGN DIV_ASSIGN MOD_ASSIGN ADD_ASSIGN
%token SUB_ASSIGN LEFT_ASSIGN RIGHT_ASSIGN AND_ASSIGN
%token XOR_ASSIGN OR_ASSIGN TYPE_NAME
%token TYPEDEF EXTERN STATIC AUTO REGISTER
%token CHAR SHORT INT LONG SIGNED UNSIGNED FLOAT DOUBLE CONST VOLATILE VOID
%token STRUCT UNION ENUM ELLIPSIS
%token CASE DEFAULT IF ELSE SWITCH WHILE DO FOR GOTO CONTINUE BREAK RETURN
%start translation_unit
%%
primary_expression
: IDENTIFIER
| CONSTANT
| STRING_LITERAL
| '(' expression ')'
;
postfix_expression
: primary_expression
| postfix_expression '[' expression ']'
| postfix_expression '(' ')'
| postfix_expression '(' argument_expression_list ')'
| postfix_expression '.' IDENTIFIER
| postfix_expression PTR_OP IDENTIFIER
| postfix_expression INC_OP
| postfix_expression DEC_OP
;
argument_expression_list
: assignment_expression
| argument_expression_list ',' assignment_expression
;
unary_expression
: postfix_expression
| INC_OP unary_expression
| DEC_OP unary_expression
| unary_operator cast_expression
| SIZEOF unary_expression
| SIZEOF '(' type_name ')'
;
unary_operator
: '&'
| '*'
| '+'
| '-'
| '~'
| '!'
;
cast_expression
: unary_expression
| '(' type_name ')' cast_expression
;
multiplicative_expression
: cast_expression
| multiplicative_expression '*' cast_expression
| multiplicative_expression '/' cast_expression
| multiplicative_expression '%' cast_expression
;
additive_expression
: multiplicative_expression
| additive_expression '+' multiplicative_expression
| additive_expression '-' multiplicative_expression
;
shift_expression
: additive_expression
| shift_expression LEFT_OP additive_expression
| shift_expression RIGHT_OP additive_expression
;
relational_expression
: shift_expression
| relational_expression '<' shift_expression
| relational_expression '>' shift_expression
| relational_expression LE_OP shift_expression
| relational_expression GE_OP shift_expression
;
equality_expression
: relational_expression
| equality_expression EQ_OP relational_expression
| equality_expression NE_OP relational_expression
;
and_expression
: equality_expression
| and_expression '&' equality_expression
;
exclusive_or_expression
: and_expression
| exclusive_or_expression '^' and_expression
;
inclusive_or_expression
: exclusive_or_expression
| inclusive_or_expression '|' exclusive_or_expression
;
logical_and_expression
: inclusive_or_expression
| logical_and_expression AND_OP inclusive_or_expression
;
logical_or_expression
: logical_and_expression
| logical_or_expression OR_OP logical_and_expression
;
conditional_expression
: logical_or_expression
| logical_or_expression '?' expression ':' conditional_expression
;
assignment_expression
: conditional_expression
| unary_expression assignment_operator assignment_expression
;
assignment_operator
: '='
| MUL_ASSIGN
| DIV_ASSIGN
| MOD_ASSIGN
| ADD_ASSIGN
| SUB_ASSIGN
| LEFT_ASSIGN
| RIGHT_ASSIGN
| AND_ASSIGN
| XOR_ASSIGN
| OR_ASSIGN
;
expression
: assignment_expression
| expression ',' assignment_expression
;
constant_expression
: conditional_expression
;
declaration
: declaration_specifiers ';'
| declaration_specifiers init_declarator_list ';'
;
declaration_specifiers
: storage_class_specifier
| storage_class_specifier declaration_specifiers
| type_specifier
| type_specifier declaration_specifiers
| type_qualifier
| type_qualifier declaration_specifiers
;
init_declarator_list
: init_declarator
| init_declarator_list ',' init_declarator
;
init_declarator
: declarator
| declarator '=' initializer
;
storage_class_specifier
: TYPEDEF
| EXTERN
| STATIC
| AUTO
| REGISTER
;
type_specifier
: VOID
| CHAR
| SHORT
| INT
| LONG
| FLOAT
| DOUBLE
| SIGNED
| UNSIGNED
| struct_or_union_specifier
| enum_specifier
| TYPE_NAME
;
struct_or_union_specifier
: struct_or_union IDENTIFIER '{' struct_declaration_list '}'
| struct_or_union '{' struct_declaration_list '}'
| struct_or_union IDENTIFIER
;
struct_or_union
: STRUCT
| UNION
;
struct_declaration_list
: struct_declaration
| struct_declaration_list struct_declaration
;
struct_declaration
: specifier_qualifier_list struct_declarator_list ';'
;
specifier_qualifier_list
: type_specifier specifier_qualifier_list
| type_specifier
| type_qualifier specifier_qualifier_list
| type_qualifier
;
struct_declarator_list
: struct_declarator
| struct_declarator_list ',' struct_declarator
;
struct_declarator
: declarator
| ':' constant_expression
| declarator ':' constant_expression
;
enum_specifier
: ENUM '{' enumerator_list '}'
| ENUM IDENTIFIER '{' enumerator_list '}'
| ENUM IDENTIFIER
;
enumerator_list
: enumerator
| enumerator_list ',' enumerator
;
enumerator
: IDENTIFIER
| IDENTIFIER '=' constant_expression
;
type_qualifier
: CONST
| VOLATILE
;
declarator
: pointer direct_declarator
| direct_declarator
;
direct_declarator
: IDENTIFIER
| '(' declarator ')'
| direct_declarator '[' constant_expression ']'
| direct_declarator '[' ']'
| direct_declarator '(' parameter_type_list ')'
| direct_declarator '(' identifier_list ')'
| direct_declarator '(' ')'
;
pointer
: '*'
| '*' type_qualifier_list
| '*' pointer
| '*' type_qualifier_list pointer
;
type_qualifier_list
: type_qualifier
| type_qualifier_list type_qualifier
;
parameter_type_list
: parameter_list
| parameter_list ',' ELLIPSIS
;
parameter_list
: parameter_declaration
| parameter_list ',' parameter_declaration
;
parameter_declaration
: declaration_specifiers declarator
| declaration_specifiers abstract_declarator
| declaration_specifiers
;
identifier_list
: IDENTIFIER
| identifier_list ',' IDENTIFIER
;
type_name
: specifier_qualifier_list
| specifier_qualifier_list abstract_declarator
;
abstract_declarator
: pointer
| direct_abstract_declarator
| pointer direct_abstract_declarator
;
direct_abstract_declarator
: '(' abstract_declarator ')'
| '[' ']'
| '[' constant_expression ']'
| direct_abstract_declarator '[' ']'
| direct_abstract_declarator '[' constant_expression ']'
| '(' ')'
| '(' parameter_type_list ')'
| direct_abstract_declarator '(' ')'
| direct_abstract_declarator '(' parameter_type_list ')'
;
initializer
: assignment_expression
| '{' initializer_list '}'
| '{' initializer_list ',' '}'
;
initializer_list
: initializer
| initializer_list ',' initializer
;
statement
: labeled_statement
| compound_statement
| expression_statement
| selection_statement
| iteration_statement
| jump_statement
;
labeled_statement
: IDENTIFIER ':' statement
| CASE constant_expression ':' statement
| DEFAULT ':' statement
;
compound_statement
: '{' '}'
| '{' statement_list '}'
| '{' declaration_list '}'
| '{' declaration_list statement_list '}'
;
declaration_list
: declaration
| declaration_list declaration
;
statement_list
: statement
| statement_list statement
;
expression_statement
: ';'
| expression ';'
;
selection_statement
: IF '(' expression ')' statement
| IF '(' expression ')' statement ELSE statement
| SWITCH '(' expression ')' statement
;
iteration_statement
: WHILE '(' expression ')' statement
| DO statement WHILE '(' expression ')' ';'
| FOR '(' expression_statement expression_statement ')' statement
| FOR '(' expression_statement expression_statement expression ')' statement
;
jump_statement
: GOTO IDENTIFIER ';'
| CONTINUE ';'
| BREAK ';'
| RETURN ';'
| RETURN expression ';'
;
translation_unit
: external_declaration
| translation_unit external_declaration
;
external_declaration
: function_definition
| declaration
;
function_definition
: declaration_specifiers declarator declaration_list compound_statement
| declaration_specifiers declarator compound_statement
| declarator declaration_list compound_statement
| declarator compound_statement
;

8
vendor/jison/examples/basic.json vendored Normal file
View file

@ -0,0 +1,8 @@
{
"tokens": "ZERO PLUS",
"bnf": {
"E" :[ "E PLUS T",
"T" ],
"T" :[ "ZERO" ]
}
}

9
vendor/jison/examples/basic2.json vendored Normal file
View file

@ -0,0 +1,9 @@
{
"comment": "Basic grammar that contains a nullable A nonterminal.",
"tokens": "x",
"bnf": {
"A" :[ "A x",
"" ]
}
}

16
vendor/jison/examples/basic2_lex.json vendored Normal file
View file

@ -0,0 +1,16 @@
{
"comment": "Basic grammar that contains a nullable A nonterminal.",
"lex": {
"rules": [
["\\s+", "/* skip whitespace */"],
["x", "return 'x';"]
]
},
"tokens": "x",
"bnf": {
"A" :[ "A x",
"" ]
}
}

15
vendor/jison/examples/basic_lex.json vendored Normal file
View file

@ -0,0 +1,15 @@
{
"lex": {
"rules": [
["\\s+", "/* skip whitespace */"],
["[0-9]+", "return 'NAT';"],
["\\+", "return '+';"]
]
},
"bnf": {
"E" :[ "E + T",
"T" ],
"T" :[ "NAT" ]
}
}

38
vendor/jison/examples/calculator.jison vendored Normal file
View file

@ -0,0 +1,38 @@
/* description: Parses and executes mathematical expressions. */
%left '+' '-'
%left '*' '/'
%left '^'
%left UMINUS
%%
S
: e EOF
{print($1); return $1;}
;
e
: e '+' e
{$$ = $1+$3;}
| e '-' e
{$$ = $1-$3;}
| e '*' e
{$$ = $1*$3;}
| e '/' e
{$$ = $1/$3;}
| e '^' e
{$$ = Math.pow($1, $3);}
| '-' e
{$$ = -$2;} %prec UMINUS
| '(' e ')'
{$$ = $2;}
| NUMBER
{$$ = Number(yytext);}
| E
{$$ = Math.E;}
| PI
{$$ = Math.PI;}
;

View file

@ -0,0 +1,14 @@
%%
\s+ {/* skip whitespace */}
[0-9]+("."[0-9]+)?\b {return 'NUMBER';}
"*" {return '*';}
"/" {return '/';}
"-" {return '-';}
"+" {return '+';}
"^" {return '^';}
"(" {return '(';}
")" {return ')';}
"PI" {return 'PI';}
"E" {return 'E';}
<<EOF>> {return 'EOF';}

42
vendor/jison/examples/calculator.json vendored Normal file
View file

@ -0,0 +1,42 @@
{
"comment": "Parses and executes mathematical expressions.",
"lex": {
"rules": [
["\\s+", "/* skip whitespace */"],
["[0-9]+(?:\\.[0-9]+)?\\b", "return 'NUMBER';"],
["\\*", "return '*';"],
["\\/", "return '/';"],
["-", "return '-';"],
["\\+", "return '+';"],
["\\^", "return '^';"],
["\\(", "return '(';"],
["\\)", "return ')';"],
["PI\\b", "return 'PI';"],
["E\\b", "return 'E';"],
["$", "return 'EOF';"]
]
},
"operators": [
["left", "+", "-"],
["left", "*", "/"],
["left", "^"],
["left", "UMINUS"]
],
"bnf": {
"S" :[[ "e EOF", "print($1); return $1;" ]],
"e" :[[ "e + e", "$$ = $1+$3;" ],
[ "e - e", "$$ = $1-$3;" ],
[ "e * e", "$$ = $1*$3;" ],
[ "e / e", "$$ = $1/$3;" ],
[ "e ^ e", "$$ = Math.pow($1, $3);" ],
[ "- e", "$$ = -$2;", {"prec": "UMINUS"} ],
[ "( e )", "$$ = $2;" ],
[ "NUMBER", "$$ = Number(yytext);" ],
[ "E", "$$ = Math.E;" ],
[ "PI", "$$ = Math.PI;" ]]
}
}

105
vendor/jison/examples/classy.json vendored Normal file
View file

@ -0,0 +1,105 @@
{
"comment": "ClassyLang grammar. Very classy.",
"author": "Zach Carter",
"lex": {
"macros": {
"digit": "[0-9]",
"id": "[a-zA-Z][a-zA-Z0-9]*"
},
"rules": [
["//.*", "/* ignore comment */"],
["main\\b", "return 'MAIN';"],
["class\\b", "return 'CLASS';"],
["extends\\b", "return 'EXTENDS';"],
["nat\\b", "return 'NATTYPE';"],
["if\\b", "return 'IF';"],
["else\\b", "return 'ELSE';"],
["for\\b", "return 'FOR';"],
["printNat\\b", "return 'PRINTNAT';"],
["readNat\\b", "return 'READNAT';"],
["this\\b", "return 'THIS';"],
["new\\b", "return 'NEW';"],
["var\\b", "return 'VAR';"],
["null\\b", "return 'NUL';"],
["{digit}+", "return 'NATLITERAL';"],
["{id}", "return 'ID';"],
["==", "return 'EQUALITY';"],
["=", "return 'ASSIGN';"],
["\\+", "return 'PLUS';"],
["-", "return 'MINUS';"],
["\\*", "return 'TIMES';"],
[">", "return 'GREATER';"],
["\\|\\|", "return 'OR';"],
["!", "return 'NOT';"],
["\\.", "return 'DOT';"],
["\\{", "return 'LBRACE';"],
["\\}", "return 'RBRACE';"],
["\\(", "return 'LPAREN';"],
["\\)", "return 'RPAREN';"],
[";", "return 'SEMICOLON';"],
["\\s+", "/* skip whitespace */"],
[".", "throw 'Illegal character: '+yytext;"],
["$", "return 'ENDOFFILE';"]
]
},
"tokens": "MAIN CLASS EXTENDS NATTYPE IF ELSE FOR PRINTNAT READNAT THIS NEW VAR NUL NATLITERAL ID ASSIGN PLUS MINUS TIMES EQUALITY GREATER OR NOT DOT SEMICOLON LBRACE RBRACE LPAREN RPAREN ENDOFFILE",
"operators": [
["right", "ASSIGN"],
["left", "OR"],
["nonassoc", "EQUALITY", "GREATER"],
["left", "PLUS", "MINUS"],
["left", "TIMES"],
["right", "NOT"],
["left", "DOT"]
],
"bnf": {
"pgm": ["cdl MAIN LBRACE vdl el RBRACE ENDOFFILE"],
"cdl": ["c cdl",
""],
"c": ["CLASS id EXTENDS id LBRACE vdl mdl RBRACE"],
"vdl": ["VAR t id SEMICOLON vdl",
""],
"mdl": ["t id LPAREN t id RPAREN LBRACE vdl el RBRACE mdl",
""],
"t": ["NATTYPE",
"id"],
"id": ["ID"],
"el": ["e SEMICOLON el",
"e SEMICOLON"],
"e": ["NATLITERAL",
"NUL",
"id",
"NEW id",
"THIS",
"IF LPAREN e RPAREN LBRACE el RBRACE ELSE LBRACE el RBRACE",
"FOR LPAREN e SEMICOLON e SEMICOLON e RPAREN LBRACE el RBRACE",
"READNAT LPAREN RPAREN",
"PRINTNAT LPAREN e RPAREN",
"e PLUS e",
"e MINUS e",
"e TIMES e",
"e EQUALITY e",
"e GREATER e",
"NOT e",
"e OR e",
"e DOT id",
"id ASSIGN e",
"e DOT id ASSIGN e",
"id LPAREN e RPAREN",
"e DOT id LPAREN e RPAREN",
"LPAREN e RPAREN"]
}
}

126
vendor/jison/examples/classy_ast.json vendored Normal file
View file

@ -0,0 +1,126 @@
{
"comment": "ClassyLang grammar with AST-building actions. Very classy.",
"author": "Zach Carter",
"lex": {
"macros": {
"digit": "[0-9]",
"id": "[a-zA-Z][a-zA-Z0-9]*"
},
"rules": [
["//.*", "/* ignore comment */"],
["main\\b", "return 'MAIN';"],
["class\\b", "return 'CLASS';"],
["extends\\b", "return 'EXTENDS';"],
["nat\\b", "return 'NATTYPE';"],
["if\\b", "return 'IF';"],
["else\\b", "return 'ELSE';"],
["for\\b", "return 'FOR';"],
["printNat\\b", "return 'PRINTNAT';"],
["readNat\\b", "return 'READNAT';"],
["this\\b", "return 'THIS';"],
["new\\b", "return 'NEW';"],
["var\\b", "return 'VAR';"],
["null\\b", "return 'NUL';"],
["{digit}+", "return 'NATLITERAL';"],
["{id}", "return 'ID';"],
["==", "return 'EQUALITY';"],
["=", "return 'ASSIGN';"],
["\\+", "return 'PLUS';"],
["-", "return 'MINUS';"],
["\\*", "return 'TIMES';"],
[">", "return 'GREATER';"],
["\\|\\|", "return 'OR';"],
["!", "return 'NOT';"],
["\\.", "return 'DOT';"],
["\\{", "return 'LBRACE';"],
["\\}", "return 'RBRACE';"],
["\\(", "return 'LPAREN';"],
["\\)", "return 'RPAREN';"],
[";", "return 'SEMICOLON';"],
["\\s+", "/* skip whitespace */"],
[".", "throw 'Illegal character: '+yytext;"],
["$", "return 'ENDOFFILE';"]
]
},
"tokens": "MAIN CLASS EXTENDS NATTYPE IF ELSE FOR PRINTNAT READNAT THIS NEW VAR NUL NATLITERAL ID ASSIGN PLUS MINUS TIMES EQUALITY GREATER OR NOT DOT SEMICOLON LBRACE RBRACE LPAREN RPAREN ENDOFFILE",
"operators": [
["right", "ASSIGN"],
["left", "OR"],
["nonassoc", "EQUALITY", "GREATER"],
["left", "PLUS", "MINUS"],
["left", "TIMES"],
["right", "NOT"],
["left", "DOT"]
],
"bnf": {
"pgm": [["cdl MAIN LBRACE vdl el RBRACE ENDOFFILE",
"$$ = ['PROGRAM',{},$1,$4,$5]; return $$;"]],
"cdl": [["c cdl",
"$$ = prependChild($2, $1);"],
["",
"$$ = ['CLASS_DECL_LIST',{}];"]],
"c": [["CLASS id EXTENDS id LBRACE vdl mdl RBRACE",
"$$ = ['CLASS_DECL',{},$2,$4,$6,$7];"]],
"vdl": [["VAR t id SEMICOLON vdl",
"$$ = prependChild($5, ['VAR_DECL',{},$2,$3]);"],
["",
"$$ = ['VAR_DECL_LIST',{}];"]],
"mdl": [["t id LPAREN t id RPAREN LBRACE vdl el RBRACE mdl",
"$$ = prependChild($11, ['METHOD_DECL',{},$1,$2,$4,$5,$8,$9]);"],
["",
"$$ = ['METHOD_DECL_LIST',{}];"]],
"t": [["NATTYPE",
"$$ = ['NAT_TYPE',{}];"],
["id",
"$$ = $1"]],
"id": [["ID",
"$$ = ['AST_ID',{val:yytext}]"]],
"el": [["e SEMICOLON el",
"$$ = prependChild($3, $1);"],
["e SEMICOLON",
"$$ = ['EXPR_LIST',{},$1];"]],
"e": [["NATLITERAL", "$$ = ['NAT_LITERAL_EXPR',{val:parseInt(yytext)}];"],
["NUL", "$$ = ['NULL_EXPR',{}];"],
["id", "$$ = ['ID_EXPR',{},$1];"],
["NEW id", "$$ = ['NEW_EXPR',{},$2];"],
["THIS", "$$ = ['THIS_EXPR',{}];"],
["IF LPAREN e RPAREN LBRACE el RBRACE ELSE LBRACE el RBRACE",
"$$ = ['IF_THEN_ELSE_EXPR',{},$3,$6,$10];"],
["FOR LPAREN e SEMICOLON e SEMICOLON e RPAREN LBRACE el RBRACE",
"$$ = ['FOR_EXPR',{},$3,$5,$7,$10];"],
["READNAT LPAREN RPAREN",
"$$ = ['READ_EXPR',{}];"],
["PRINTNAT LPAREN e RPAREN",
"$$ = ['PRINT_EXPR',{},$3];"],
["e PLUS e", "$$ = ['PLUS_EXPR',{},$1,$3];"],
["e MINUS e", "$$ = ['MINUS_EXPR',{},$1,$3];"],
["e TIMES e", "$$ = ['TIMES_EXPR',{},$1,$3];"],
["e EQUALITY e", "$$ = ['EQUALITY_EXPR',{},$1,$3];"],
["e GREATER e", "$$ = ['GREATER_THAN_EXPR',{},$1,$3];"],
["NOT e", "$$ = ['NOT_EXPR',{},$2];"],
["e OR e", "$$ = ['OR_EXPR',{},$1,$3];"],
["e DOT id", "$$ = ['DOT_ID_EXPR',{},$1,$3];"],
["id ASSIGN e", "$$ = ['ASSIGN_EXPR',{},$1,$3];"],
["e DOT id ASSIGN e",
"$$ = ['DOT_ASSIGN_EXPR',{},$1,$3,$5];"],
["id LPAREN e RPAREN",
"$$ = ['METHOD_CALL_EXPR',{},$1,$3];"],
["e DOT id LPAREN e RPAREN",
"$$ = ['DOT_METHOD_CALL_EXPR',{},$1,$3,$5];"],
["LPAREN e RPAREN",
"$$ = $2;"]]
},
"actionInclude": "function prependChild(node, child){ node.splice(2,0,child); return node; }"
}

25
vendor/jison/examples/dism.json vendored Normal file
View file

@ -0,0 +1,25 @@
{
"author": "Jay Ligatti",
"tokens": "ADD SUB MUL MOV LOD STR JMP BEQ BLT RDN PTN HLT INT LABEL COLON",
"bnf": {
"pgm" :[ "instlist" ],
"instlist" :[ "label COLON inst instlist",
"inst instlist",
"" ],
"inst" :[ "ADD intt intt intt",
"SUB intt intt intt",
"MUL intt intt intt",
"MOV intt intt",
"LOD intt intt intt",
"STR intt intt intt",
"JMP intt intt intt",
"BEQ intt intt intt",
"BLT intt intt intt",
"RDN intt",
"PTN intt",
"HLT intt"],
"label" :[ "LABEL" ],
"intt" :[ "INT", "label" ]
}
}

26
vendor/jison/examples/dism_lr0.json vendored Normal file
View file

@ -0,0 +1,26 @@
{
"author": "Jay Ligatti",
"tokens": "ADD SUB MUL MOV LOD STR JMP BEQ BLT RDN PTN HLT INT LABEL COLON",
"bnf": {
"instlist" :[ "instlist label COLON inst",
"instlist inst",
"inst" ],
"inst" :[ "ADD intt intt intt",
"SUB intt intt intt",
"MUL intt intt intt",
"MOV intt intt",
"LOD intt intt intt",
"STR intt intt intt",
"JMP intt intt intt",
"BEQ intt intt intt",
"BLT intt intt intt",
"RDN intt",
"PTN intt",
"HLT intt"],
"label" :[ "LABEL" ],
"intt" :[ "INT", "label" ]
}
}

80
vendor/jison/examples/json.js vendored Normal file
View file

@ -0,0 +1,80 @@
var Generator = require("jison").Generator;
var system = require("system");
var fs = require("file");
exports.grammar = {
"comment": "ECMA-262 5th Edition, 15.12.1 The JSON Grammar.",
"author": "Zach Carter",
"lex": {
"macros": {
"digit": "[0-9]",
"esc": "\\\\",
"int": "-?(?:[0-9]|[1-9][0-9]+)",
"exp": "(?:[eE][-+]?[0-9]+)",
"frac": "(?:\\.[0-9]+)"
},
"rules": [
["\\s+", "/* skip whitespace */"],
["{int}{frac}?{exp}?\\b", "return 'NUMBER';"],
["\"(?:{esc}[\"bfnrt/{esc}]|{esc}u[a-fA-F0-9]{4}|[^\"{esc}])*\"", "yytext = yytext.substr(1,yyleng-2); return 'STRING';"],
["\\{", "return '{'"],
["\\}", "return '}'"],
["\\[", "return '['"],
["\\]", "return ']'"],
[",", "return ','"],
[":", "return ':'"],
["true\\b", "return 'TRUE'"],
["false\\b", "return 'FALSE'"],
["null\\b", "return 'NULL'"]
]
},
"tokens": "STRING NUMBER { } [ ] , : TRUE FALSE NULL",
"start": "JSONText",
"bnf": {
"JSONString": [ "STRING" ],
"JSONNumber": [ "NUMBER" ],
"JSONBooleanLiteral": [ "TRUE", "FALSE" ],
"JSONText": [ "JSONValue" ],
"JSONValue": [ "JSONNullLiteral",
"JSONBooleanLiteral",
"JSONString",
"JSONNumber",
"JSONObject",
"JSONArray" ],
"JSONObject": [ "{ }",
"{ JSONMemberList }" ],
"JSONMember": [ "JSONString : JSONValue" ],
"JSONMemberList": [ "JSONMember",
"JSONMemberList , JSONMember" ],
"JSONArray": [ "[ ]",
"[ JSONElementList ]" ],
"JSONElementList": [ "JSONValue",
"JSONElementList , JSONValue" ]
}
};
var options = {type: "slr", moduleType: "commonjs", moduleName: "jsoncheck"};
exports.main = function main (args) {
var cwd = fs.path(fs.cwd()),
code = new Generator(exports.grammar, options).generate(),
stream = cwd.join(options.moduleName+".js").open("w");
stream.print(code).close();
};
if (require.main === module)
exports.main(system.args);

83
vendor/jison/examples/json_ast.js vendored Normal file
View file

@ -0,0 +1,83 @@
var Generator = require("jison").Generator;
var system = require("system");
var fs = require("file");
exports.grammar = {
"comment": "ECMA-262 5th Edition, 15.12.1 The JSON Grammar. Parses JSON strings into objects.",
"author": "Zach Carter",
"lex": {
"macros": {
"digit": "[0-9]",
"esc": "\\\\",
"int": "-?(?:[0-9]|[1-9][0-9]+)",
"exp": "(?:[eE][-+]?[0-9]+)",
"frac": "(?:\\.[0-9]+)"
},
"rules": [
["\\s+", "/* skip whitespace */"],
["{int}{frac}?{exp}?\\b", "return 'NUMBER';"],
["\"(?:{esc}[\"bfnrt/{esc}]|{esc}u[a-fA-F0-9]{4}|[^\"{esc}])*\"", "yytext = yytext.substr(1,yyleng-2); return 'STRING';"],
["\\{", "return '{'"],
["\\}", "return '}'"],
["\\[", "return '['"],
["\\]", "return ']'"],
[",", "return ','"],
[":", "return ':'"],
["true\\b", "return 'TRUE'"],
["false\\b", "return 'FALSE'"],
["null\\b", "return 'NULL'"]
]
},
"tokens": "STRING NUMBER { } [ ] , : TRUE FALSE NULL",
"start": "JSONText",
"bnf": {
"JSONString": [[ "STRING", "$$ = yytext;" ]],
"JSONNumber": [[ "NUMBER", "$$ = Number(yytext);" ]],
"JSONNullLiteral": [[ "NULL", "$$ = null;" ]],
"JSONBooleanLiteral": [[ "TRUE", "$$ = true;" ],
[ "FALSE", "$$ = false;" ]],
"JSONText": [[ "JSONValue", "return $$ = $1;" ]],
"JSONValue": [[ "JSONNullLiteral", "$$ = $1;" ],
[ "JSONBooleanLiteral", "$$ = $1;" ],
[ "JSONString", "$$ = $1;" ],
[ "JSONNumber", "$$ = $1;" ],
[ "JSONObject", "$$ = $1;" ],
[ "JSONArray", "$$ = $1;" ]],
"JSONObject": [[ "{ }", "$$ = {};" ],
[ "{ JSONMemberList }", "$$ = $2;" ]],
"JSONMember": [[ "JSONString : JSONValue", "$$ = [$1, $3];" ]],
"JSONMemberList": [[ "JSONMember", "$$ = {}; $$[$1[0]] = $1[1];" ],
[ "JSONMemberList , JSONMember", "$$ = $1; $1[$3[0]] = $3[1];" ]],
"JSONArray": [[ "[ ]", "$$ = [];" ],
[ "[ JSONElementList ]", "$$ = $2;" ]],
"JSONElementList": [[ "JSONValue", "$$ = [$1];" ],
[ "JSONElementList , JSONValue", "$$ = $1; $1.push($3);" ]]
}
};
var options = {type: "slr", moduleType: "commonjs", moduleName: "jsonparse"};
exports.main = function main (args) {
var cwd = fs.path(fs.cwd()),
code = new Generator(exports.grammar, options).generate(),
stream = cwd.join(options.moduleName+".js").open("w");
stream.print(code).close();
};
if (require.main === module)
exports.main(system.args);

26
vendor/jison/examples/precedence.json vendored Normal file
View file

@ -0,0 +1,26 @@
{
"comment": "Grammar showing precedence operators and semantic actions.",
"lex": {
"rules": [
["\\s+", "/* skip whitespace */"],
["[0-9]+", "return 'NAT';"],
["\\+", "return '+';"],
["\\*", "return '*';"],
["$", "return 'EOF';"]
]
},
"tokens": "NAT + * EOF",
"operators": [
["left", "+"],
["left", "*"]
],
"bnf": {
"S" :[[ "e EOF", "return $1;" ]],
"e" :[[ "e + e", "$$ = [$1,'+', $3];" ],
[ "e * e", "$$ = [$1, '*', $3];" ],
[ "NAT", "$$ = parseInt(yytext);" ]]
}
}

View file

@ -0,0 +1,13 @@
{
"comment": "Produces a reduce-reduce conflict unless using LR(1).",
"tokens": "z d b c a",
"start": "S",
"bnf": {
"S" :[ "a A c",
"a B d",
"b A d",
"b B c"],
"A" :[ "z" ],
"B" :[ "z" ]
}
}

1385
vendor/jison/lib/jison.js vendored Normal file

File diff suppressed because it is too large Load diff

43
vendor/jison/lib/jison/bnf.js vendored Normal file
View file

@ -0,0 +1,43 @@
if (typeof require !== 'undefined') {
var bnf = require("./util/bnf-parser").parser;
exports.parse = function parse () { return bnf.parse.apply(bnf, arguments) };
}
// adds a declaration to the grammar
bnf.yy.addDeclaration = function (grammar, decl) {
if (decl.start) {
grammar.start = decl.start
}
if (decl.operator) {
if (!grammar.operators) {
grammar.operators = [];
}
grammar.operators.push(decl.operator);
}
};
// helps tokenize comments
bnf.yy.lexComment = function (lexer) {
var ch = lexer.input();
if (ch === '/') {
lexer.yytext = lexer.yytext.replace(/\*(.|\s)\/\*/, '*$1');
return;
} else {
lexer.unput('/*');
lexer.more();
}
}
// helps tokenize actions
bnf.yy.lexAction = function (lexer) {
var ch = lexer.input();
if (ch === '}') {
lexer.yytext = lexer.yytext.substr(2, lexer.yyleng-4).replace(/\}(.|\s)\{\{/, '}$1');
return 'ACTION';
} else {
lexer.unput('{{');
lexer.more();
}
}

18
vendor/jison/lib/jison/jisonlex.js vendored Normal file
View file

@ -0,0 +1,18 @@
if (typeof require !== 'undefined') {
var jisonlex = require("./util/lex-parser").parser;
exports.parse = function parse () {
jisonlex.yy.ruleSection = false;
return jisonlex.parse.apply(jisonlex, arguments);
};
}
function encodeRE (s) { return s.replace(/([.*+?^${}()|[\]\/\\])/g, '\\$1'); }
jisonlex.yy = {
prepareString: function (s) {
// unescape slashes
s = s.replace(/\\\\/g, "\\");
s = encodeRE(s);
return s;
}
};

146
vendor/jison/lib/jison/json2jison.js vendored Normal file
View file

@ -0,0 +1,146 @@
// converts json grammar format to Jison grammar format
function json2jison (grammar) {
var s = "";
s += genDecls(grammar);
s += genBNF(grammar.bnf);
return s;
}
function genDecls (grammar) {
var s = "",
key;
for (key in grammar) if (grammar.hasOwnProperty(key)) {
if (key === 'start') {
s += "\n%start "+grammar.start+"\n\n";
}
if (key === 'author') {
s += "\n/* author: "+grammar.author+" */\n\n";
}
if (key === 'comment') {
s += "\n/* description: "+grammar.comment+" */\n\n";
}
if (key === 'operators') {
for (var i=0; i<grammar.operators.length; i++) {
s += "%"+grammar.operators[i][0]+' '+quoteSymbols(grammar.operators[i].slice(1).join(' '))+"\n";
}
s += "\n";
}
}
return s;
}
// Emits the "%%" production section: one rule per nonterminal, each
// with its rendered handle list.
function genBNF (bnf) {
    var rules = ["%%\n"];
    for (var sym in bnf) {
        if (bnf.hasOwnProperty(sym)) {
            rules.push("\n" + sym + '\n : ' + genHandles(bnf[sym]) + "\n ;\n");
        }
    }
    return rules.join("");
}
// Renders one nonterminal's handle list. `handle` is either a single
// rule string, or an array whose entries are rule strings or tuples:
// [ruleString, actionString, {prec}] / [ruleString, {prec}].
function genHandles (handle) {
    if (typeof handle === 'string') {
        return handle;
    } else { //array
        var s = "";
        for (var i=0; i< handle.length;i++) {
            // non-empty plain string: just a quoted rule body
            // (an empty string — an epsilon rule — emits nothing here
            // but still gets its '|' separator below)
            if (typeof handle[i] === 'string' && handle[i]) {
                s += quoteSymbols(handle[i]);
            } else if (handle[i] instanceof Array) {
                s += (handle[i][0] && quoteSymbols(handle[i][0]));
                if (typeof handle[i][1] === 'string') {
                    // actions containing '}' need the double-brace form;
                    // a trailing '}' additionally gets a space so the
                    // closing '}}' isn't fused with it
                    s += handle[i][1].match(/\}/) ?
                        "\n {{"+handle[i][1]+(handle[i][1].match(/\}$/) ? ' ' : '')+"}}" :
                        "\n {"+handle[i][1]+"}";
                    if (handle[i][2] && handle[i][2].prec) {
                        s += " %prec "+handle[i][2].prec;
                    }
                } else if (handle[i][1].prec) {
                    // no action: the tuple's 2nd slot is the prec object
                    s += " %prec "+handle[i][1].prec;
                }
            }
            // separate alternatives with '|'
            if (typeof handle[i+1] !== 'undefined')
                s += "\n | ";
        }
        return s;
    }
}
// Quote each space-separated symbol of a rule's right-hand side.
function quoteSymbols (rhs) {
    var parts = rhs.split(' ');
    var quoted = [];
    for (var i = 0; i < parts.length; i++) {
        quoted.push(quoteSymbol(parts[i]));
    }
    return quoted.join(' ');
}
// Quote one grammar symbol for Jison-file output. Bare identifiers
// (matching the bnf lexer's ID token: [a-zA-Z][a-zA-Z0-9_-]*) pass
// through unquoted; anything else is wrapped in single quotes, or in
// double quotes when the symbol itself contains a single quote.
// Fix: the identifier test is now anchored — the old unanchored regex
// treated any symbol merely *containing* a letter (e.g. "a+b") as an
// identifier and emitted it unquoted, producing unparsable output.
function quoteSymbol (sym) {
    if (!/^[a-zA-Z][a-zA-Z0-9_-]*$/.test(sym)) {
        var quote = /'/.test(sym) ? '"' : "'";
        sym = quote+sym+quote;
    }
    return sym;
}
// Generate lex format from lex JSON.
// Renders a JSON lex spec ({macros, actionInclude, rules}) as jisonlex
// file text: macro definitions, an optional %{ %} include block, then
// the "%%"-delimited rule section.
// Fixes: the macro loop referenced the undeclared `macros[1]` instead
// of `macro[1]` (a ReferenceError whenever macros were present), and
// both loops consumed the caller's arrays with shift(); plain index
// loops produce the same output without mutating the input object.
function genLex (lex) {
    var s = [];
    if (lex.macros) {
        for (var i = 0; i < lex.macros.length; i++) {
            var macro = lex.macros[i];
            s.push(macro[0], '\t\t', macro[1], '\n');
        }
    }
    if (lex.actionInclude) {
        s.push('\n%{\n', lex.actionInclude, '\n%}\n');
    }
    s.push('\n%%\n');
    if (lex.rules) {
        for (var j = 0; j < lex.rules.length; j++) {
            var rule = lex.rules[j];
            s.push(rule[0], ' ', genLexRule(rule[1]), '\n');
        }
    }
    s.push('\n%%\n');
    return s.join('');
}
// Choose delimiters for a generated lex rule's action body.
// NOTE(review): /\\}/ matches a literal backslash before "}", so plain
// actions containing "}" still get the single-brace form — presumably
// /\}/ was intended; confirm against the jisonlex grammar.
// NOTE(review): the long form emits "%{...}%", which matches the
// jisonlex.lex.json rule but not src/jisonlex.jisonlex's "%{...%}" —
// one of the two delimiter conventions looks inverted; verify.
function genLexRule (rule) {
    return rule.match(/\\}/) ? '%{'+rule+'}%' : '{'+rule+'}';
}
exports.json2jison = json2jison;
exports.convert = json2jison;
exports.main = function main (args) {
var fs = require("file");
gfile = fs.path(fs.cwd()).join(args[1]),
grammar = JSON.parse(gfile.read({charset: "utf-8"}));
if (grammar.bnf) {
var fname = fs.path(fs.cwd()).join(gfile.basename(".json") + ".jison"),
stream = fname.open("w");
stream.print(json2jison(grammar));
stream.close();
}
var lex = grammar.lex || grammar.rules && grammar;
if (lex) {
var fname = fs.path(fs.cwd()).join(gfile.basename(".json").replace(/[._]?lex$/,'') + ".jisonlex"),
stream = fname.open("w");
stream.print(genLex(lex));
stream.close();
}
};

221
vendor/jison/lib/jison/lexer.js vendored Normal file
View file

@ -0,0 +1,221 @@
// Basic RegExp Lexer
// MIT Licensed
// Zachary Carter <zach@carter.name>
var RegExpLexer = (function () {
// expand macros and convert matchers to RegExp's
function prepareRules(rules, macros, actions, tokens) {
var m,i,k,action,
newRules = [];
actions.push('switch(arguments[2]) {');
for (i=0;i < rules.length; i++) {
m = rules[i][0];
for (k in macros) {
if (macros.hasOwnProperty(k) && typeof m === 'string') {
m = m.split("{"+k+"}").join(macros[k]);
}
}
if (typeof m === 'string') {
m = new RegExp("^"+m);
}
newRules.push(m);
if (typeof rules[i][1] === 'function') {
rules[i][1] = String(rules[i][1]).replace(/^\s*function \(\) \{/, '').replace(/\}\s*$/, '');
}
action = rules[i][1];
if (tokens && action.match(/return '[^']+'/)) {
action = action.replace(/return '([^']+)'/, function (str, pl) {
return "return "+(tokens[pl] ? tokens[pl] : "'"+pl+"'");
});
}
actions.push('case '+i+':' +action+'\nbreak;');
}
actions.push("}");
return newRules;
}
function buildActions (dict, tokens) {
var actions = [dict.actionInclude || ''];
var tok;
var toks = {};
for (tok in tokens) {
toks[tokens[tok]] = tok;
}
this.rules = prepareRules(dict.rules, dict.macros, actions, tokens && toks);
var fun = actions.join("\n");
"yytext yyleng yylineno".split(' ').forEach(function (yy) {
fun = fun.replace(new RegExp("("+yy+")", "g"), "yy_.$1");
});
return Function("yy", "yy_", fun);
}
function RegExpLexer (dict, input, tokens) {
dict = dict || {};
this.performAction = buildActions.call(this, dict, tokens);
this.yy = {};
if (input) {
this.setInput(input);
}
}
RegExpLexer.prototype = {
EOF: '',
parseError: function parseError(str, hash) {
if (this.yy.parseError) {
this.yy.parseError(str, hash);
} else {
throw new Error(str);
}
},
// resets the lexer, sets new input
setInput: function (input) {
this._input = input;
this._more = this._less = this.done = false;
this.yylineno = this.yyleng = 0;
this.yytext = this.matched = this.match = '';
return this;
},
// consumes and returns one char from the input
input: function () {
var ch = this._input[0];
this.yytext+=ch;
this.yyleng++;
this.match+=ch;
this.matched+=ch;
var lines = ch.match(/\n/);
if (lines) this.yylineno++;
this._input = this._input.slice(1);
return ch;
},
// unshifts one char into the input
unput: function (ch) {
this._input = ch + this._input;
return this;
},
// When called from action, caches matched text and appends it on next action
more: function () {
this._more = true;
return this;
},
// displays upcoming input, i.e. for error messages
pastInput: function () {
var past = this.matched.substr(0, this.matched.length - this.match.length);
return (past.length > 20 ? '...':'') + past.substr(-20).replace(/\n/g, "");
},
// displays upcoming input, i.e. for error messages
upcomingInput: function () {
var next = this.match;
if (next.length < 20) {
next += this._input.substr(0, 20-next.length);
}
return (next.substr(0,20)+(next.length > 20 ? '...':'')).replace(/\n/g, "");
},
// displays upcoming input, i.e. for error messages
showPosition: function () {
var pre = this.pastInput();
var c = new Array(pre.length + 1).join("-");
return pre + this.upcomingInput() + "\n" + c+"^";
},
// return next match in input
next: function () {
if (this.done) {
return this.EOF;
}
if (!this._input) this.done = true;
var token,
match,
lines;
if (!this._more) {
this.yytext = '';
this.match = '';
}
for (var i=0;i < this.rules.length; i++) {
match = this._input.match(this.rules[i]);
if (match) {
lines = match[0].match(/\n/g);
if (lines) this.yylineno += lines.length;
this.yytext += match[0];
this.match += match[0];
this.matches = match;
this.yyleng = this.yytext.length;
this._more = false;
this._input = this._input.slice(match[0].length);
this.matched += match[0];
token = this.performAction.call(this, this.yy, this, i);
if (token) return token;
else return;
}
}
if (this._input == this.EOF) {
return this.EOF;
} else {
this.parseError('Lexical error on line '+(this.yylineno+1)+'. Unrecognized text.\n'+this.showPosition(),
{text: "", token: null, line: this.yylineno});
}
},
// return next match that has a token
lex: function () {
var r = this.next();
if (typeof r !== 'undefined') {
return r;
} else {
return this.lex();
}
},
generate: function generate(opt) {
var code = "";
if (opt.commonjs)
code = this.generateCommonJSModule(opt);
else
code = this.generateModule(opt);
return code;
},
generateModule: function generateModule(opt) {
opt = opt || {};
var out = "/* Jison generated lexer */",
moduleName = opt.moduleName || "lexer";
out += "\nvar "+moduleName+" = (function(){var lexer = ({";
var p = [];
for (var k in RegExpLexer.prototype)
if (RegExpLexer.prototype.hasOwnProperty(k) && k.indexOf("generate") === -1)
p.push(k + ":" + (RegExpLexer.prototype[k].toString() || '""'));
out += p.join(",\n");
out += "})";
out += ";\nlexer.performAction = "+String(this.performAction);
out += ";\nlexer.rules = "+uneval(this.rules);
out += ";return lexer;})()";
return out;
},
generateCommonJSModule: function generateCommonJSModule(opt) {
opt = opt || {};
var out = "/* Jison generated lexer as commonjs module */",
moduleName = opt.moduleName || "lexer";
out += this.generateModule(opt);
out += "\nexports.lexer = "+moduleName;
out += ";\nexports.lex = function () { return "+moduleName+".lex.apply(lexer, arguments); };";
return out;
}
};
return RegExpLexer;
})()
// export for CommonJS hosts; browsers use the global RegExpLexer
if (typeof exports !== 'undefined')
    exports.RegExpLexer = RegExpLexer;

View file

@ -0,0 +1,390 @@
/* Jison generated parser */
var bnf = (function(){
var parser = {trace: function trace() {
},
yy: {},
symbols_: {"spec":2,"declaration_list":3,"%%":4,"grammar":5,"EOF":6,"declaration":7,"START":8,"id":9,"operator":10,"associativity":11,"token_list":12,"LEFT":13,"RIGHT":14,"NONASSOC":15,"symbol":16,"production_list":17,"production":18,":":19,"handle_list":20,";":21,"|":22,"handle_action":23,"handle":24,"action":25,"prec":26,"PREC":27,"STRING":28,"ID":29,"ACTION":30,"$accept":0,"$end":1},
terminals_: {"4":"%%","6":"EOF","8":"START","13":"LEFT","14":"RIGHT","15":"NONASSOC","19":":","21":";","22":"|","27":"PREC","28":"STRING","29":"ID","30":"ACTION"},
productions_: [0,[2,4],[2,5],[3,2],[3,0],[7,2],[7,1],[10,2],[11,1],[11,1],[11,1],[12,2],[12,1],[5,1],[17,2],[17,1],[18,4],[20,3],[20,1],[23,3],[24,2],[24,0],[26,2],[26,0],[16,1],[16,1],[9,1],[25,1],[25,0]],
performAction: function anonymous(yytext, yyleng, yylineno, yy) {
var $$ = arguments[5], $0 = arguments[5].length;
switch (arguments[4]) {
case 1:
this.$ = $$[$0 - 4 + 1 - 1];
this.$.bnf = $$[$0 - 4 + 3 - 1];
return this.$;
break;
case 2:
this.$ = $$[$0 - 5 + 1 - 1];
this.$.bnf = $$[$0 - 5 + 3 - 1];
return this.$;
break;
case 3:
this.$ = $$[$0 - 2 + 1 - 1];
yy.addDeclaration(this.$, $$[$0 - 2 + 2 - 1]);
break;
case 4:
this.$ = {};
break;
case 5:
this.$ = {start: $$[$0 - 2 + 2 - 1]};
break;
case 6:
this.$ = {operator: $$[$0 - 1 + 1 - 1]};
break;
case 7:
this.$ = [$$[$0 - 2 + 1 - 1]];
this.$.push.apply(this.$, $$[$0 - 2 + 2 - 1]);
break;
case 8:
this.$ = "left";
break;
case 9:
this.$ = "right";
break;
case 10:
this.$ = "nonassoc";
break;
case 11:
this.$ = $$[$0 - 2 + 1 - 1];
this.$.push($$[$0 - 2 + 2 - 1]);
break;
case 12:
this.$ = [$$[$0 - 1 + 1 - 1]];
break;
case 13:
this.$ = $$[$0 - 1 + 1 - 1];
break;
case 14:
this.$ = $$[$0 - 2 + 1 - 1];
this.$[$$[$0 - 2 + 2 - 1][0]] = $$[$0 - 2 + 2 - 1][1];
break;
case 15:
this.$ = {};
this.$[$$[$0 - 1 + 1 - 1][0]] = $$[$0 - 1 + 1 - 1][1];
break;
case 16:
this.$ = [$$[$0 - 4 + 1 - 1], $$[$0 - 4 + 3 - 1]];
break;
case 17:
this.$ = $$[$0 - 3 + 1 - 1];
this.$.push($$[$0 - 3 + 3 - 1]);
break;
case 18:
this.$ = [$$[$0 - 1 + 1 - 1]];
break;
case 19:
this.$ = [$$[$0 - 3 + 1 - 1].length ? $$[$0 - 3 + 1 - 1].join(" ") : ""];
if ($$[$0 - 3 + 2 - 1]) {
this.$.push($$[$0 - 3 + 2 - 1]);
}
if ($$[$0 - 3 + 3 - 1]) {
this.$.push($$[$0 - 3 + 3 - 1]);
}
if (this.$.length === 1) {
this.$ = this.$[0];
}
break;
case 20:
this.$ = $$[$0 - 2 + 1 - 1];
this.$.push($$[$0 - 2 + 2 - 1]);
break;
case 21:
this.$ = [];
break;
case 22:
this.$ = {prec: $$[$0 - 2 + 2 - 1]};
break;
case 23:
this.$ = null;
break;
case 24:
this.$ = $$[$0 - 1 + 1 - 1];
break;
case 25:
this.$ = yytext;
break;
case 26:
this.$ = yytext;
break;
case 27:
this.$ = yytext;
break;
case 28:
this.$ = "";
break;
default:;
}
},
table: [{"2":1,"3":2,"4":[[2,4]],"15":[[2,4]],"14":[[2,4]],"13":[[2,4]],"8":[[2,4]]},{"1":[[3]]},{"4":[[1,3]],"7":4,"8":[[1,5]],"10":6,"11":7,"13":[[1,8]],"14":[[1,9]],"15":[[1,10]]},{"5":11,"17":12,"18":13,"9":14,"29":[[1,15]]},{"4":[[2,3]],"15":[[2,3]],"14":[[2,3]],"13":[[2,3]],"8":[[2,3]]},{"9":16,"29":[[1,15]]},{"8":[[2,6]],"13":[[2,6]],"14":[[2,6]],"15":[[2,6]],"4":[[2,6]]},{"12":17,"16":18,"9":19,"28":[[1,20]],"29":[[1,15]]},{"29":[[2,8]],"28":[[2,8]]},{"29":[[2,9]],"28":[[2,9]]},{"29":[[2,10]],"28":[[2,10]]},{"6":[[1,21]],"4":[[1,22]]},{"18":23,"9":14,"29":[[1,15]],"6":[[2,13]],"4":[[2,13]]},{"4":[[2,15]],"6":[[2,15]],"29":[[2,15]]},{"19":[[1,24]]},{"19":[[2,26]],"4":[[2,26]],"15":[[2,26]],"14":[[2,26]],"13":[[2,26]],"8":[[2,26]],"28":[[2,26]],"29":[[2,26]],"21":[[2,26]],"22":[[2,26]],"27":[[2,26]],"30":[[2,26]]},{"8":[[2,5]],"13":[[2,5]],"14":[[2,5]],"15":[[2,5]],"4":[[2,5]]},{"16":25,"9":19,"28":[[1,20]],"29":[[1,15]],"4":[[2,7]],"15":[[2,7]],"14":[[2,7]],"13":[[2,7]],"8":[[2,7]]},{"8":[[2,12]],"13":[[2,12]],"14":[[2,12]],"15":[[2,12]],"4":[[2,12]],"28":[[2,12]],"29":[[2,12]]},{"29":[[2,24]],"28":[[2,24]],"4":[[2,24]],"15":[[2,24]],"14":[[2,24]],"13":[[2,24]],"8":[[2,24]],"30":[[2,24]],"27":[[2,24]],"22":[[2,24]],"21":[[2,24]]},{"29":[[2,25]],"28":[[2,25]],"4":[[2,25]],"15":[[2,25]],"14":[[2,25]],"13":[[2,25]],"8":[[2,25]],"30":[[2,25]],"27":[[2,25]],"22":[[2,25]],"21":[[2,25]]},{"1":[[2,1]]},{"6":[[1,26]]},{"4":[[2,14]],"6":[[2,14]],"29":[[2,14]]},{"20":27,"23":28,"24":29,"21":[[2,21]],"22":[[2,21]],"27":[[2,21]],"30":[[2,21]],"28":[[2,21]],"29":[[2,21]]},{"8":[[2,11]],"13":[[2,11]],"14":[[2,11]],"15":[[2,11]],"4":[[2,11]],"28":[[2,11]],"29":[[2,11]]},{"1":[[2,2]]},{"21":[[1,30]],"22":[[1,31]]},{"21":[[2,18]],"22":[[2,18]]},{"25":32,"16":33,"30":[[1,34]],"9":19,"28":[[1,20]],"29":[[1,15]],"21":[[2,28]],"22":[[2,28]],"27":[[2,28]]},{"29":[[2,16]],"6":[[2,16]],"4":[[2,16]]},{"23":35,"24":29,"21":[[2,21]],"22":[[2,21]],"27":[[2,21]],"30":[[2,21]],"28":[[2,21]
],"29":[[2,21]]},{"26":36,"27":[[1,37]],"21":[[2,23]],"22":[[2,23]]},{"21":[[2,20]],"22":[[2,20]],"27":[[2,20]],"30":[[2,20]],"28":[[2,20]],"29":[[2,20]]},{"21":[[2,27]],"22":[[2,27]],"27":[[2,27]]},{"21":[[2,17]],"22":[[2,17]]},{"22":[[2,19]],"21":[[2,19]]},{"16":38,"9":19,"28":[[1,20]],"29":[[1,15]]},{"21":[[2,22]],"22":[[2,22]]}],
parseError: function parseError(str, hash) {
throw new Error(str);
},
parse: function parse(input) {
var self = this, stack = [0], vstack = [null], table = this.table, yytext = "", yylineno = 0, yyleng = 0, shifts = 0, reductions = 0;
this.lexer.setInput(input);
this.lexer.yy = this.yy;
var parseError = this.yy.parseError = this.yy.parseError || this.parseError;
function lex() {
var token;
token = self.lexer.lex() || 1;
if (typeof token !== "number") {
token = self.symbols_[token];
}
return token;
}
var symbol, state, action, a, r, yyval = {}, p, len, ip = 0, newState, expected;
symbol = lex();
while (true) {
this.trace("stack:", JSON.stringify(stack), "\n\t\t\tinput:", this.lexer._input);
this.trace("vstack:", JSON.stringify(vstack));
state = stack[stack.length - 1];
action = table[state] && table[state][symbol];
if (typeof action == "undefined" || !action.length || !action[0]) {
expected = [];
for (p in table[state]) {
if (this.terminals_[p] && p != 1) {
expected.push("'" + this.terminals_[p] + "'");
}
}
self.trace("stack:", JSON.stringify(stack), "symbol:", symbol, "input", this.lexer.upcomingInput());
if (this.lexer.upcomingInput) {
self.trace("input", this.lexer.upcomingInput());
}
parseError("Parse error on line " + (yylineno + 1) + ". Expecting: " + expected.join(", ") + "\n" + (this.lexer.showPosition && this.lexer.showPosition()), {text: this.lexer.match, token: symbol, line: this.lexer.yylineno});
}
this.trace("action:", action);
if (action.length > 1) {
throw new Error("Parse Error: multiple actions possible at state: " + state + ", token: " + symbol);
}
a = action[0];
switch (a[0]) {
case 1:
shifts++;
stack.push(symbol);
++ip;
yyleng = this.lexer.yyleng;
yytext = this.lexer.yytext;
yylineno = this.lexer.yylineno;
symbol = lex();
vstack.push(null);
stack.push(a[1]);
break;
case 2:
reductions++;
len = this.productions_[a[1]][1];
this.trace("reduce by: ", this.productions ? this.productions[a[1]] : a[1]);
yyval.$ = vstack[vstack.length - len];
r = this.performAction.call(yyval, yytext, yyleng, yylineno, this.yy, a[1], vstack);
if (typeof r !== "undefined") {
return r;
}
this.trace("yyval=", JSON.stringify(yyval.$));
if (len) {
this.trace("production length:", len);
stack = stack.slice(0, -1 * len * 2);
vstack = vstack.slice(0, -1 * len);
}
stack.push(this.productions_[a[1]][0]);
vstack.push(yyval.$);
newState = table[stack[stack.length - 2]][stack[stack.length - 1]];
stack.push(newState);
break;
case 3:
this.trace("stack:", stack, "\n\tinput:", this.lexer._input);
this.trace("vstack:", JSON.stringify(vstack));
this.trace("Total reductions:", reductions);
this.trace("Total shifts:", shifts);
return true;
default:;
}
}
return true;
}};/* Jison generated lexer */
var lexer = (function(){var lexer = ({EOF:"",
parseError:function parseError(str, hash) {
if (this.yy.parseError) {
this.yy.parseError(str, hash);
} else {
throw new Error(str);
}
},
setInput:function (input) {
this._input = input;
this._more = this._less = this.done = false;
this.yylineno = this.yyleng = 0;
this.yytext = this.matched = this.match = "";
return this;
},
input:function () {
var ch = this._input[0];
this.yytext += ch;
this.yyleng++;
this.match += ch;
this.matched += ch;
var lines = ch.match(/\n/);
if (lines) {
this.yylineno++;
}
this._input = this._input.slice(1);
return ch;
},
unput:function (ch) {
this._input = ch + this._input;
return this;
},
more:function () {
this._more = true;
return this;
},
pastInput:function () {
var past = this.matched.substr(0, this.matched.length - this.match.length);
return (past.length > 20 ? "..." : "") + past.substr(-20).replace(/\n/g, "");
},
upcomingInput:function () {
var next = this.match;
if (next.length < 20) {
next += this._input.substr(0, 20 - next.length);
}
return (next.substr(0, 20) + (next.length > 20 ? "..." : "")).replace(/\n/g, "");
},
showPosition:function () {
var pre = this.pastInput();
var c = (new Array(pre.length + 1)).join("-");
return pre + this.upcomingInput() + "\n" + c + "^";
},
next:function () {
if (this.done) {
return this.EOF;
}
if (!this._input) {
this.done = true;
}
var token, match, lines;
if (!this._more) {
this.yytext = "";
this.match = "";
}
for (var i = 0; i < this.rules.length; i++) {
match = this._input.match(this.rules[i]);
if (match) {
lines = match[0].match(/\n/g);
if (lines) {
this.yylineno += lines.length;
}
this.yytext += match[0];
this.match += match[0];
this.matches = match;
this.yyleng = this.yytext.length;
this._more = false;
this._input = this._input.slice(match[0].length);
this.matched += match[0];
token = this.performAction.call(this, this.yy, this, i);
if (token) {
return token;
} else {
return;
}
}
}
if (this._input == this.EOF) {
return this.EOF;
} else {
this.parseError("Lexical error on line " + (this.yylineno + 1) + ". Unrecognized text.\n" + this.showPosition(), {text: "", token: null, line: this.yylineno});
}
},
lex:function () {
var r = this.next();
if (typeof r !== "undefined") {
return r;
} else {
return this.lex();
}
}});
lexer.performAction = function anonymous(yy, yy_) {
switch (arguments[2]) {
case 0:
break;
case 1:
return yy.lexComment(this);
break;
case 2:
return 29;
break;
case 3:
yy_.yytext = yy_.yytext.substr(1, yy_.yyleng - 2);
return 28;
break;
case 4:
yy_.yytext = yy_.yytext.substr(1, yy_.yyleng - 2);
return 28;
break;
case 5:
return 19;
break;
case 6:
return 21;
break;
case 7:
return 22;
break;
case 8:
return 4;
break;
case 9:
return 27;
break;
case 10:
return 8;
break;
case 11:
return 13;
break;
case 12:
return 14;
break;
case 13:
return 15;
break;
case 14:
break;
case 15:
return yy.lexAction(this);
break;
case 16:
yy_.yytext = yy_.yytext.substr(1, yy_.yyleng - 2);
return 30;
break;
case 17:
yy_.yytext = yy_.yytext.substr(1, yy_.yyleng - 2);
return 30;
break;
case 18:
break;
case 19:
return 6;
break;
default:;
}
};
lexer.rules = [/^\s+/, /^\/\*[^*]*\*/, /^[a-zA-Z][a-zA-Z0-9_-]*/, /^"[^"]+"/, /^'[^']+'/, /^:/, /^;/, /^\|/, /^%%/, /^%prec\b/, /^%start\b/, /^%left\b/, /^%right\b/, /^%nonassoc\b/, /^%[a-zA-Z]+[^\n]*/, /^\{\{[^}]*\}/, /^\{[^}]*\}/, /^<[^>]*>/, /^./, /^$/];return lexer;})()
parser.lexer = lexer;
return parser;
})();
if (typeof require !== 'undefined') {
exports.parser = bnf;
exports.parse = function () { return bnf.parse.apply(bnf, arguments); }
exports.main = function commonjsMain(args) {
var cwd = require("file").path(require("file").cwd());
if (!args[1]) {
throw new Error("Usage: " + args[0] + " FILE");
}
var source = cwd.join(args[1]).read({charset: "utf-8"});
this.parse(source);
}
if (require.main === module) {
exports.main(require("system").args);
}
}

File diff suppressed because one or more lines are too long

94
vendor/jison/lib/jison/util/set.js vendored Normal file
View file

@ -0,0 +1,94 @@
// Set class to wrap arrays
if (typeof require !== 'undefined')
var typal = require("./typal").typal;
// Instance methods mixed into the Set class constructed below.
var setMixin = {
    // Build from an array (copied unless `raw` is truthy), or from the
    // bare argument list when the first argument is not an array.
    constructor: function Set_constructor (set, raw) {
        this._items = [];
        if (set && set.constructor === Array)
            this._items = raw ? set: set.slice(0);
        else if(arguments.length)
            this._items = [].slice.call(arguments,0);
    },
    // append setB's items to this set IN PLACE; returns this
    concat: function concat (setB) {
        this._items.push.apply(this._items, setB._items || setB);
        return this;
    },
    // equality: same size and every item of this is contained in `set`
    eq: function eq (set) {
        return this._items.length === set._items.length && this.subset(set);
    },
    // indexOf that honors an item's own eq() method when present
    indexOf: function indexOf (item) {
        if(item && item.eq) {
            for(var k=0; k<this._items.length;k++)
                if(item.eq(this._items[k]))
                    return k;
            return -1;
        }
        return this._items.indexOf(item);
    },
    // new set: copy of this plus the items of `set` missing from this
    union: function union (set) {
        return (new Set(this._items)).concat(this.complement(set));
    },
    // new set: items of this that are also in `set`
    intersection: function intersection (set) {
        return this.filter(function (elm) {
            return set.contains(elm);
        });
    },
    // new set: items of `set` NOT in this (relative complement)
    complement: function complement (set) {
        var that = this;
        return set.filter(function sub_complement (elm) {
            return !that.contains(elm);
        });
    },
    // true when every item of this is contained in `set`
    subset: function subset (set) {
        var cont = true;
        for (var i=0; i<this._items.length && cont;i++) {
            cont = cont && set.contains(this._items[i]);
        }
        return cont;
    },
    superset: function superset (set) {
        return set.subset(this);
    },
    // like union, but mutates this set in place (via concat)
    joinSet: function joinSet (set) {
        return this.concat(this.complement(set));
    },
    contains: function contains (item) { return this.indexOf(item) !== -1; },
    // item accessors; the `val` parameter is accepted but unused
    item: function item (v, val) { return this._items[v]; },
    i: function i (v, val) { return this._items[v]; },
    first: function first () { return this._items[0]; },
    last: function last () { return this._items[this._items.length-1]; },
    size: function size () { return this._items.length; },
    isEmpty: function isEmpty () { return this._items.length === 0; },
    copy: function copy () { return new Set(this._items); },
    toString: function toString () { return this._items.toString(); }
};
// Delegate these Array methods straight to the backing _items array…
"push shift unshift forEach some every join sort".split(' ').forEach(function (e,i) {
    setMixin[e] = function () { return Array.prototype[e].apply(this._items, arguments); };
    // NOTE(review): Function.prototype.name is not writable in most
    // engines, so this assignment is silently ignored there.
    setMixin[e].name = e;
});
// …and wrap the slicing ones so they return a new Set (raw: the result
// array is adopted without copying).
"filter slice map".split(' ').forEach(function (e,i) {
    setMixin[e] = function () { return new Set(Array.prototype[e].apply(this._items, arguments), true); };
    setMixin[e].name = e;
});
// The Set class proper, plus one static helper mixed onto the class.
var Set = typal.construct(setMixin).mix({
    // static: merge array b into array a IN PLACE (a is returned);
    // membership is tracked via object keys, so items are compared by
    // their string representation
    union: function (a, b) {
        var ar = {};
        for (var k=a.length-1;k >=0;--k) {
            ar[a[k]] = true;
        }
        for (var i=b.length-1;i >= 0;--i) {
            if (!ar[b[i]]) {
                a.push(b[i]);
            }
        }
        return a;
    }
});
// export for CommonJS hosts
if (typeof exports !== 'undefined')
    exports.Set = Set;

90
vendor/jison/lib/jison/util/typal.js vendored Normal file
View file

@ -0,0 +1,90 @@
/*
* Introduces a typal object to make classical/prototypal patterns easier
* Plus some AOP sugar
*
* By Zachary Carter <zach@carter.name>
* MIT Licensed
* */
var typal = (function () {
    // Object.create shim for pre-ES5 engines
    var create = Object.create || function (o) { function F(){}; F.prototype = o; return new F(); };
    // mixin keys starting with before/after are treated as advice
    var position = /^(before|after)/;
    // basic method layering
    // always returns original method's return value
    function layerMethod(k, fun) {
        var pos = k.match(position)[0],
            key = k.replace(position, ''),
            prop = this[key];
        if (pos === 'after') {
            // after-advice receives the original's return value as its
            // first argument, followed by the original arguments
            this[key] = function () {
                var ret = prop.apply(this, arguments);
                var args = [].slice.call(arguments);
                args.splice(0, 0, ret);
                fun.apply(this, args);
                return ret;
            }
        } else if (pos === 'before') {
            this[key] = function () {
                fun.apply(this, arguments);
                var ret = prop.apply(this, arguments);
                return ret;
            }
        }
    }
    // mixes each argument's own properties into calling object,
    // overwriting them or layering them. i.e. an object method 'meth' is
    // layered by mixin methods 'beforemeth' or 'aftermeth'
    function typal_mix() {
        var self = this;
        for(var i=0,o,k; i<arguments.length; i++) {
            o=arguments[i];
            if (!o) continue;
            // constructor/toString are copied explicitly because for-in
            // skips them in engines where they are non-enumerable
            if (Object.prototype.hasOwnProperty.call(o,'constructor'))
                this.constructor = o.constructor;
            if (Object.prototype.hasOwnProperty.call(o,'toString'))
                this.toString = o.toString;
            for(k in o) {
                if (Object.prototype.hasOwnProperty.call(o, k)) {
                    if(k.match(position) && typeof this[k.replace(position, '')] === 'function')
                        layerMethod.call(this, k, o[k]);
                    else
                        this[k] = o[k];
                }
            }
        }
        return this;
    }
    return {
        // extend object with own properties of each argument
        mix: typal_mix,
        // sugar for object begetting and mixing
        // - Object.create(typal).mix(etc, etc);
        // + typal.beget(etc, etc);
        beget: function typal_beget() {
            return arguments.length ? typal_mix.apply(create(this), arguments) : create(this);
        },
        // Creates a new Class function based on an object with a constructor method
        construct: function typal_construct() {
            var o = typal_mix.apply(create(this), arguments);
            var constructor = o.constructor;
            var Klass = o.constructor = function () { return constructor.apply(this, arguments); };
            Klass.prototype = o;
            Klass.mix = typal_mix; // allow for easy singleton property extension
            return Klass;
        },
        // no op
        constructor: function typal_constructor() { return this; }
    };
})();
// export for CommonJS hosts
if (typeof exports !== 'undefined')
    exports.typal = typal;

14
vendor/jison/package.json vendored Normal file
View file

@ -0,0 +1,14 @@
{
"name": "jison",
"author": "Zach Carter",
"email": "zach@carter.name",
"keywords": [
"jison",
"parser",
"lexer",
"compiler"
],
"githubName": "jison",
"type": "zip",
"location": "http://github.com/zaach/jison/zipball/master"
}

110
vendor/jison/src/bnf.jison vendored Normal file
View file

@ -0,0 +1,110 @@
%%
spec
: declaration_list '%%' grammar EOF
{$$ = $1; $$.bnf = $3; return $$;}
| declaration_list '%%' grammar '%%' EOF
{$$ = $1; $$.bnf = $3; return $$;}
;
declaration_list
: declaration_list declaration
{$$ = $1; yy.addDeclaration($$, $2);}
|
<$$ = {};>
;
declaration
: START id
<$$ = {start: $2};>
| operator
<$$ = {operator: $1};>
;
operator
: associativity token_list
{$$ = [$1]; $$.push.apply($$, $2);}
;
associativity
: LEFT
{$$ = 'left';}
| RIGHT
{$$ = 'right';}
| NONASSOC
{$$ = 'nonassoc';}
;
token_list
: token_list symbol
{$$ = $1; $$.push($2);}
| symbol
{$$ = [$1];}
;
grammar
: production_list
{$$ = $1;}
;
production_list
: production_list production
{$$ = $1; $$[$2[0]] = $2[1];}
| production
<$$ = {}; $$[$1[0]] = $1[1];>
;
production
: id ':' handle_list ';'
{$$ = [$1, $3];}
;
handle_list
: handle_list '|' handle_action
{$$ = $1; $$.push($3);}
| handle_action
{$$ = [$1];}
;
handle_action
: handle action prec
{$$ = [($1.length ? $1.join(' ') : '')];
if($2) $$.push($2);
if($3) $$.push($3);
if ($$.length === 1) $$ = $$[0];
}
;
handle
: handle symbol
{$$ = $1; $$.push($2)}
|
{$$ = [];}
;
prec
: PREC symbol
<$$ = {prec: $2};>
|
{$$ = null;}
;
symbol
: id
{$$ = $1;}
| STRING
{$$ = yytext;}
;
id
: ID
{$$ = yytext;}
;
action
: ACTION
{$$ = yytext;}
|
{$$ = '';}
;

25
vendor/jison/src/bnf.jisonlex vendored Normal file
View file

@ -0,0 +1,25 @@
%%
\s+ {/* skip whitespace */}
"/*"[^*]*"*" {return yy.lexComment(this);}
[a-zA-Z][a-zA-Z0-9_-]* {return 'ID';}
'"'[^"]+'"' {yytext = yytext.substr(1, yyleng-2); return 'STRING';}
"'"[^']+"'" {yytext = yytext.substr(1, yyleng-2); return 'STRING';}
":" {return ':';}
";" {return ';';}
"|" {return '|';}
"%%" {return '%%';}
"%prec" {return 'PREC';}
"%start" {return 'START';}
"%left" {return 'LEFT';}
"%right" {return 'RIGHT';}
"%nonassoc" {return 'NONASSOC';}
"%"[a-zA-Z]+[^\n]* {/* ignore unrecognized decl */}
"{{"[^}]*"}" {return yy.lexAction(this);}
"{"[^}]*"}" {yytext = yytext.substr(1, yyleng-2); return 'ACTION';}
"<"[^>]*">" {yytext = yytext.substr(1, yyleng-2); return 'ACTION';}
. {/* ignore bad characters */}
<<EOF>> {return 'EOF';}
%%

24
vendor/jison/src/bnf.lex.json vendored Normal file
View file

@ -0,0 +1,24 @@
{
"rules": [
["\\s+", "/* skip whitespace */"],
["\\/\\*[^*]*\\*", "return yy.lexComment(this);"],
["[a-zA-Z][a-zA-Z0-9_-]*", "return 'ID';"],
["\"[^\"]+\"", "yytext = yytext.substr(1, yyleng-2); return 'STRING';"],
["'[^']+'", "yytext = yytext.substr(1, yyleng-2); return 'STRING';"],
[":", "return ':';"],
[";", "return ';';"],
["\\|", "return '|';"],
["%%", "return '%%';"],
["%prec\\b", "return 'PREC';"],
["%start\\b", "return 'START';"],
["%left\\b", "return 'LEFT';"],
["%right\\b", "return 'RIGHT';"],
["%nonassoc\\b", "return 'NONASSOC';"],
["%[a-zA-Z]+[^\\n]*", "/* ignore unrecognized decl */"],
["\\{\\{[^}]*\\}", "return yy.lexAction(this);"],
["\\{[^}]*\\}", "yytext = yytext.substr(1, yyleng-2); return 'ACTION';"],
["<[^>]*>", "yytext = yytext.substr(1, yyleng-2); return 'ACTION';"],
[".", "/* ignore bad characters */"],
["$", "return 'EOF';"]
]
}

129
vendor/jison/src/jisonlex.jison vendored Normal file
View file

@ -0,0 +1,129 @@
/* Jison lexer file format grammar */
%nonassoc '/'
%left '*' '+' '?' RANGE_REGEX
%%
lex
: definitions include '%%' rules '%%' EOF
{{ $$ = {rules: $4};
if ($1.length) $$.macros = $1;
if ($2) $$.actionInclude = $2;
return $$; }}
| definitions include '%%' rules EOF
{{ $$ = {rules: $4};
if ($1.length) $$.macros = $1;
if ($2) $$.actionInclude = $2;
return $$; }}
;
include
: action
|
;
definitions
: definitions definition
{ $$ = $1; $$.push($2); }
|
{ $$ = []; }
;
definition
: name regex
{ $$ = [$1, $2]; }
;
name
: NAME
{ $$ = yytext; }
;
rules
: rules rule
{ $$ = $1; $$.push($2); }
| rule
{ $$ = [$1]; }
;
rule
: regex action
{ $$ = [$1, $2]; }
;
action
: ACTION
{ $$ = yytext; }
;
regex
: regex_list
{ $$ = $1;
if ($$.match(/[\w\d]$/))
$$ += "\\b";
}
;
regex_list
: regex_list '|' regex_list
{ $$ = $1+'|'+$3; }
| regex_concat
;
regex_concat
: regex_concat regex_base
{ $$ = $1+$2; }
| regex_base
;
regex_base
: '(' regex_list ')'
{ $$ = '('+$2+')'; }
| regex_base '+'
{ $$ = $1+'+'; }
| regex_base '*'
{ $$ = $1+'*'; }
| regex_base '?'
{ $$ = $1+'?'; }
| '/' regex_base
{ $$ = '(?='+$2+')'; }
| name_expansion
| regex_base range_regex
{ $$ = $1+$2; }
| any_group_regex
| '.'
{ $$ = '.'; }
| '^'
{ $$ = '^'; }
| '$'
{ $$ = '$'; }
| string
| escape_char
;
name_expansion
: '{' name '}'
{{ $$ = '{'+$2+'}'; }}
;
any_group_regex
: ANY_GROUP_REGEX
{ $$ = yytext; }
;
escape_char
: ESCAPE_CHAR
{ $$ = yytext; }
;
range_regex
: RANGE_REGEX
{ $$ = yytext; }
;
string
: STRING_LIT
{ $$ = yy.prepareString(yytext.substr(1, yytext.length-2)); }
;

31
vendor/jison/src/jisonlex.jisonlex vendored Normal file
View file

@ -0,0 +1,31 @@
%%
\n+ {yy.freshLine = true;}
\s+ {if (yy.ruleSection) yy.freshLine = false;}
"y{"[^}]*"}" {yytext = yytext.substr(2, yytext.length-3);return 'ACTION';}
[a-zA-Z_][a-zA-Z0-9_-]* {return 'NAME';}
'"'("\\\\"|'\"'|[^"])*'"' {yytext = yytext.replace(/\\"/g,'"');return 'STRING_LIT';}
"'"("\\\\"|"\'"|[^'])*"'" {yytext = yytext.replace(/\\'/g,"'");return 'STRING_LIT';}
"|" {return '|';}
"["("\]"|[^\]])*"]" {return 'ANY_GROUP_REGEX';}
"(" {return '(';}
")" {return ')';}
"+" {return '+';}
"*" {return '*';}
"?" {return '?';}
"^" {return '^';}
"/" {return '/';}
"\\"[a-zA-Z0] {return 'ESCAPE_CHAR';}
"$" {return '$';}
"<<EOF>>" {return '$';}
"." {return '.';}
"%%" {yy.ruleSection = true; return '%%';}
"{"\d+(","\s?\d+|",")?"}" {return 'RANGE_REGEX';}
/"{" %{if (yy.freshLine) {this.input('{');return '{';} else this.unput('y');%}
"}" %{return '}';%}
"%{"(.|\n)*?"%}" {yytext = yytext.substr(2, yytext.length-4);return 'ACTION';}
. {/* ignore bad characters */}
<<EOF>> {return 'EOF';}
%%

30
vendor/jison/src/jisonlex.lex.json vendored Normal file
View file

@ -0,0 +1,30 @@
{
"rules": [
["\\n+", "yy.freshLine = true;"],
["\\s+", "yy.freshLine = false;"],
["y\\{[^}]*\\}", "yytext = yytext.substr(2, yytext.length-3);return 'ACTION';"],
["[a-zA-Z_][a-zA-Z0-9_-]*", "return 'NAME';"],
["\"(?:[^\"]|\\\\\")*\"", "return 'STRING_LIT';"],
["'(?:[^']|\\\\')*'", "return 'STRING_LIT';"],
["\\|", "return '|';"],
["\\[(?:\\\\\\]|[^\\]])*\\]", "return 'ANY_GROUP_REGEX';"],
["\\(", "return '(';"],
["\\)", "return ')';"],
["\\+", "return '+';"],
["\\*", "return '*';"],
["\\?", "return '?';"],
["\\^", "return '^';"],
["\\/", "return '/';"],
["\\\\[a-zA-Z0]", "return 'ESCAPE_CHAR';"],
["\\$", "return '$';"],
["<<EOF>>", "return '$';"],
["\\.", "return '.';"],
["%%", "return '%%';"],
["\\{\\d+(?:,\\s?\\d+|,)?\\}", "return 'RANGE_REGEX';"],
["(?=\\{)", "if(yy.freshLine){this.input('{');return '{';} else this.unput('y');"],
["\\}", "return '}';"],
["%\\{(?:.|\\n)*?\\}%", "yytext = yytext.substr(2, yytext.length-4);return 'ACTION';"],
[".", "/* ignore bad characters */"],
["$", "return 'EOF';"]
]
}

8
vendor/jison/tests/all-tests.js vendored Executable file
View file

@ -0,0 +1,8 @@
#!/usr/bin/env narwhal
// Aggregate entry point for the Jison test suites: re-exports each sub-suite
// so a CommonJS test runner can discover them all from one module.
exports.testParser = require("./parser/parser-tests");
exports.testLexer = require("./lexer/lexer-tests");
exports.testGrammar = require("./grammar/grammar-tests");
// When executed directly (narwhal), run everything and exit with the
// failure count as the process status.
if (require.main === module)
require("os").exit(require("test").run(exports));

91
vendor/jison/tests/grammar/bnf.js vendored Normal file
View file

@ -0,0 +1,91 @@
// Test for the BNF meta-grammar: builds a Jison parser for Jison's own
// grammar syntax from an inline lex + bnf spec, then parses a sample grammar
// exercising %start, associativity declarations, actions, and %prec.
var Jison = require("../setup").Jison,
Lexer = require("../setup").Lexer,
assert = require("assert");
exports["test BNF parser"] = function () {
// Grammar-of-grammars: the "lex" section tokenizes BNF source; the "bnf"
// section assembles {start, operators, bnf} via yy.addDeclaration (below)
// and the production actions.
var grammar = {
"lex": {
"rules": [
["\\s+", "/* skip whitespace */"],
["[a-zA-Z][a-zA-Z0-9_-]*", "return 'ID';"],
["\"[^\"]+\"", "yytext = yytext.substr(1, yyleng-2); return 'STRING';"],
["'[^']+'", "yytext = yytext.substr(1, yyleng-2); return 'STRING';"],
[":", "return ':';"],
[";", "return ';';"],
["\\|", "return '|';"],
["%%", "return '%%';"],
["%prec", "return 'PREC';"],
["%start", "return 'START';"],
["%left", "return 'LEFT';"],
["%right", "return 'RIGHT';"],
["%nonassoc", "return 'NONASSOC';"],
["\\{[^}]*\\}", "yytext = yytext.substr(1, yyleng-2); return 'ACTION';"],
[".", "/* ignore bad characters */"],
["$", "return 'EOF';"]
]
},
"bnf": {
"spec" :[[ "declaration_list %% grammar EOF", "$$ = $1; $$.bnf = $3; return $$;" ]],
"declaration_list" :[[ "declaration_list declaration", "$$ = $1; yy.addDeclaration($$, $2);" ],
[ "", "$$ = {};" ]],
"declaration" :[[ "START id", "$$ = {start: $2};" ],
[ "operator", "$$ = {operator: $1};" ]],
"operator" :[[ "associativity token_list", "$$ = [$1]; $$.push.apply($$, $2);" ]],
"associativity" :[[ "LEFT", "$$ = 'left';" ],
[ "RIGHT", "$$ = 'right';" ],
[ "NONASSOC", "$$ = 'nonassoc';" ]],
"token_list" :[[ "token_list symbol", "$$ = $1; $$.push($2);" ],
[ "symbol", "$$ = [$1];" ]],
"grammar" :[[ "production_list", "$$ = $1;" ]],
"production_list" :[[ "production_list production", "$$ = $1; $$[$2[0]] = $2[1];" ],
[ "production", "$$ = {}; $$[$1[0]] = $1[1];" ]],
"production" :[[ "id : handle_list ;", "$$ = [$1, $3];" ]],
"handle_list" :[[ "handle_list | handle_action", "$$ = $1; $$.push($3);" ],
[ "handle_action", "$$ = [$1];" ]],
"handle_action" :[[ "handle action prec", "$$ = [($1.length ? $1.join(' ') : '')]; if($2) $$.push($2); if($3) $$.push($3); if ($$.length === 1) $$ = $$[0];" ]],
"handle" :[[ "handle symbol", "$$ = $1; $$.push($2)" ],
[ "", "$$ = [];" ]],
"prec" :[[ "PREC symbol", "$$ = {prec: $2};" ],
[ "", "$$ = null;" ]],
"symbol" :[[ "id", "$$ = $1;" ],
[ "STRING", "$$ = yytext;" ]],
"id" :[[ "ID", "$$ = yytext;" ]],
"action" :[[ "ACTION", "$$ = yytext;" ],
[ "", "$$ = '';" ]]
}
};
var parser = new Jison.Parser(grammar);
// Folds %start and operator declarations into the grammar object being
// built by the 'spec'/'declaration_list' productions above.
parser.yy.addDeclaration = function (grammar, decl) {
if (decl.start) {
grammar.start = decl.start
}
if (decl.operator) {
if (!grammar.operators) {
grammar.operators = [];
}
grammar.operators.push(decl.operator);
}
};
// Sample input covering every declaration kind plus empty alternatives.
var result = parser.parse('%start foo %left "+" "-" %right "*" "/" %nonassoc "=" STUFF %left UMINUS %% foo : bar baz blitz { stuff } %prec GEMINI | bar %prec UMINUS | ;\nbar: { things };\nbaz: | foo ;');
assert.ok(result, "parse bnf production");
};

65
vendor/jison/tests/grammar/bnf_parse.js vendored Normal file
View file

@ -0,0 +1,65 @@
// Tests for the bnf module's text parser: each case feeds BNF source to
// bnf.parse and deep-compares the resulting grammar object against a
// hand-written expectation.
var assert = require("assert"),
bnf = require("../../lib/jison/bnf"),
json2jison = require("../../lib/jison/json2jison");
// Plain alternation and multiple productions.
exports["test basic grammar"] = function () {
var grammar = "%% test: foo bar | baz ; hello: world ;";
var expected = {bnf: {test: ["foo bar", "baz"], hello: ["world"]}};
assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
};
// Newline-separated layout and a trailing empty alternative.
exports["test classy grammar"] = function () {
var grammar = "%%\n\npgm \n: cdl MAIN LBRACE vdl el RBRACE ENDOFFILE \n; cdl \n: c cdl \n| \n;";
var expected = {bnf: {pgm: ["cdl MAIN LBRACE vdl el RBRACE ENDOFFILE"], cdl: ["c cdl", ""]}};
assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
};
// Actions and %prec annotations become [handle, action, {prec}] tuples.
exports["test advanced grammar"] = function () {
var grammar = "%% test: foo bar {action} | baz ; hello: world %prec UMINUS ;extra: foo {action} %prec '-' ;";
var expected = {bnf: {test: [["foo bar", "action" ], "baz"], hello: [[ "world", {prec:"UMINUS"} ]], extra: [[ "foo", "action", {prec: "-"} ]]}};
assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
};
// An empty alternative yields the empty-string handle.
exports["test nullable rule"] = function () {
var grammar = "%% test: foo bar | ; hello: world ;";
var expected = {bnf: {test: ["foo bar", ""], hello: ["world"]}};
assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
};
// An empty alternative may still carry an action.
exports["test nullable rule with action"] = function () {
var grammar = "%% test: foo bar | {action}; hello: world ;";
var expected = {bnf: {test: ["foo bar", [ "", "action" ]], hello: ["world"]}};
assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
};
// < > delimited actions can contain braces.
exports["test nullable rule with < > delimited action"] = function () {
var grammar = "%% test: foo bar | <action{}>; hello: world ;";
var expected = {bnf: {test: ["foo bar", [ "", "action{}" ]], hello: ["world"]}};
assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
};
// {{ }} delimited actions can contain unbalanced braces.
exports["test nullable rule with {{ }} delimited action"] = function () {
var grammar = "%% test: foo bar | {{action{};}}; hello: world ;";
var expected = {bnf: {test: ["foo bar", [ "", "action{};" ]], hello: ["world"]}};
assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
};
// C-style comments before the %% marker are ignored.
exports["test comment"] = function () {
var grammar = "/* comment */ %% hello: world ;";
var expected = {bnf: {hello: ["world"]}};
assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
};
// A lone '*' inside a comment does not terminate it; comments may also
// appear between symbols in a production.
exports["test comment with nested *"] = function () {
var grammar = "/* comment * not done */ %% hello: /* oh hai */ world ;";
var expected = {bnf: {hello: ["world"]}};
assert.deepEqual(bnf.parse(grammar), expected, "grammar should be parsed correctly");
};

10
vendor/jison/tests/grammar/grammar-tests.js vendored Executable file
View file

@ -0,0 +1,10 @@
#!/usr/bin/env narwhal
// Entry point for the grammar-related suites. Two suites (bnf, lex) are
// currently disabled via the commented-out requires below.
//exports.testBNF = require("./bnf");
exports.testBNFParse = require("./bnf_parse");
exports.testConvert = require("./json2jison");
//exports.testLex = require("./lex");
exports.testLexParse = require("./lex_parse");
// When executed directly (narwhal), run the suites and exit with the
// failure count as the process status.
if (require.main === module)
require("os").exit(require("test").run(exports));

View file

@ -0,0 +1,24 @@
// Tests for the json2jison converter: BNF source is parsed to a JSON grammar
// and both the parsed result and the hand-written expectation are run through
// json2jison.convert, so the comparison is representation-insensitive.
var assert = require("assert"),
    bnf = require("../../lib/jison/bnf"),
    // Fixed: this binding was previously severed from the `var` chain by a
    // stray semicolon on the preceding line, which made `json2jison` an
    // implicit global assignment instead of a declared local.
    json2jison = require("../../lib/jison/json2jison");
// Plain alternation and multiple productions survive the round trip.
exports["test basic grammar"] = function () {
    var grammar = "%% test: foo bar | baz ; hello: world ;";
    var expected = {bnf: {test: ["foo bar", "baz"], hello: ["world"]}};
    assert.deepEqual(json2jison.convert(bnf.parse(grammar)), json2jison.convert(expected), "grammar should be parsed correctly");
};
// %start, brace-delimited actions, and %prec annotations are preserved.
exports["test advanced grammar"] = function () {
    var grammar = "%start foo %% test: foo bar | baz ; hello: world {action} %prec UM;";
    var expected = {start: "foo", bnf: {test: ["foo bar", "baz"], hello: [[ "world", "action", {prec: "UM"} ]]}};
    assert.deepEqual(json2jison.convert(bnf.parse(grammar)), json2jison.convert(expected), "grammar should be parsed correctly");
};
// {{ }} delimited actions may contain unbalanced braces.
exports["test actions"] = function () {
    var grammar = "%start foo %% test: foo bar | baz ; hello: world {{action{} }} %prec UM;";
    var expected = {start: "foo", bnf: {test: ["foo bar", "baz"], hello: [[ "world", "action{}", {prec: "UM"} ]]}};
    assert.deepEqual(json2jison.convert(bnf.parse(grammar)), json2jison.convert(expected), "grammar should be parsed correctly");
};

119
vendor/jison/tests/grammar/lex.jison vendored Normal file
View file

@ -0,0 +1,119 @@
%%
lex
: definitions include '%%' rules '%%' EOF
{{ $$ = {macros: $1, rules: $4};
if ($2) $$.actionInclude = $2;
return $$; }}
| definitions include '%%' rules EOF
{{ $$ = {macros: $1, rules: $4};
if ($2) $$.actionInclude = $2;
return $$; }}
;
include
: action
|
;
definitions
: definitions definition
{ $$ = $1; $$.concat($2); }
| definition
{ $$ = [$1]; }
;
definition
: name regex
{ $$ = [$1, $2]; }
;
name
: NAME
{ $$ = yytext; }
;
rules
: rules rule
{ $$ = $1; $$.push($2); }
| rule
{ $$ = [$1]; }
;
rule
: regex action
{ $$ = [$1, $2]; }
;
action
: ACTION
{ $$ = yytext; }
;
regex
: start_caret regex_list end_dollar
{ $$ = $1+$2+$3; }
;
start_caret
: '^'
{ $$ = '^'; }
|
{ $$ = ''; }
;
end_dollar
: '$'
{ $$ = '$'; }
|
{ $$ = ''; }
;
regex_list
: regex_list '|' regex_list
{ $$ = $1+'|'+$3; }
| regex_list regex_base
{ $$ = $1+$2;}
| regex_base
{ $$ = $1;}
;
regex_base
: '(' regex_list ')'
{ $$ = '('+$2+')'; }
| regex_base '+'
{ $$ = $1+'+'; }
| regex_base '*'
{ $$ = $1+'*'; }
| regex_base '?'
{ $$ = $1+'?'; }
| '/' regex_base
{ $$ = '(?='+$2+')'; }
| name_expansion
| regex_base range_regex
{ $$ = $1+$2; }
| any_group_regex
| '.'
{ $$ = '.'; }
| string
;
name_expansion
: '{' name '}'
{{ $$ = '{'+$2+'}'; }}
;
any_group_regex
: ANY_GROUP_REGEX
{ $$ = yytext; }
;
range_regex
: RANGE_REGEX
{ $$ = yytext; }
;
string
: STRING_LIT
{ $$ = yy.prepareString(yytext.substr(1, yytext.length-2)); }
;

58
vendor/jison/tests/grammar/lex.js vendored Normal file
View file

@ -0,0 +1,58 @@
var Jison = require("../setup").Jison,
    Lexer = require("../setup").Lexer,
    bnf = require("../../lib/jison/bnf"),
    assert = require("assert");
// Builds a parser for the lex-file language from grammar/lex.jison using an
// explicit rule-based lexer, then checks that a small lex source parses.
exports["test Lex parser"] = function () {
    // Hand-written lexer rules for tokenizing lex files; these mirror
    // grammar/lex/lex_grammar.lex.json.
    var lex = {
        "rules": [
            ["\\n+", "yy.freshLine = true;"],
            ["\\s+", "yy.freshLine = false;"],
            ["y\\{[^}]*\\}", "yytext = yytext.substr(2, yytext.length-3);return 'ACTION';"],
            ["[a-zA-Z_][a-zA-Z0-9_-]*", "return 'NAME';"],
            ["\"(?:[^\"]|\\\\\")*\"", "return 'STRING_LIT';"],
            ["'(?:[^']|\\\\')*'", "return 'STRING_LIT';"],
            ["\\|", "return '|';"],
            ["\\[(?:[^\\]]|\\\\])*\\]", "return 'ANY_GROUP_REGEX';"],
            ["\\(", "return '(';"],
            ["\\)", "return ')';"],
            ["\\+", "return '+';"],
            ["\\*", "return '*';"],
            ["\\?", "return '?';"],
            ["\\^", "return '^';"],
            ["\\/", "return '/';"],
            ["\\$", "return '$';"],
            ["%%", "return '%%';"],
            // Fixed: the alternation bar inside this group was escaped
            // ("\\|"), which made the rule require a literal '|' character
            // instead of separating the "{n,m}" and "{n,}" alternatives.
            // The equivalent rule in lex_grammar.lex.json uses a bare '|'.
            ["\\{\\d+(?:,\\s?\\d+|,)?\\}", "return 'RANGE_REGEX';"],
            ["(?=\\{)", "if(yy.freshLine){this.input('{');return '{';} else this.unput('y');"],
            ["\\}", "return '}';"],
            ["%\\{(?:.|\\n)*?\\}%", "yytext = yytext.substr(2, yytext.length-4);return 'ACTION';"],
            [".", "/* ignore bad characters */"],
            ["$", "return 'EOF';"]
        ]
    };
    // NOTE(review): unlike lex_grammar.lex.json, this rule set has no
    // "\\\\[a-zA-Z0]" ESCAPE_CHAR rule -- confirm whether that omission is
    // intentional for this fixture.
    var fs = require("file");
    var grammar = bnf.parse(fs.path(fs.dirname(module.id))
        .join('lex.jison')
        .read({charset: "utf-8"}));
    var parser = new Jison.Parser(grammar);
    parser.lexer = new Lexer(lex);
    // Escapes regex metacharacters so literal strings can be embedded in the
    // generated patterns verbatim.
    function encodeRE (s) { return s.replace(/([.*+?^${}()|[\]\/\\])/g, '\\$1'); }
    parser.yy = {
        prepareString: function (s) {
            s = encodeRE(s);
            if (s.match(/\w|\d$/)) {
                s = s+"\\b";
            }
            return s;
        }
    };
    var result = parser.parse('D [0-9]\nID [a-zA-Z][a-zA-Z0-9]+\n%%\n\n{D}"ohh\nai" {print(9);}\n"}" {stuff}');
    assert.ok(result, "parse bnf production");
};

View file

@ -0,0 +1,115 @@
D [0-9]
L [a-zA-Z_]
H [a-fA-F0-9]
E [Ee][+-]?{D}+
FS [fFlL]
IS [uUlL]*
%{
#include <stdio.h>
#include "y.tab.h"
void count();
%}
%%
"/*" { comment(); }
"auto" { count(); return(AUTO); }
"break" { count(); return(BREAK); }
"case" { count(); return(CASE); }
"char" { count(); return(CHAR); }
"const" { count(); return(CONST); }
"continue" { count(); return(CONTINUE); }
"default" { count(); return(DEFAULT); }
"do" { count(); return(DO); }
"double" { count(); return(DOUBLE); }
"else" { count(); return(ELSE); }
"enum" { count(); return(ENUM); }
"extern" { count(); return(EXTERN); }
"float" { count(); return(FLOAT); }
"for" { count(); return(FOR); }
"goto" { count(); return(GOTO); }
"if" { count(); return(IF); }
"int" { count(); return(INT); }
"long" { count(); return(LONG); }
"register" { count(); return(REGISTER); }
"return" { count(); return(RETURN); }
"short" { count(); return(SHORT); }
"signed" { count(); return(SIGNED); }
"sizeof" { count(); return(SIZEOF); }
"static" { count(); return(STATIC); }
"struct" { count(); return(STRUCT); }
"switch" { count(); return(SWITCH); }
"typedef" { count(); return(TYPEDEF); }
"union" { count(); return(UNION); }
"unsigned" { count(); return(UNSIGNED); }
"void" { count(); return(VOID); }
"volatile" { count(); return(VOLATILE); }
"while" { count(); return(WHILE); }
{L}({L}|{D})* { count(); return(check_type()); }
"0"[xX]{H}+{IS}? { count(); return(CONSTANT); }
"0"{D}+{IS}? { count(); return(CONSTANT); }
{D}+{IS}? { count(); return(CONSTANT); }
"L"?"'"("\'"|[^'])+"'" { count(); return(CONSTANT); }
{D}+{E}{FS}? { count(); return(CONSTANT); }
{D}*"."{D}+({E})?{FS}? { count(); return(CONSTANT); }
{D}+"."{D}*({E})?{FS}? { count(); return(CONSTANT); }
"L"?'"'('\"'|[^"])*'"' { count(); return(STRING_LITERAL); }
"..." { count(); return(ELLIPSIS); }
">>=" { count(); return(RIGHT_ASSIGN); }
"<<=" { count(); return(LEFT_ASSIGN); }
"+=" { count(); return(ADD_ASSIGN); }
"-=" { count(); return(SUB_ASSIGN); }
"*=" { count(); return(MUL_ASSIGN); }
"/=" { count(); return(DIV_ASSIGN); }
"%=" { count(); return(MOD_ASSIGN); }
"&=" { count(); return(AND_ASSIGN); }
"^=" { count(); return(XOR_ASSIGN); }
"|=" { count(); return(OR_ASSIGN); }
">>" { count(); return(RIGHT_OP); }
"<<" { count(); return(LEFT_OP); }
"++" { count(); return(INC_OP); }
"--" { count(); return(DEC_OP); }
"->" { count(); return(PTR_OP); }
"&&" { count(); return(AND_OP); }
"||" { count(); return(OR_OP); }
"<=" { count(); return(LE_OP); }
">=" { count(); return(GE_OP); }
"==" { count(); return(EQ_OP); }
"!=" { count(); return(NE_OP); }
";" { count(); return(';'); }
("{"|"<%") { count(); return('{'); }
("}"|"%>") %{ count(); return('}'); %}
"," { count(); return(','); }
":" { count(); return(':'); }
"=" { count(); return('='); }
"(" { count(); return('('); }
")" { count(); return(')'); }
("["|"<:") { count(); return('['); }
("]"|":>") { count(); return(']'); }
"." { count(); return('.'); }
"&" { count(); return('&'); }
"!" { count(); return('!'); }
"~" { count(); return('~'); }
"-" { count(); return('-'); }
"+" { count(); return('+'); }
"*" { count(); return('*'); }
"/" { count(); return('/'); }
"%" { count(); return('%'); }
"<" { count(); return('<'); }
">" { count(); return('>'); }
"^" { count(); return('^'); }
"|" { count(); return('|'); }
"?" { count(); return('?'); }
[ \t\v\n\f] { count(); }
. { /* ignore bad characters */ }
%%

View file

@ -0,0 +1,25 @@
%%
\s+ {/* skip whitespace */}
"/*"[^*]*"*" {return yy.lexComment(this);}
[a-zA-Z][a-zA-Z0-9_-]* {return 'ID';}
'"'[^"]+'"' {yytext = yytext.substr(1, yyleng-2); return 'STRING';}
"'"[^']+"'" {yytext = yytext.substr(1, yyleng-2); return 'STRING';}
":" {return ':';}
";" {return ';';}
"|" {return '|';}
"%%" {return '%%';}
"%prec" {return 'PREC';}
"%start" {return 'START';}
"%left" {return 'LEFT';}
"%right" {return 'RIGHT';}
"%nonassoc" {return 'NONASSOC';}
"%"[a-zA-Z]+[^\n]* {/* ignore unrecognized decl */}
"{{"[^}]*"}" {return yy.lexAction(this);}
"{"[^}]*"}" {yytext = yytext.substr(1, yyleng-2); return 'ACTION';}
"<"[^>]*">" {yytext = yytext.substr(1, yyleng-2); return 'ACTION';}
. {/* ignore bad characters */}
<<EOF>> {return 'EOF';}
%%

View file

@ -0,0 +1,24 @@
{
"rules": [
["\\s+", "/* skip whitespace */"],
["\\/\\*[^*]*\\*", "return yy.lexComment(this);"],
["[a-zA-Z][a-zA-Z0-9_-]*", "return 'ID';"],
["\"[^\"]+\"", "yytext = yytext.substr(1, yyleng-2); return 'STRING';"],
["'[^']+'", "yytext = yytext.substr(1, yyleng-2); return 'STRING';"],
[":", "return ':';"],
[";", "return ';';"],
["\\|", "return '|';"],
["%%", "return '%%';"],
["%prec\\b", "return 'PREC';"],
["%start\\b", "return 'START';"],
["%left\\b", "return 'LEFT';"],
["%right\\b", "return 'RIGHT';"],
["%nonassoc\\b", "return 'NONASSOC';"],
["%[a-zA-Z]+[^\\n]*", "/* ignore unrecognized decl */"],
["\\{\\{[^}]*\\}", "return yy.lexAction(this);"],
["\\{[^}]*\\}", "yytext = yytext.substr(1, yyleng-2); return 'ACTION';"],
["<[^>]*>", "yytext = yytext.substr(1, yyleng-2); return 'ACTION';"],
[".", "/* ignore bad characters */"],
["$", "return 'EOF';"]
]
}

View file

@ -0,0 +1,31 @@
%%
\n+ {yy.freshLine = true;}
\s+ {yy.freshLine = false;}
"y{"[^}]*"}" {yytext = yytext.substr(2, yytext.length-3);return 'ACTION';}
[a-zA-Z_][a-zA-Z0-9_-]* {return 'NAME';}
'"'([^"]|'\"')*'"' {return 'STRING_LIT';}
"'"([^']|"\'")*"'" {return 'STRING_LIT';}
"|" {return '|';}
"["("\]"|[^\]])*"]" {return 'ANY_GROUP_REGEX';}
"(" {return '(';}
")" {return ')';}
"+" {return '+';}
"*" {return '*';}
"?" {return '?';}
"^" {return '^';}
"/" {return '/';}
"\\"[a-zA-Z0] {return 'ESCAPE_CHAR';}
"$" {return '$';}
"<<EOF>>" {return '$';}
"." {return '.';}
"%%" {return '%%';}
"{"\d+(","\s?\d+|",")?"}" {return 'RANGE_REGEX';}
/"{" %{if(yy.freshLine){this.input('{');return '{';} else this.unput('y');%}
"}" %{return '}';%}
"%{"(.|\n)*?"}%" {yytext = yytext.substr(2, yytext.length-4);return 'ACTION';}
. {/* ignore bad characters */}
<<EOF>> {return 'EOF';}
%%

View file

@ -0,0 +1,30 @@
{
"rules": [
["\\n+", "yy.freshLine = true;"],
["\\s+", "yy.freshLine = false;"],
["y\\{[^}]*\\}", "yytext = yytext.substr(2, yytext.length-3);return 'ACTION';"],
["[a-zA-Z_][a-zA-Z0-9_-]*", "return 'NAME';"],
["\"([^\"]|\\\\\")*\"", "return 'STRING_LIT';"],
["'([^']|\\\\')*'", "return 'STRING_LIT';"],
["\\|", "return '|';"],
["\\[(\\\\\\]|[^\\]])*\\]", "return 'ANY_GROUP_REGEX';"],
["\\(", "return '(';"],
["\\)", "return ')';"],
["\\+", "return '+';"],
["\\*", "return '*';"],
["\\?", "return '?';"],
["\\^", "return '^';"],
["\\/", "return '/';"],
["\\\\[a-zA-Z0]", "return 'ESCAPE_CHAR';"],
["\\$", "return '$';"],
["<<EOF>>", "return '$';"],
["\\.", "return '.';"],
["%%", "return '%%';"],
["\\{\\d+(,\\s?\\d+|,)?\\}", "return 'RANGE_REGEX';"],
["(?=\\{)", "if(yy.freshLine){this.input('{');return '{';} else this.unput('y');"],
["\\}", "return '}';"],
["%\\{(.|\\n)*?\\}%", "yytext = yytext.substr(2, yytext.length-4);return 'ACTION';"],
[".", "/* ignore bad characters */"],
["$", "return 'EOF';"]
]
}

117
vendor/jison/tests/grammar/lex_parse.js vendored Normal file
View file

@ -0,0 +1,117 @@
// Tests for the jisonlex module: each case parses lex-file source with
// lex.parse and deep-compares the resulting {macros, rules, actionInclude}
// structure against a hand-written expectation.
var assert = require("assert"),
lex = require("../../lib/jison/jisonlex");
// Macro definitions before %% are collected; quoted literals are escaped
// and word-final literals get a trailing \b.
exports["test lex grammar with macros"] = function () {
var lexgrammar = 'D [0-9]\nID [a-zA-Z][a-zA-Z0-9]+\n%%\n\n{D}"ohhai" {print(9);}\n"{" {return \'{\';}';
var expected = {
macros: [["D", "[0-9]"], ["ID", "[a-zA-Z][a-zA-Z0-9]+"]],
rules: [
["{D}ohhai\\b", "print(9);"],
["\\{", "return '{';"]
]
};
assert.deepEqual(lex.parse(lexgrammar), expected, "grammar should be parsed correctly");
};
// A quoted "\n" is a literal backslash-n, while a bare \n stays an escape.
exports["test excaped chars"] = function () {
var lexgrammar = '%%\n"\\n"+ {return \'NL\';}\n\\n+ {return \'NL2\';}\n\\s+ {/* skip */}';
var expected = {
rules: [
["\\\\n+", "return 'NL';"],
["\\n+", "return 'NL2';"],
["\\s+", "/* skip */"]
]
};
assert.deepEqual(lex.parse(lexgrammar), expected, "grammar should be parsed correctly");
};
// Anchors, lookahead via '/', ranges, and lazy quantifiers.
exports["test advanced"] = function () {
var lexgrammar = '%%\n$ {return \'EOF\';}\n. {/* skip */}\n"stuff"*/("{"|";") {/* ok */}\n(.+)[a-z]{1,2}"hi"*? {/* skip */}\n';
var expected = {
rules: [
["$", "return 'EOF';"],
[".", "/* skip */"],
["stuff*(?=(\\{|;))", "/* ok */"],
["(.+)[a-z]{1,2}hi*?", "/* skip */"]
]
};
assert.deepEqual(lex.parse(lexgrammar), expected, "grammar should be parsed correctly");
};
// Escaped quotes inside string literals and ']' inside character classes.
exports["test [^\]]"] = function () {
var lexgrammar = '%%\n"["[^\\]]"]" {return true;}\n\'f"oo\\\'bar\' {return \'baz2\';}\n"fo\\"obar" {return \'baz\';}\n';
var expected = {
rules: [
["\\[[^\\]]\\]", "return true;"],
["f\"oo'bar\\b", "return 'baz2';"],
['fo"obar\\b', "return 'baz';"]
]
};
assert.deepEqual(lex.parse(lexgrammar), expected, "grammar should be parsed correctly");
};
// %{ ... %} actions keep their body (including newlines) verbatim.
exports["test multiline action"] = function () {
var lexgrammar = '%%\n"["[^\\]]"]" %{\nreturn true;\n%}\n';
var expected = {
rules: [
["\\[[^\\]]\\]", "\nreturn true;\n"]
]
};
assert.deepEqual(lex.parse(lexgrammar), expected, "grammar should be parsed correctly");
};
// A %{ ... %} block before %% becomes actionInclude.
exports["test include"] = function () {
var lexgrammar = '\nRULE [0-9]\n\n%{\n hi <stuff> \n%}\n%%\n"["[^\\]]"]" %{\nreturn true;\n%}\n';
var expected = {
macros: [["RULE", "[0-9]"]],
actionInclude: "\n hi <stuff> \n",
rules: [
["\\[[^\\]]\\]", "\nreturn true;\n"]
]
};
assert.deepEqual(lex.parse(lexgrammar), expected, "grammar should be parsed correctly");
};
// The checked-in bnf.jisonlex fixture parses to the checked-in JSON form.
exports["test bnf lex grammar"] = function () {
var fs = require("file");
var lexgrammar = lex.parse(fs.path(fs.dirname(module.id))
.join('lex', 'bnf.jisonlex')
.read({charset: "utf-8"}));
var expected = JSON.parse(fs.path(fs.dirname(module.id))
.join('lex', 'bnf.lex.json')
.read({charset: "utf-8"}));
assert.deepEqual(lexgrammar, expected, "grammar should be parsed correctly");
};
// Bootstrap: the lexer-language's own lex grammar parses to its JSON form.
exports["test lex grammar bootstrap"] = function () {
var fs = require("file");
var lexgrammar = lex.parse(fs.path(fs.dirname(module.id))
.join('lex', 'lex_grammar.jisonlex')
.read({charset: "utf-8"}));
var expected = JSON.parse(fs.path(fs.dirname(module.id))
.join('lex', 'lex_grammar.lex.json')
.read({charset: "utf-8"}));
assert.deepEqual(lexgrammar, expected, "grammar should be parsed correctly");
};
// Smoke test: the large ANSI C lexical grammar parses without error.
exports["test ANSI C lexical grammar"] = function () {
var fs = require("file");
var lexgrammar = lex.parse(fs.path(fs.dirname(module.id))
.join('lex', 'ansic.jisonlex')
.read({charset: "utf-8"}));
assert.ok(lexgrammar, "grammar should be parsed correctly");
};

6
vendor/jison/tests/lexer/lexer-tests.js vendored Executable file
View file

@ -0,0 +1,6 @@
#!/usr/bin/env narwhal
// Entry point for the lexer suites; currently only the RegExpLexer tests.
exports.testRegExpLexer = require("./regexplexer");
// When executed directly (narwhal), run the suite and exit with the
// failure count as the process status.
if (require.main === module)
require("os").exit(require("test").run(exports));

405
vendor/jison/tests/lexer/regexplexer.js vendored Normal file
View file

@ -0,0 +1,405 @@
// Tests for RegExpLexer: each case builds a lexer from a rule dictionary
// ([pattern, action] pairs) and asserts on the token stream produced for a
// fixed input.
var RegExpLexer = require("../setup").RegExpLexer,
assert = require("assert"),
jsDump = require("test/jsdump").jsDump;
// Input supplied to the constructor; tokens come back one lex() at a time.
exports["test basic matchers"] = function() {
var dict = {
rules: [
["x", "return 'X';" ],
["y", "return 'Y';" ],
["$", "return 'EOF';" ]
]
};
var input = "xxyx";
var lexer = new RegExpLexer(dict, input);
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "Y");
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "EOF");
};
// Input can also be supplied after construction via setInput().
exports["test set input after"] = function() {
var dict = {
rules: [
["x", "return 'X';" ],
["y", "return 'Y';" ],
["$", "return 'EOF';" ]
]
};
var input = "xxyx";
var lexer = new RegExpLexer(dict);
lexer.setInput(input);
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "Y");
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "EOF");
};
// A character no rule matches makes lex() throw.
exports["test unrecognized char"] = function() {
var dict = {
rules: [
["x", "return 'X';" ],
["y", "return 'Y';" ],
["$", "return 'EOF';" ]
]
};
var input = "xa";
var lexer = new RegExpLexer(dict, input);
assert.equal(lexer.lex(), "X");
assert["throws"](function(){lexer.lex()}, "bad char");
};
// {name} macro references are expanded inside rule patterns.
exports["test macro"] = function() {
var dict = {
macros: {
"digit": "[0-9]"
},
rules: [
["x", "return 'X';" ],
["y", "return 'Y';" ],
["{digit}+", "return 'NAT';" ],
["$", "return 'EOF';" ]
]
};
var input = "x12234y42";
var lexer = new RegExpLexer(dict, input);
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "NAT");
assert.equal(lexer.lex(), "Y");
assert.equal(lexer.lex(), "NAT");
assert.equal(lexer.lex(), "EOF");
};
// actionInclude code is visible from every rule action.
exports["test action include"] = function() {
var dict = {
rules: [
["x", "return included ? 'Y' : 'N';" ],
["$", "return 'EOF';" ]
],
actionInclude: "var included = true;"
};
var input = "x";
var lexer = new RegExpLexer(dict, input);
assert.equal(lexer.lex(), "Y");
assert.equal(lexer.lex(), "EOF");
};
// A rule whose action returns nothing skips the matched text.
exports["test ignored"] = function() {
var dict = {
rules: [
["x", "return 'X';" ],
["y", "return 'Y';" ],
["\\s+", "/* skip whitespace */" ],
["$", "return 'EOF';" ]
]
};
var input = "x x y x";
var lexer = new RegExpLexer(dict, input);
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "Y");
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "EOF");
};
// \b word boundaries keep keywords from matching identifier prefixes.
exports["test dissambiguate"] = function() {
var dict = {
rules: [
["for\\b", "return 'FOR';" ],
["if\\b", "return 'IF';" ],
["[a-z]+", "return 'IDENTIFIER';" ],
["\\s+", "/* skip whitespace */" ],
["$", "return 'EOF';" ]
]
};
var input = "if forever for for";
var lexer = new RegExpLexer(dict, input);
assert.equal(lexer.lex(), "IF");
assert.equal(lexer.lex(), "IDENTIFIER");
assert.equal(lexer.lex(), "FOR");
assert.equal(lexer.lex(), "FOR");
assert.equal(lexer.lex(), "EOF");
};
// Actions may overwrite yytext; the lexer exposes the final value.
exports["test yytext overwrite"] = function() {
var dict = {
rules: [
["x", "yytext = 'hi der'; return 'X';" ]
]
};
var input = "x";
var lexer = new RegExpLexer(dict, input);
lexer.lex();
assert.equal(lexer.yytext, "hi der");
};
// yylineno tracks newlines consumed by matches (0-based).
exports["test yylineno"] = function() {
var dict = {
rules: [
["\\s+", "/* skip whitespace */" ],
["x", "return 'x';" ],
["y", "return 'y';" ]
]
};
var input = "x\nxy\n\n\nx";
var lexer = new RegExpLexer(dict, input);
assert.equal(lexer.yylineno, 0);
assert.equal(lexer.lex(), "x");
assert.equal(lexer.lex(), "x");
assert.equal(lexer.yylineno, 1);
assert.equal(lexer.lex(), "y");
assert.equal(lexer.yylineno, 1);
assert.equal(lexer.lex(), "x");
assert.equal(lexer.yylineno, 4);
};
// more() accumulates across matches; actions may also be real functions.
// Here a double-quoted string with escaped quotes is lexed as one STRING.
exports["test more()"] = function() {
var dict = {
rules: [
["x", "return 'X';" ],
['"[^"]*', function(){
if(yytext.charAt(yyleng-1) == '\\') {
this.more();
} else {
yytext += this.input(); // swallow end quote
return "STRING";
}
} ],
["$", "return 'EOF';" ]
]
};
var input = 'x"fgjdrtj\\"sdfsdf"x';
var lexer = new RegExpLexer(dict, input);
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "STRING");
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "EOF");
};
// With a token map supplied, lex() returns numeric token ids.
exports["test defined token returns"] = function() {
var tokens = {"2":"X", "3":"Y", "4":"EOF"};
var dict = {
rules: [
["x", "return 'X';" ],
["y", "return 'Y';" ],
["$", "return 'EOF';" ]
]
};
var input = "xxyx";
var lexer = new RegExpLexer(dict, input, tokens);
assert.equal(lexer.lex(), 2);
assert.equal(lexer.lex(), 2);
assert.equal(lexer.lex(), 3);
assert.equal(lexer.lex(), 2);
assert.equal(lexer.lex(), 4);
};
// generateModule() emits source that, when eval'd, defines `lexer`.
exports["test module generator"] = function() {
var dict = {
rules: [
["x", "return 'X';" ],
["y", "return 'Y';" ],
["$", "return 'EOF';" ]
]
};
var input = "xxyx";
var lexer_ = new RegExpLexer(dict);
var lexerSource = lexer_.generateModule();
eval(lexerSource);
lexer.setInput(input);
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "Y");
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "EOF");
};
// Function-valued actions must survive module generation too.
exports["test generator with more complex lexer"] = function() {
var dict = {
rules: [
["x", "return 'X';" ],
['"[^"]*', function(){
if(yytext.charAt(yyleng-1) == '\\') {
this.more();
} else {
yytext += this.input(); // swallow end quote
return "STRING";
}
} ],
["$", "return 'EOF';" ]
]
};
var input = 'x"fgjdrtj\\"sdfsdf"x';
var lexer_ = new RegExpLexer(dict);
var lexerSource = lexer_.generateModule();
eval(lexerSource);
lexer.setInput(input);
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "STRING");
assert.equal(lexer.lex(), "X");
assert.equal(lexer.lex(), "EOF");
};
// generateCommonJSModule() emits source that populates a local `exports`
// (shadowing the module's own exports inside this function) with lexer/lex.
exports["test commonjs module generator"] = function() {
var dict = {
rules: [
["x", "return 'X';" ],
["y", "return 'Y';" ],
["$", "return 'EOF';" ]
]
};
var input = "xxyx";
var lexer_ = new RegExpLexer(dict);
var lexerSource = lexer_.generateCommonJSModule();
var exports = {};
eval(lexerSource);
exports.lexer.setInput(input);
assert.equal(exports.lex(), "X");
assert.equal(exports.lex(), "X");
assert.equal(exports.lex(), "Y");
assert.equal(exports.lex(), "X");
assert.equal(exports.lex(), "EOF");
};
// End-to-end: a full lexer spec for the DJ teaching language, run over a
// complete sample program; every produced token must be a string name.
exports["test DJ lexer"] = function() {
var dict = {
"lex": {
"macros": {
"digit": "[0-9]",
"id": "[a-zA-Z][a-zA-Z0-9]*"
},
"rules": [
["//.*", "/* ignore comment */"],
["main\\b", "return 'MAIN';"],
["class\\b", "return 'CLASS';"],
["extends\\b", "return 'EXTENDS';"],
["nat\\b", "return 'NATTYPE';"],
["if\\b", "return 'IF';"],
["else\\b", "return 'ELSE';"],
["for\\b", "return 'FOR';"],
["printNat\\b", "return 'PRINTNAT';"],
["readNat\\b", "return 'READNAT';"],
["this\\b", "return 'THIS';"],
["new\\b", "return 'NEW';"],
["var\\b", "return 'VAR';"],
["null\\b", "return 'NUL';"],
["{digit}+", "return 'NATLITERAL';"],
["{id}", "return 'ID';"],
["==", "return 'EQUALITY';"],
["=", "return 'ASSIGN';"],
["\\+", "return 'PLUS';"],
["-", "return 'MINUS';"],
["\\*", "return 'TIMES';"],
[">", "return 'GREATER';"],
["\\|\\|", "return 'OR';"],
["!", "return 'NOT';"],
["\\.", "return 'DOT';"],
["\\{", "return 'LBRACE';"],
["\\}", "return 'RBRACE';"],
["\\(", "return 'LPAREN';"],
["\\)", "return 'RPAREN';"],
[";", "return 'SEMICOLON';"],
["\\s+", "/* skip whitespace */"],
[".", "print('Illegal character');throw 'Illegal character';"],
["$", "return 'ENDOFFILE';"]
]
}
};
var input = "class Node extends Object { \
var nat value var nat value;\
var Node next;\
var nat index;\
}\
\
class List extends Object {\
var Node start;\
\
Node prepend(Node startNode) {\
startNode.next = start;\
start = startNode;\
}\
\
nat find(nat index) {\
var nat value;\
var Node node;\
\
for(node = start;!(node == null);node = node.next){\
if(node.index == index){\
value = node.value;\
} else { 0; };\
};\
\
value;\
}\
}\
\
main {\
var nat index;\
var nat value;\
var List list;\
var Node startNode;\
\
index = readNat();\
list = new List;\
\
for(0;!(index==0);0){\
value = readNat();\
startNode = new Node;\
startNode.index = index;\
startNode.value = value;\
list.prepend(startNode);\
index = readNat();\
};\
\
index = readNat();\
\
for(0;!(index==0);0){\
printNat(list.find(index));\
index = readNat();\
};\
}";
var lexer = new RegExpLexer(dict.lex);
lexer.setInput(input);
var tok;
// NOTE(review): loop assumes lex() eventually returns a falsy value after
// ENDOFFILE -- confirm against RegExpLexer's end-of-input behavior.
while (tok = lexer.lex()) {
assert.equal(typeof tok, "string");
}
};

311
vendor/jison/tests/parser/actions.js vendored Normal file
View file

@ -0,0 +1,311 @@
var Jison = require("../setup").Jison,
RegExpLexer = require("../setup").RegExpLexer,
assert = require("assert");
exports["test Semantic action basic return"] = function() {
var lexData = {
rules: [
["x", "return 'x';"],
["y", "return 'y';"]
]
};
var grammar = {
bnf: {
"E" :[ ["E x", "return 0"],
["E y", "return 1"],
"" ]
}
};
var parser = new Jison.Parser(grammar);
parser.lexer = new RegExpLexer(lexData);
assert.equal(parser.parse('x'), 0, "semantic action");
assert.equal(parser.parse('y'), 1, "semantic action");
};
// --- actions.js (tail): semantic-action behavior of generated Jison parsers ---
// Each test builds a tiny lexer + grammar, then asserts what parse() returns
// after the grammar's semantic actions ($$, $1, yytext, ...) have run.

// A rule action may `return null`; parse() must surface that value unchanged.
exports["test return null"] = function() {
    var lexData = {
        rules: [
            ["x", "return 'x';"]
        ]
    };
    var grammar = {
        bnf: {
            "E"   :[ ["E x", "return null;"],
                     "" ]
        }
    };
    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);
    assert.equal(parser.parse('x'), null, "semantic action");
};

// Terminals carry no semantic value by default, so $2 (a terminal) is null.
exports["test terminal semantic values are null"] = function() {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["y", "return 'y';"]
        ]
    };
    var grammar = {
        bnf: {
            "E"   :[ ["E x", "return [$2 === null]"],
                     ["E y", "return [$2]"],
                     "" ]
        }
    };
    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);
    assert.deepEqual(parser.parse('x'), [true], "semantic action");
    assert.deepEqual(parser.parse('y'), [null], "semantic action");
};

// $1/$2 must resolve to the values pushed on the value stack by earlier reductions.
exports["test Semantic action stack lookup"] = function() {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["y", "return 'y';"]
        ]
    };
    var grammar = {
        bnf: {
            "pgm" :[ ["E", "return $1"] ],
            "E"   :[ ["B E", "return $1+$2"],
                     ["x", "$$ = 'EX'"] ],
            "B"   :[ ["y", "$$ = 'BY'"] ]
        }
    };
    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);
    assert.equal(parser.parse('x'), "EX", "return first token");
    assert.equal(parser.parse('yx'), "BYEX", "return first after reduction");
};

// The empty production's action runs first, seeding the accumulator ('->').
exports["test Semantic actions on nullable grammar"] = function() {
    var lexData = {
        rules: [
            ["x", "return 'x';"]
        ]
    };
    var grammar = {
        bnf: {
            "S" :[ ["A", "return $1"] ],
            "A" :[ ['x A', "$$ = $2+'x'" ],
                   ['', "$$ = '->'" ] ]
        }
    };
    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);
    assert.equal(parser.parse('xx'), "->xx", "return first after reduction");
};

// Positional values can also be referenced by symbol name ($A instead of $1).
exports["test named semantic value"] = function() {
    var lexData = {
        rules: [
            ["x", "return 'x';"]
        ]
    };
    var grammar = {
        bnf: {
            "S" :[ ["A", "return $A"] ],
            "A" :[ ['x A', "$$ = $A+'x'" ],
                   ['', "$$ = '->'" ] ]
        }
    };
    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);
    assert.equal(parser.parse('xx'), "->xx", "return first after reduction");
};

// When a symbol occurs twice in one rule, $A1/$A2 disambiguate the occurrences.
exports["test ambiguous named semantic value"] = function() {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["y", "return 'y';"]
        ]
    };
    var grammar = {
        operators: [["left", "y"]],
        bnf: {
            "S" :[ ["A", "return $A"] ],
            "A" :[ ['A y A', "$$ = $A2+'y'+$A1" ],
                   ['x', "$$ = 'x'" ] ]
        }
    };
    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);
    assert.equal(parser.parse('xyx'), "xyx", "return first after reduction");
};

// Actions may build a mutable AST by pushing onto values from deeper reductions.
exports["test Build AST"] = function() {
    var lexData = {
        rules: [
            ["x", "return 'x';"]
        ]
    };
    var grammar = {
        bnf: {
            "S" :[ ['A', "return $1;" ] ],
            "A" :[ ['x A', "$2.push(['ID',{value:'x'}]); $$ = $2;"],
                   ['', "$$ = ['A',{}];"] ]
        }
    };
    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);
    var expectedAST = ['A',{},
        ['ID',{value:'x'}],
        ['ID',{value:'x'}],
        ['ID',{value:'x'}]];

    var r = parser.parse("xxx");
    assert.deepEqual(r, expectedAST);
};

// Left-recursive arithmetic: the AST nests leftward, as LR parsing reduces eagerly.
exports["test 0+0 grammar"] = function() {
    var lexData2 = {
        rules: [
            ["0", "return 'ZERO';"],
            ["\\+", "return 'PLUS';"],
            ["$", "return 'EOF';"]
        ]
    };
    var grammar = {
        bnf: {
            "S" :[ [ "E EOF",    "return $1" ]],
            "E" :[ [ "E PLUS T", "$$ = ['+',$1,$3]"  ],
                   [ "T",        "$$ = $1" ]  ],
            "T" :[ [ "ZERO",     "$$ = [0]" ] ]
        }
    };
    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData2);

    var expectedAST = ["+", ["+", [0], [0]], [0]];

    assert.deepEqual(parser.parse("0+0+0"), expectedAST);
};

// A bare "T" production (no action) must behave exactly like ["T", "$$ = $1"].
exports["test implicit $$ = $1 action"] = function() {
    var lexData2 = {
        rules: [
            ["0", "return 'ZERO';"],
            ["\\+", "return 'PLUS';"],
            ["$", "return 'EOF';"]
        ]
    };
    var grammar = {
        bnf: {
            "S" :[ [ "E EOF",    "return $1" ]],
            "E" :[ [ "E PLUS T", "$$ = ['+',$1,$3]"  ],
                   "T" ],
            "T" :[ [ "ZERO",     "$$ = [0]" ] ]
        }
    };
    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData2);

    var expectedAST = ["+", ["+", [0], [0]], [0]];

    assert.deepEqual(parser.parse("0+0+0"), expectedAST);
};

// `yytext` inside an action is the matched lexeme of the current terminal.
exports["test yytext"] = function() {
    var lexData = {
        rules: [
            ["x", "return 'x';"]
        ]
    };
    var grammar = {
        bnf: {
            "pgm" :[ ["Xexpr", "return $1;"] ],
            "Xexpr" :[ ["x", "$$ = yytext;"] ]
        }
    };
    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);
    assert.equal(parser.parse('x'), "x", "return first token");
};

// `yyleng` is the length of the matched lexeme ('x' -> 1).
exports["test yyleng"] = function() {
    var lexData = {
        rules: [
            ["x", "return 'x';"]
        ]
    };
    var grammar = {
        bnf: {
            "pgm" :[ ["Xexpr", "return $1;"] ],
            "Xexpr" :[ ["x", "$$ = yyleng;"] ]
        }
    };
    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);
    assert.equal(parser.parse('x'), 1, "return first token");
};

// `yytext` must track each terminal separately across consecutive tokens.
exports["test yytext more"] = function() {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["y", "return 'y';"]
        ]
    };
    var grammar = {
        bnf: {
            "pgm" :[ ["expr expr", "return $1+$2;"] ],
            "expr" :[ ["x", "$$ = yytext;"],
                      ["y", "$$ = yytext;"] ]
        }
    };
    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);
    assert.equal(parser.parse('xy'), "xy", "return first token");
};

// Code from `actionInclude` is injected into the parser so actions can call it
// (here the action calls test(), defined only inside actionInclude).
exports["test action include"] = function() {
    var lexData = {
        rules: [
            ["y", "return 'y';"]
        ]
    };
    var grammar = {
        bnf: {
            "E"   :[ ["E y", "return test();"],
                     "" ]
        },
        actionInclude: function () {
            function test(val) {
                return 1;
            }
        }
    };
    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);
    assert.equal(parser.parse('y'), 1, "semantic action");
};

235
vendor/jison/tests/parser/api.js vendored Normal file
View file

@ -0,0 +1,235 @@
// --- api.js: tests for the public Jison API surface ---
// Covers the accepted grammar input formats (token strings, "|"-separated
// rules, raw jison-format strings), generator options, the shared `yy` scope,
// and the custom parseError hook.
var Jison = require("../setup").Jison,
    Lexer = require("../setup").Lexer,
    assert = require("assert");

// Shared two-token lexer used by most tests below.
var lexData = {
    rules: [
        ["x", "return 'x';"],
        ["y", "return 'y';"]
    ]
};

// `tokens` may be a space-separated string instead of an array.
exports["test tokens as a string"] = function () {
    var grammar = {
        tokens: "x y",
        startSymbol: "A",
        bnf: {
            "A" :[ 'A x',
                   'A y',
                   '' ]
        }
    };

    var parser = new Jison.Parser(grammar);
    parser.lexer = new Lexer(lexData);
    assert.ok(parser.parse('xyx'), "parse xyx");
};

// A grammar with neither `tokens` nor `startSymbol` still yields a working parser.
exports["test generator"] = function () {
    var grammar = {
        bnf: {
            "A" :[ 'A x',
                   'A y',
                   '' ]
        }
    };

    var parser = new Jison.Parser(grammar);
    parser.lexer = new Lexer(lexData);
    assert.ok(parser.parse('xyx'), "parse xyx");
};

// Trailing whitespace inside a production string ('A x ') must be ignored.
exports["test extra spaces in productions"] = function () {
    var grammar = {
        tokens: "x y",
        startSymbol: "A",
        bnf: {
            "A" :[ 'A x ',
                   'A y',
                   '' ]
        }
    };

    var parser = new Jison.Parser(grammar);
    parser.lexer = new Lexer(lexData);
    assert.ok(parser.parse('xyx'), "parse xyx");
};

// A nonterminal's productions may be one "|"-separated string.
exports["test | seperated rules"] = function () {
    var grammar = {
        tokens: "x y",
        startSymbol: "A",
        bnf: {
            "A" :"A x | A y | "
        }
    };

    var parser = new Jison.Parser(grammar);
    parser.lexer = new Lexer(lexData);
    assert.ok(parser.parse('xyx'), "parse xyx");
};

// Omitting `startSymbol` must not throw during parser construction;
// reaching the assertion is the test.
exports["test start symbol optional"] = function () {
    var grammar = {
        tokens: "x y",
        bnf: {
            "A" :"A x | A y | "
        }
    };

    var parser = new Jison.Parser(grammar);
    var ok = true;
    assert.ok(ok, "no error");
};

// Using a terminal ("x") as the start symbol is invalid and must throw.
exports["test start symbol should be nonterminal"] = function () {
    var grammar = {
        tokens: "x y",
        startSymbol: "x",
        bnf: {
            "A" :"A x | A y | "
        }
    };

    assert["throws"](function(){new Jison.Generator(grammar);}, "throws error");
};

// A string token list is split into terminals; "$end" is always included.
exports["test token list as string"] = function () {
    var grammar = {
        tokens: "x y",
        startSymbol: "A",
        bnf: {
            "A" :"A x | A y | "
        }
    };

    var gen = new Jison.Generator(grammar);
    assert.deepEqual(gen.terminals, ["$end", "x", "y"]);
};

// `options` embedded in the grammar object are honored.
exports["test grammar options"] = function () {
    var grammar = {
        options: {type: "slr"},
        tokens: "x y",
        startSymbol: "A",
        bnf: {
            "A" :[ 'A x',
                   'A y',
                   '' ]
        }
    };

    var gen = new Jison.Generator(grammar);
    assert.ok(gen);
};

// Constructor options override grammar-embedded options (lr0 beats slr).
exports["test overwrite grammar options"] = function () {
    var grammar = {
        options: {type: "slr"},
        tokens: "x y",
        startSymbol: "A",
        bnf: {
            "A" :[ 'A x',
                   'A y',
                   '' ]
        }
    };

    var gen = new Jison.Generator(grammar, {type: "lr0"});
    assert.equal(gen.constructor, Jison.LR0Generator);
};

// `yy` is shared between lexer and parser actions: a parser action sets
// yy.xed, which changes which token the lexer returns for 'y'.
exports["test yy shared scope"] = function () {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["y", "return yy.xed ? 'yfoo' : 'ybar';"]
        ]
    };

    var grammar = {
        tokens: "x yfoo ybar",
        startSymbol: "A",
        bnf: {
            "A" :[[ 'A x', "yy.xed = true;" ],
                  [ 'A yfoo', " return 'foo';" ],
                  [ 'A ybar', " return 'bar';" ],
                  '' ]
        }
    };

    var parser = new Jison.Parser(grammar, {type: "lr0"});
    parser.lexer = new Lexer(lexData);
    assert.equal(parser.parse('y'), "bar", "should return bar");
    assert.equal(parser.parse('xxy'), "foo", "should return foo");
};

// The `tokens` declaration itself is optional.
exports["test optional token declaration"] = function () {
    var grammar = {
        options: {type: "slr"},
        bnf: {
            "A" :[ 'A x',
                   'A y',
                   '' ]
        }
    };

    var gen = new Jison.Generator(grammar, {type: "lr0"});
    assert.equal(gen.constructor, Jison.LR0Generator);
};

// yy.parseError replaces the default error handler; the hash it receives must
// describe the offending token (text) and location (line).
exports["test custom parse error method"] = function () {
    var lexData = {
        rules: [
            ["a", "return 'a';"],
            ["b", "return 'b';"],
            ["c", "return 'c';"],
            ["d", "return 'd';"],
            ["g", "return 'g';"]
        ]
    };
    var grammar = {
        "tokens": "a b c d g",
        "startSymbol": "S",
        "bnf": {
            "S" :[ "a g d",
                   "a A c",
                   "b A d",
                   "b g c" ],
            "A" :[ "B" ],
            "B" :[ "g" ]
        }
    };

    var parser = new Jison.Parser(grammar, {type: "lalr"});
    parser.lexer = new Lexer(lexData);
    var result = {};
    parser.yy.parseError = function (str, hash) {
        result = hash;
        throw str;
    };

    assert["throws"](function () {parser.parse("agb")});
    assert.equal(result.text, "b", "parse error text should equal b");
    assert["throws"](function () {parser.parse("agz")});
    assert.equal(result.line, 0, "lexical error should have correct line");
};

// A grammar may be given in raw jison file format as a plain string.
exports["test jison grammar as string"] = function () {
    var grammar = "%% A : A x | A y | ;"; // terminate statement explicitly (was relying on ASI)

    var parser = new Jison.Generator(grammar).createParser();
    parser.lexer = new Lexer(lexData);
    assert.ok(parser.parse('xyx'), "parse xyx");
};

196
vendor/jison/tests/parser/generator.js vendored Normal file
View file

@ -0,0 +1,196 @@
// --- generator.js: tests for source-code generation of standalone parsers ---
// Each test generates parser source text, eval()s it, and checks that the
// resulting module-level object parses a sample input.
var Jison = require("../setup").Jison,
    Lexer = require("../setup").Lexer,
    assert = require("assert");

// generateCommonJSModule() produces source that attaches parse() to `exports`.
exports["test commonjs module generator"] = function () {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["y", "return 'y';"]
        ]
    };
    var grammar = {
        tokens: "x y",
        startSymbol: "A",
        bnf: {
            "A" :[ 'A x',
                   'A y',
                   '' ]
        }
    };

    var input = "xyxxxy";
    var gen = new Jison.Generator(grammar);
    gen.lexer = new Lexer(lexData);

    var parserSource = gen.generateCommonJSModule();
    // Shadow `exports` locally so the eval'd module populates this object.
    var exports = {};
    eval(parserSource);

    assert.ok(exports.parse(input));
};

// generateModule() defines a global named `parser` by default.
exports["test module generator"] = function () {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["y", "return 'y';"]
        ]
    };
    var grammar = {
        tokens: "x y",
        startSymbol: "A",
        bnf: {
            "A" :[ 'A x',
                   'A y',
                   '' ]
        }
    };

    var input = "xyxxxy";
    var gen = new Jison.Generator(grammar);
    gen.lexer = new Lexer(lexData);

    var parserSource = gen.generateModule();
    eval(parserSource);

    assert.ok(parser.parse(input));
};

// `moduleName` controls the name of the generated object (here `parsey`).
exports["test module generator with module name"] = function () {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["y", "return 'y';"]
        ]
    };
    var grammar = {
        tokens: "x y",
        startSymbol: "A",
        bnf: {
            "A" :[ 'A x',
                   'A y',
                   '' ]
        }
    };

    var input = "xyxxxy";
    var gen = new Jison.Generator(grammar);
    gen.lexer = new Lexer(lexData);

    var parserSource = gen.generate({moduleType: "js", moduleName: "parsey"});
    eval(parserSource);

    assert.ok(parsey.parse(input));
};

// A dotted moduleName ("compiler.parser") attaches to an existing namespace object.
exports["test module generator with namespaced module name"] = function () {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["y", "return 'y';"]
        ]
    };
    var grammar = {
        tokens: "x y",
        startSymbol: "A",
        bnf: {
            "A" :[ 'A x',
                   'A y',
                   '' ]
        }
    };

    var compiler = {};
    var input = "xyxxxy";
    var gen = new Jison.Generator(grammar);
    gen.lexer = new Lexer(lexData);

    var parserSource = gen.generateModule({moduleName: "compiler.parser"});
    eval(parserSource);

    assert.ok(compiler.parser.parse(input));
};

// A full grammar (with embedded `lex` section, including a function-valued lex
// rule for strings) must survive generation + eval and parse real JSON text.
exports["test module include"] = function () {
    var grammar = {
        "comment": "ECMA-262 5th Edition, 15.12.1 The JSON Grammar. (Incomplete implementation)",
        "author": "Zach Carter",

        "lex": {
            "macros": {
                "digit": "[0-9]",
                "exp": "([eE][-+]?{digit}+)"
            },
            "rules": [
                ["\\s+", "/* skip whitespace */"],
                ["-?{digit}+(\\.{digit}+)?{exp}?", "return 'NUMBER';"],
                ["\"[^\"]*", function(){
                    // A trailing backslash means the closing quote was escaped:
                    // strip the escape and keep lexing the same string token.
                    if(yytext.charAt(yyleng-1) == '\\') {
                        // remove escape
                        yytext = yytext.substr(0,yyleng-2);
                        this.more();
                    } else {
                        yytext = yytext.substr(1); // swallow start quote
                        this.input(); // swallow end quote
                        return "STRING";
                    }
                }],
                ["\\{", "return '{'"],
                ["\\}", "return '}'"],
                ["\\[", "return '['"],
                ["\\]", "return ']'"],
                [",", "return ','"],
                [":", "return ':'"],
                ["true\\b", "return 'TRUE'"],
                ["false\\b", "return 'FALSE'"],
                ["null\\b", "return 'NULL'"]
            ]
        },

        "tokens": "STRING NUMBER { } [ ] , : TRUE FALSE NULL",
        "start": "JSONText",

        "bnf": {
            "JSONString": [ "STRING" ],

            "JSONNumber": [ "NUMBER" ],

            "JSONBooleanLiteral": [ "TRUE", "FALSE" ],


            "JSONText": [ "JSONValue" ],

            "JSONValue": [ "JSONNullLiteral",
                           "JSONBooleanLiteral",
                           "JSONString",
                           "JSONNumber",
                           "JSONObject",
                           "JSONArray" ],

            "JSONObject": [ "{ }",
                            "{ JSONMemberList }" ],

            "JSONMember": [ "JSONString : JSONValue" ],

            "JSONMemberList": [ "JSONMember",
                                "JSONMemberList , JSONMember" ],

            "JSONArray": [ "[ ]",
                           "[ JSONElementList ]" ],

            "JSONElementList": [ "JSONValue",
                                 "JSONElementList , JSONValue" ]
        }
    };

    var gen = new Jison.Generator(grammar);

    var parserSource = gen.generateModule();
    eval(parserSource);

    assert.ok(parser.parse(JSON.stringify(grammar.bnf)));
};

169
vendor/jison/tests/parser/lalr.js vendored Normal file
View file

@ -0,0 +1,169 @@
// --- lalr.js: tests for the LALR(1) generator ({type: "lalr"}) ---
var Jison = require("../setup").Jison,
    Lexer = require("../setup").Lexer,
    assert = require("assert");

// Simple left-recursive arithmetic; invalid input must raise a parse error.
exports["test 0+0 grammar"] = function () {
    var lexData2 = {
        rules: [
            ["0", "return 'ZERO';"],
            ["\\+", "return 'PLUS';"]
        ]
    };
    var grammar = {
        tokens: [ "ZERO", "PLUS"],
        startSymbol: "E",
        bnf: {
            "E" :[ "E PLUS T",
                   "T" ],
            "T" :[ "ZERO" ]
        }
    };

    var parser = new Jison.Parser(grammar, {type: "lalr"});
    parser.lexer = new Lexer(lexData2);

    assert.ok(parser.parse("0+0+0"), "parse");
    assert.ok(parser.parse("0"), "parse single 0");

    assert["throws"](function () {parser.parse("+")}, "throws parse error on invalid");
};

// Nullable left-recursive grammar must parse under LALR without conflicts.
exports["test xx nullable grammar"] = function () {
    var lexData = {
        rules: [
            ["x", "return 'x';"]
        ]
    };
    var grammar = {
        tokens: [ 'x' ],
        startSymbol: "A",
        bnf: {
            "A" :[ 'A x',
                   '' ]
        }
    };

    var parser = new Jison.Parser(grammar, {type: "lalr"});
    parser.lexer = new Lexer(lexData);

    assert.ok(parser.parse("xxx"), "parse");
    assert.ok(parser.parse("x"), "parse single x");
    assert["throws"](function (){parser.parse("+");}, "throws parse error on invalid");
};

// Grammar from the Bermudez & Logothetis LALR paper: all four sentences of the
// language must be accepted, exercising LALR lookahead computation.
exports["test LALR algorithm from Bermudez, Logothetis"] = function () {
    var lexData = {
        rules: [
            ["a", "return 'a';"],
            ["b", "return 'b';"],
            ["c", "return 'c';"],
            ["d", "return 'd';"],
            ["g", "return 'g';"]
        ]
    };
    var grammar = {
        "tokens": "a b c d g",
        "startSymbol": "S",
        "bnf": {
            "S" :[ "a g d",
                   "a A c",
                   "b A d",
                   "b g c" ],
            "A" :[ "B" ],
            "B" :[ "g" ]
        }
    };

    var parser = new Jison.Parser(grammar, {type: "lalr"});
    parser.lexer = new Lexer(lexData);

    assert.ok(parser.parse("agd"));
    assert.ok(parser.parse("agc"));
    assert.ok(parser.parse("bgd"));
    assert.ok(parser.parse("bgc"));
};

// A realistic JSON grammar: for this grammar SLR(1) and LALR(1) must produce
// identical tables, and the parser must accept a string with escaped quotes.
exports["test basic JSON grammar"] = function () {
    var grammar = {
        "lex": {
            "macros": {
                "digit": "[0-9]",
                "esc": "\\\\",
                "int": "-?(?:[0-9]|[1-9][0-9]+)",
                "exp": "(?:[eE][-+]?[0-9]+)",
                "frac": "(?:\\.[0-9]+)"
            },
            "rules": [
                ["\\s+", "/* skip whitespace */"],
                ["{int}{frac}?{exp}?\\b", "return 'NUMBER';"],
                ["\"(?:{esc}[\"bfnrt/{esc}]|{esc}u[a-fA-F0-9]{4}|[^\"{esc}])*\"", "yytext = yytext.substr(1,yyleng-2); return 'STRING';"],
                ["\\{", "return '{'"],
                ["\\}", "return '}'"],
                ["\\[", "return '['"],
                ["\\]", "return ']'"],
                [",", "return ','"],
                [":", "return ':'"],
                ["true\\b", "return 'TRUE'"],
                ["false\\b", "return 'FALSE'"],
                ["null\\b", "return 'NULL'"]
            ]
        },

        "tokens": "STRING NUMBER { } [ ] , : TRUE FALSE NULL",
        "bnf": {
            "JsonThing": [ "JsonObject",
                           "JsonArray" ],

            "JsonObject": [ "{ JsonPropertyList }" ],

            "JsonPropertyList": [ "JsonProperty",
                                  "JsonPropertyList , JsonProperty" ],

            "JsonProperty": [ "StringLiteral : JsonValue" ],

            "JsonArray": [ "[ JsonValueList ]" ],

            "JsonValueList": [ "JsonValue",
                               "JsonValueList , JsonValue" ],

            "JsonValue": [ "StringLiteral",
                           "NumericalLiteral",
                           "JsonObject",
                           "JsonArray",
                           "TRUE",
                           "FALSE",
                           "NULL" ],

            "StringLiteral": [ "STRING" ],

            "NumericalLiteral": [ "NUMBER" ]
        },
    };
    var source = '{"foo": "Bar", "hi": 42, "array": [1,2,3.004, -4.04e-4], "false": false, "true":true, "null": null, "obj": {"ha":"ho"}, "string": "str\\ting\\"sgfg" }';

    var gen = new Jison.Generator(grammar, {type: "lalr"});
    var parser = gen.createParser();
    var gen2 = new Jison.Generator(grammar, {type: "slr"});
    var parser2 = gen2.createParser();
    assert.deepEqual(gen.table, gen2.table, "SLR(1) and LALR(1) tables should be equal");
    assert.ok(parser.parse(source));
};

// A grammar that is LR(1) but not LALR(1): LALR merging introduces exactly
// two reduce-reduce conflicts.
exports["test LR(1) grammar"] = function () {
    var grammar = {
        "comment": "Produces a reduce-reduce conflict unless using LR(1).",
        "tokens": "z d b c a",
        "start": "S",
        "bnf": {
            "S" :[ "a A c",
                   "a B d",
                   "b A d",
                   "b B c"],
            "A" :[ "z" ],
            "B" :[ "z" ]
        }
    };

    var gen = new Jison.Generator(grammar, {type: "lalr"});
    assert.equal(gen.conflicts, 2);
};

72
vendor/jison/tests/parser/lr0.js vendored Normal file
View file

@ -0,0 +1,72 @@
// --- lr0.js: tests for the LR(0) generator ({type: "lr0"}) ---
var Jison = require("../setup").Jison,
    Lexer = require("../setup").Lexer,
    assert = require("assert");

// Shared lexer for the recursion tests below.
var lexData = {
    rules: [
        ["x", "return 'x';"],
        ["y", "return 'y';"]
    ]
};

// Left recursion is fine for LR(0): the nullable grammar parses cleanly.
exports["test left-recursive nullable grammar"] = function () {
    var grammar = {
        tokens: [ 'x' ],
        startSymbol: "A",
        bnf: {
            "A" :[ 'A x',
                   '' ]
        }
    };

    var parser = new Jison.Parser(grammar, {type: "lr0"});
    parser.lexer = new Lexer(lexData);

    assert.ok(parser.parse('xxx'), "parse 3 x's");
    assert.ok(parser.parse("x"), "parse single x");
    assert["throws"](function () {parser.parse("y")}, "throws parse error on invalid token");
};

// Right recursion with a nullable tail is NOT LR(0): without lookahead the
// generator must report 2 shift-reduce conflicts in its 4-state table.
exports["test right-recursive nullable grammar"] = function () {
    var grammar = {
        tokens: [ 'x' ],
        startSymbol: "A",
        bnf: {
            "A" :[ 'x A',
                   '' ]
        }
    };

    var gen = new Jison.Generator(grammar, {type: "lr0"});

    assert.ok(gen.table.length == 4, "table has 4 states");
    assert.ok(gen.conflicts == 2, "encountered 2 conflicts");
};

// Non-nullable arithmetic grammar parses under LR(0); bad input throws.
exports["test 0+0 grammar"] = function () {
    var lexData2 = {
        rules: [
            ["0", "return 'ZERO';"],
            ["\\+", "return 'PLUS';"]
        ]
    };
    var grammar = {
        tokens: [ "ZERO", "PLUS"],
        startSymbol: "E",
        bnf: {
            "E" :[ "E PLUS T",
                   "T" ],
            "T" :[ "ZERO" ]
        }
    };

    var parser = new Jison.Parser(grammar, {type: "lr0"});
    parser.lexer = new Lexer(lexData2);

    assert.ok(parser.parse("0+0+0"), "parse");
    assert.ok(parser.parse("0"), "parse single 0");

    assert["throws"](function () {parser.parse("+")}, "throws parse error on invalid");
};

119
vendor/jison/tests/parser/lr1.js vendored Normal file
View file

@ -0,0 +1,119 @@
// --- lr1.js: tests for the canonical LR(1) generator ({type: "lr"}) ---
var Jison = require("../setup").Jison,
    Lexer = require("../setup").Lexer,
    assert = require("assert");

// Nullable left-recursive grammar parses; invalid input throws.
exports["test xx nullable grammar"] = function () {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["y", "return 'y';"]
        ]
    };
    var grammar = {
        tokens: [ 'x' ],
        startSymbol: "A",
        bnf: {
            "A" :[ 'A x',
                   '' ]
        }
    };

    var parser = new Jison.Parser(grammar, {type: "lr"});
    parser.lexer = new Lexer(lexData);

    assert.ok(parser.parse("xxx"), "parse");
    assert.ok(parser.parse("x"), "parse single x");
    assert["throws"](function (){parser.parse("+");}, "throws parse error on invalid");
};

// Left-recursive arithmetic under canonical LR(1).
exports["test LR parse"] = function () {
    var lexData2 = {
        rules: [
            ["0", "return 'ZERO';"],
            ["\\+", "return 'PLUS';"]
        ]
    };
    var grammar = {
        tokens: [ "ZERO", "PLUS"],
        startSymbol: "E",
        bnf: {
            "E" :[ "E PLUS T",
                   "T" ],
            "T" :[ "ZERO" ]
        }
    };

    var parser = new Jison.Parser(grammar, {type: "lr"});
    parser.lexer = new Lexer(lexData2);

    assert.ok(parser.parse("0+0+0"), "parse");
};

// A JSON-ish grammar with an embedded lex section, including a function-valued
// rule that handles escaped closing quotes by continuing the match via more().
exports["test basic JSON grammar"] = function () {
    var grammar = {
        "lex": {
            "macros": {
                "digit": "[0-9]"
            },
            "rules": [
                ["\\s+", "/* skip whitespace */"],
                ["{digit}+(\\.{digit}+)?", "return 'NUMBER';"],
                ["\"[^\"]*", function(){
                    // Trailing backslash => the quote was escaped; strip the
                    // escape and keep accumulating this string token.
                    if(yytext.charAt(yyleng-1) == '\\') {
                        // remove escape
                        yytext = yytext.substr(0,yyleng-2);
                        this.more();
                    } else {
                        yytext = yytext.substr(1); // swallow start quote
                        this.input(); // swallow end quote
                        return "STRING";
                    }
                }],
                ["\\{", "return '{'"],
                ["\\}", "return '}'"],
                ["\\[", "return '['"],
                ["\\]", "return ']'"],
                [",", "return ','"],
                [":", "return ':'"],
                ["true\\b", "return 'TRUE'"],
                ["false\\b", "return 'FALSE'"],
                ["null\\b", "return 'NULL'"]
            ]
        },

        "tokens": "STRING NUMBER { } [ ] , : TRUE FALSE NULL",
        "bnf": {
            "JsonThing": [ "JsonObject",
                           "JsonArray" ],

            "JsonObject": [ "{ JsonPropertyList }" ],

            "JsonPropertyList": [ "JsonProperty",
                                  "JsonPropertyList , JsonProperty" ],

            "JsonProperty": [ "StringLiteral : JsonValue" ],

            "JsonArray": [ "[ JsonValueList ]" ],

            "JsonValueList": [ "JsonValue",
                               "JsonValueList , JsonValue" ],

            "JsonValue": [ "StringLiteral",
                           "NumericalLiteral",
                           "JsonObject",
                           "JsonArray",
                           "TRUE",
                           "FALSE",
                           "NULL" ],

            "StringLiteral": [ "STRING" ],

            "NumericalLiteral": [ "NUMBER" ]
        },
    };
    var source = '{"foo": "Bar", "hi": 42, "array": [1,2,3.004,4], "false": false, "true":true, "null": null, "obj": {"ha":"ho"}, "string": "string\\"sgfg" }';

    var parser = new Jison.Parser(grammar, {type: "lr"});
    assert.ok(parser.parse(source));
}; // terminate the assignment explicitly (was relying on ASI, unlike every other test)

14
vendor/jison/tests/parser/parser-tests.js vendored Executable file
View file

@ -0,0 +1,14 @@
#!/usr/bin/env narwhal

// Aggregates every parser test module so the whole suite can be run at once
// (each sub-module exports its tests on `exports`).
exports.testAPI = require("./api");
exports.testLR0 = require("./lr0");
exports.testSLR = require("./slr");
exports.testLALR = require("./lalr");
exports.testLR1 = require("./lr1");
exports.testAST = require("./actions");
exports.testTables = require("./tables");
exports.testPrecedence = require("./precedence");
exports.testGenerator = require("./generator");

// When executed directly (not required), run the suite and exit with its status.
if (require.main === module)
    require("os").exit(require("test").run(exports));

237
vendor/jison/tests/parser/precedence.js vendored Normal file
View file

@ -0,0 +1,237 @@
// --- precedence.js: tests for operator-precedence and associativity handling ---
var Jison = require("../setup").Jison,
    RegExpLexer = require("../setup").RegExpLexer,
    assert = require("assert");

// Note: each test defines its own lexData; this module-level one is unused by
// the tests below but kept for interface stability.
var lexData = {
    rules: [
        ["x", "return 'x';"],
        ["\\+", "return '+';"],
        ["$", "return 'EOF';"]
    ]
};

// ["left", "+"] forces x+x+x to group as ((x+x)+x).
exports["test Left associative rule"] = function () {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["\\+", "return '+';"],
            ["$", "return 'EOF';"]
        ]
    };
    var grammar = {
        tokens: [ "x", "+", "EOF" ],
        startSymbol: "S",
        operators: [
            ["left", "+"]
        ],
        bnf: {
            "S" :[ [ 'E EOF',   "return $1;"       ] ],
            "E" :[ [ "E + E",   "$$ = ['+', $1, $3];" ],
                   [ "x",       "$$ = ['x'];"] ]
        }
    };

    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);

    var expectedAST = ["+", ["+", ["x"], ["x"]], ["x"]];

    var r = parser.parse("x+x+x");
    assert.deepEqual(r, expectedAST);
};

// ["right", "+"] forces x+x+x to group as (x+(x+x)).
exports["test Right associative rule"] = function () {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["\\+", "return '+';"],
            ["$", "return 'EOF';"]
        ]
    };
    var grammar = {
        tokens: [ "x", "+", "EOF" ],
        startSymbol: "S",
        operators: [
            ["right", "+"]
        ],
        bnf: {
            "S" :[ [ "E EOF",   "return $1;"       ] ],
            "E" :[ [ "E + E",   "$$ = ['+', $1, $3];" ],
                   [ "x",       "$$ = ['x'];" ] ]
        }
    };

    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);

    var expectedAST = ["+", ["x"], ["+", ["x"], ["x"]]];

    var r = parser.parse("x+x+x");
    assert.deepEqual(r, expectedAST);
};

// '*' declared after '+' binds tighter: x*x+x => (x*x)+x.
exports["test Multiple precedence operators"] = function () {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["\\+", "return '+';"],
            ["\\*", "return '*';"],
            ["$", "return 'EOF';"]
        ]
    };
    var grammar = {
        tokens: [ "x", "+", "*", "EOF" ],
        startSymbol: "S",
        operators: [
            ["left", "+"],
            ["left", "*"]
        ],
        bnf: {
            "S" :[ [ "E EOF",   "return $1;"       ] ],
            "E" :[ [ "E + E",   "$$ = ['+', $1, $3];" ],
                   [ "E * E",   "$$ = ['*', $1, $3];" ],
                   [ "x",       "$$ = ['x'];" ] ]
        }
    };

    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);

    var expectedAST = ["+", ["*", ["x"], ["x"]], ["x"]];

    var r = parser.parse("x*x+x");
    assert.deepEqual(r, expectedAST);
};

// Same precedence setup with infix-style ASTs and '*' on the right-hand side:
// x+x*x => x + (x*x).
// Renamed: this export key previously duplicated the test above, so the first
// test was silently overwritten and never ran.
exports["test Multiple precedence operators II"] = function () {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["\\+", "return '+';"],
            ["\\*", "return '*';"],
            ["$", "return 'EOF';"]
        ]
    };
    var grammar = {
        tokens: [ "x", "+", "*", "EOF" ],
        startSymbol: "S",
        operators: [
            ["left", "+"],
            ["left", "*"]
        ],
        bnf: {
            "S" :[ [ "E EOF",   "return $1;"       ] ],
            "E" :[ [ "E + E",   "$$ = [$1,'+', $3];" ],
                   [ "E * E",   "$$ = [$1, '*', $3];" ],
                   [ "x",       "$$ = ['x'];" ] ]
        }
    };

    var parser = new Jison.Parser(grammar);
    parser.lexer = new RegExpLexer(lexData);

    var expectedAST = [["x"], "+", [["x"], "*", ["x"]]];

    var r = parser.parse("x+x*x");
    assert.deepEqual(r, expectedAST);
};

// nonassoc '=' allows x=x but rejects x=x=x.
exports["test Non-associative operator"] = function () {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["=", "return '=';"],
            ["$", "return 'EOF';"]
        ]
    };
    var grammar = {
        tokens: [ "x", "=", "EOF" ],
        startSymbol: "S",
        operators: [
            ["nonassoc", "="]
        ],
        bnf: {
            "S" :[ "E EOF" ],
            "E" :[ "E = E",
                   "x" ]
        }
    };

    var parser = new Jison.Parser(grammar, {type: "lalr"});
    parser.lexer = new RegExpLexer(lexData);

    assert["throws"](function () {parser.parse("x=x=x");}, "throws parse error when operator used twice.");
    assert.ok(parser.parse("x=x"), "normal use is okay.");
};

// {prec: "UMINUS"} overrides the rule's default precedence (that of '-'),
// so unary minus binds tighter than '*' in -x*-x*x-x.
exports["test Context-dependent precedence"] = function () {
    var lexData = {
        rules: [
            ["x", "return 'x';"],
            ["-", "return '-';"],
            ["\\+", "return '+';"],
            ["\\*", "return '*';"],
            ["$", "return 'EOF';"]
        ]
    };
    var grammar = {
        tokens: [ "x", "-", "+", "*", "EOF" ],
        startSymbol: "S",
        operators: [
            ["left", "-", "+"],
            ["left", "*"],
            ["left", "UMINUS"]
        ],
        bnf: {
            "S" :[ [ "E EOF",   "return $1;"       ] ],
            "E" :[ [ "E - E",   "$$ = [$1,'-', $3];" ],
                   [ "E + E",   "$$ = [$1,'+', $3];" ],
                   [ "E * E",   "$$ = [$1,'*', $3];" ],
                   [ "- E",     "$$ = ['#', $2];", {prec: "UMINUS"} ],
                   [ "x",       "$$ = ['x'];" ] ]
        }
    };

    var parser = new Jison.Parser(grammar, {type: "slr"});
    parser.lexer = new RegExpLexer(lexData);

    var expectedAST = [[[["#", ["x"]], "*", ["#", ["x"]]], "*", ["x"]], "-", ["x"]];

    var r = parser.parse("-x*-x*x-x");
    assert.deepEqual(r, expectedAST);
};

// ASSIGN (right) vs DOT (left) precedence across multi-operator rules must
// resolve every shift/reduce ambiguity: the SLR table has zero conflicts.
exports["test multi-operator rules"] = function () {
    var lexData = {
        rules: [
            ["x", "return 'ID';"],
            ["\\.", "return 'DOT';"],
            ["=", "return 'ASSIGN';"],
            ["\\(", "return 'LPAREN';"],
            ["\\)", "return 'RPAREN';"],
            ["$", "return 'EOF';"]
        ]
    };
    var grammar = {
        tokens: "ID DOT ASSIGN LPAREN RPAREN EOF",
        startSymbol: "S",
        operators: [
            ["right", "ASSIGN"],
            ["left", "DOT"]
        ],
        bnf: {
            "S" :[ [ "e EOF",   "return $1;" ] ],
            "id":[ [ "ID", "$$ = ['ID'];"] ],
            "e" :[ [ "e DOT id",           "$$ = [$1,'-', $3];" ],
                   [ "e DOT id ASSIGN e",  "$$ = [$1,'=', $3];" ],
                   [ "e DOT id LPAREN e RPAREN", "$$ = [$1,'+', $3];" ],
                   [ "id ASSIGN e",        "$$ = [$1,'+', $3];" ],
                   [ "id LPAREN e RPAREN", "$$ = [$1,'+', $3];" ],
                   [ "id",                 "$$ = $1;" ] ]
        }
    };

    var gen = new Jison.Generator(grammar, {type: 'slr'});
    assert.equal(gen.conflicts, 0);
};

52
vendor/jison/tests/parser/slr.js vendored Normal file
View file

@ -0,0 +1,52 @@
// --- slr.js: tests for the SLR(1) generator ({type: "slr"}) ---
var Jison = require("../setup").Jison,
    Lexer = require("../setup").Lexer,
    assert = require("assert");

// Shared lexer for both recursion tests.
var lexData = {
    rules: [
        ["x", "return 'x';"],
        ["y", "return 'y';"]
    ]
};

// Left-recursive nullable grammar: parses, rejects bad tokens, no conflicts.
exports["test left-recursive nullable grammar"] = function () {
    var grammar = {
        tokens: [ 'x' ],
        startSymbol: "A",
        bnf: {
            "A" :[ 'A x',
                   '' ]
        }
    };

    var gen = new Jison.Generator(grammar, {type: "slr"});
    var parser = gen.createParser();
    parser.lexer = new Lexer(lexData);

    assert.ok(parser.parse('xxx'), "parse 3 x's");
    assert.ok(parser.parse("x"), "parse single x");
    assert["throws"](function(){parser.parse("y")}, "throws parse error on invalid token");
    assert.ok(gen.conflicts == 0, "no conflicts");
};

// Right-recursive nullable grammar: unlike LR(0) (see lr0.js), SLR's follow-set
// lookahead resolves the conflicts; also checks the nullable() computation.
exports["test right-recursive nullable grammar"] = function () {
    var grammar = {
        tokens: [ 'x' ],
        startSymbol: "A",
        bnf: {
            "A" :[ 'x A',
                   '' ]
        }
    };

    var gen = new Jison.Generator(grammar, {type: "slr"});
    var parser = gen.createParser();
    parser.lexer = new Lexer(lexData);

    assert.ok(parser.parse('xxx'), "parse 3 x's");
    assert.ok(gen.table.length == 4, "table has 4 states");
    assert.ok(gen.conflicts == 0, "no conflicts");
    assert.equal(gen.nullable('A'), true, "A is nullable");
};

126
vendor/jison/tests/parser/tables.js vendored Normal file
View file

@ -0,0 +1,126 @@
// --- tables.js: tests comparing the parse tables built by each generator type ---
var Jison = require("../setup").Jison,
    assert = require("assert");

// SLR and LALR must build identical, conflict-free tables for this grammar.
exports["test right-recursive nullable grammar"] = function () {
    var grammar = {
        tokens: [ 'x' ],
        startSymbol: "A",
        bnf: {
            "A" :[ 'x A',
                   '' ]
        }
    };

    var gen = new Jison.Generator(grammar, {type: "slr"});
    var gen2 = new Jison.Generator(grammar, {type: "lalr"});

    assert.equal(gen.table.length, 4, "table has 4 states");
    assert.equal(gen.nullable('A'), true, "A is nullable");
    assert.equal(gen.conflicts, 0, "should have no conflict");
    assert.deepEqual(gen.table, gen2.table, "should have identical tables");
};

// For this simple arithmetic grammar, SLR, LALR and canonical LR coincide.
exports["test slr lalr lr tables are equal"] = function () {
    var grammar = {
        tokens: [ "ZERO", "PLUS"],
        startSymbol: "E",
        bnf: {
            "E" :[ "E PLUS T",
                   "T" ],
            "T" :[ "ZERO" ]
        }
    };

    var gen = new Jison.Generator(grammar, {type: "slr"});
    var gen2 = new Jison.Generator(grammar, {type: "lalr"});
    var gen3 = new Jison.Generator(grammar, {type: "lr"});

    assert.deepEqual(gen.table, gen2.table, "slr lalr should have identical tables");
    assert.deepEqual(gen2.table, gen3.table, "lalr lr should have identical tables");
};

// LL table format: nonterminal -> { lookahead token -> [production numbers] }.
exports["test LL parse table"] = function () {
    var grammar = {
        tokens: [ 'x' ],
        startSymbol: "A",
        bnf: {
            "A" :[ 'x A',
                   '' ]
        }
    };

    var gen = new Jison.Generator(grammar, {type: "ll"});

    assert.deepEqual(gen.table, {$accept:{x:[0], $end:[0]}, A:{x:[1], $end:[2]}}, "ll table has 2 states");
};

// This grammar is not LL(1): the generator must report exactly one conflict.
exports["test LL parse table with conflict"] = function () {
    var grammar = {
        tokens: [ 'x' ],
        startSymbol: "L",
        bnf: {
            "L" :[ 'T L T',
                   '' ],
            "T" :[ "x" ]
        }
    };

    var gen = new Jison.Generator(grammar, {type: "ll"});

    assert.equal(gen.conflicts, 1, "should have 1 conflict");
};

// An ambiguous grammar yields conflicts even under canonical LR.
exports["test Ambigous grammar"] = function () {
    var grammar = {
        tokens: [ 'x', 'y' ],
        startSymbol: "A",
        bnf: {
            "A" :[ 'A B A',
                   'x' ],
            "B" :[ '',
                   'y' ]
        }
    };

    var gen = new Jison.Generator(grammar, {type: "lr"});

    assert.equal(gen.conflicts, 2, "should have 2 conflict");
};

// for Minimal LR testing. Not there yet.
/*exports["test Spector grammar G1"] = function () {*/
    //var grammar = {
        //"tokens": "z d b c a",
        //"startSymbol": "S",
        //"bnf": {
            //"S" :[ "a A c",
                   //"a B d",
                   //"b A d",
                   //"b B c"],
            //"A" :[ "z" ],
            //"B" :[ "z" ]
        //}
    //};

    //var gen = new Jison.Generator(grammar, {type: "mlr", debug:true});
    //assert.strictEqual(gen.conflicts, 0, "should have no conflict");
//};

//exports["test De Remer G4"] = function () {
    //var grammar = {
        //"tokens": "z d b c a",
        //"startSymbol": "S",
        //"bnf": {
            //"S" : "a A d | b A c | b B d",
            //"A" : "e A | e",
            //"B" : "e B | e"
        //}
    //};

    //var gen = new Jison.Generator(grammar, {type: "mlr", debug:true});
    //assert.strictEqual(gen.conflicts, 0, "should have no conflict");
/*};*/

110
vendor/jison/tests/performance.js vendored Executable file
View file

@ -0,0 +1,110 @@
#!/usr/bin/env narwhal

// TODO: ...should probably have some real performance tests.

// Builds a LALR parser for a small class-based toy language (classes with
// inheritance, nat arithmetic, if/for, printNat/readNat) purely to exercise
// generator performance; no assertions are made here.
var Jison = require("./setup").Jison;

var grammar = {
    "lex": {
        "macros": {
            "digit": "[0-9]",
            "id": "[a-zA-Z][a-zA-Z0-9]*"
        },

        "rules": [
            ["//.*",       "/* ignore comment */"],
            ["main\\b",     "return 'MAIN';"],
            ["class\\b",    "return 'CLASS';"],
            ["extends\\b",  "return 'EXTENDS';"],
            ["nat\\b",      "return 'NATTYPE';"],
            ["if\\b",       "return 'IF';"],
            ["else\\b",     "return 'ELSE';"],
            ["for\\b",      "return 'FOR';"],
            ["printNat\\b", "return 'PRINTNAT';"],
            ["readNat\\b",  "return 'READNAT';"],
            ["this\\b",     "return 'THIS';"],
            ["new\\b",      "return 'NEW';"],
            ["var\\b",      "return 'VAR';"],
            ["null\\b",     "return 'NUL';"],
            ["{digit}+",   "return 'NATLITERAL';"],
            ["{id}",       "return 'ID';"],
            ["==",         "return 'EQUALITY';"],
            ["=",          "return 'ASSIGN';"],
            ["\\+",        "return 'PLUS';"],
            ["-",          "return 'MINUS';"],
            ["\\*",        "return 'TIMES';"],
            [">",          "return 'GREATER';"],
            ["\\|\\|",     "return 'OR';"],
            ["!",          "return 'NOT';"],
            ["\\.",        "return 'DOT';"],
            ["\\{",        "return 'LBRACE';"],
            ["\\}",        "return 'RBRACE';"],
            ["\\(",        "return 'LPAREN';"],
            ["\\)",        "return 'RPAREN';"],
            [";",          "return 'SEMICOLON';"],
            ["\\s+",       "/* skip whitespace */"],
            [".",          "print('Illegal character');throw 'Illegal character';"],
            ["$",          "return 'ENDOFFILE';"]
        ]
    },

    "tokens": "MAIN CLASS EXTENDS NATTYPE IF ELSE FOR PRINTNAT READNAT THIS NEW VAR NUL NATLITERAL ID ASSIGN PLUS MINUS TIMES EQUALITY GREATER OR NOT DOT SEMICOLON LBRACE RBRACE LPAREN RPAREN ENDOFFILE",

    // Precedence, lowest first: assignment, or, comparisons, +/-, *, not, dot.
    "operators": [
        ["right", "ASSIGN"],
        ["left", "OR"],
        ["nonassoc", "EQUALITY", "GREATER"],
        ["left", "PLUS", "MINUS"],
        ["left", "TIMES"],
        ["right", "NOT"],
        ["left", "DOT"]
    ],

    "bnf": {
        // pgm: class decl list, then main { var decls; expr list }
        "pgm": ["cdl MAIN LBRACE vdl el RBRACE ENDOFFILE"],

        "cdl": ["c cdl",
                ""],

        "c": ["CLASS id EXTENDS id LBRACE vdl mdl RBRACE"],

        "vdl": ["VAR t id SEMICOLON vdl",
                ""],

        "mdl": ["t id LPAREN t id RPAREN LBRACE vdl el RBRACE mdl",
                ""],

        "t": ["NATTYPE",
              "id"],

        "id": ["ID"],

        "el": ["e SEMICOLON el",
               "e SEMICOLON"],

        "e": ["NATLITERAL",
              "NUL",
              "id",
              "NEW id",
              "THIS",
              "IF LPAREN e RPAREN LBRACE el RBRACE ELSE LBRACE el RBRACE ",
              "FOR LPAREN e SEMICOLON e SEMICOLON e RPAREN LBRACE el RBRACE",
              "READNAT LPAREN RPAREN",
              "PRINTNAT LPAREN e RPAREN",
              "e PLUS e",
              "e MINUS e",
              "e TIMES e",
              "e EQUALITY e",
              "e GREATER e",
              "NOT e",
              "e OR e",
              "e DOT id",
              "id ASSIGN e",
              "e DOT id ASSIGN e",
              "id LPAREN e RPAREN",
              "e DOT id LPAREN e RPAREN",
              "LPAREN e RPAREN"]
    }
};

var parser = new Jison.Parser(grammar, {type: 'lalr'});

3
vendor/jison/tests/setup.js vendored Normal file
View file

@ -0,0 +1,3 @@
// Test harness shim: re-exports the library entry points so every test can
// `require("../setup")`. `Lexer` and `RegExpLexer` are aliases for the same class.
exports.Jison = require("../lib/jison").Jison;
exports.Lexer = exports.RegExpLexer = require("../lib/jison/lexer").RegExpLexer;

161
vendor/optparse-js/README.md vendored Normal file
View file

@ -0,0 +1,161 @@
optparse-js
===========
Optparse-js is a command-line option parser for JavaScript. It is loosely based on Ruby's optparse implementation, but with some differences (different languages have different needs), such as custom parsers.
All examples in this readme use [Node.js](http://nodejs.org/). However, the library works with all kinds of JavaScript implementations.
QUICK START
-----------
The library defines one class, the OptionParser class. The class constructor takes one single argument, a list with a set of rules. Here is a quick example:
// Import the sys library
var sys = require('sys');
// Import the optparse library.
var optparse = require('optparse');
// Define an option called ´´help´´. We give it a quick alias named ´´-h´´
// and a quick help text.
var switches = [
['-h', '--help', 'Shows help sections']
];
// Create a new OptionParser.
var parser = new optparse.OptionParser(switches);
// Hook the help option. The callback will be executed when the OptionParser
// hits the switch ´´-h´´ or ´´--help´´. Either representation triggers the callback.
parser.on('help', function() {
sys.puts('Help');
});
DEFINING RULES
--------------
The OptionParser constructor takes an Array with rules. Each rule is represented by an array (tuple) of two or three values. A typical rule definition may look like this:
['-h', '--help', 'Print this help']
The first value is optional, and represents an alias for the long-named switch (the second value, in this case ´´--help´´).
The second argument is the actual rule. The rule must start with a double dash followed by a switch name (in this case ´help´). The OptionParser also supports special option arguments. Define an option argument in the rule by adding a named argument after the leading double dash and switch name (E.G. '--port-number PORT_NUMBER'). The argument is then passed to the option handler. To define an optional option argument, just add braces around the argument in the rule (E.G. '--port-number [PORT_NUMBER]'). The OptionParser also supports filters. More on that in the section called ´Option Filters´.
The third argument is an optional rule description.
OPTION FILTERS
--------------
Filters are a neat feature that lets you filter option arguments. The OptionParser itself already has a set of common built-in filters. These are:
- NUMBER, supports both decimal and hexadecimal numbers.
- DATE, filters arguments that matches YYYY-MM-DD.
- EMAIL, filters arguments that matches my@email.com.
It's simple to use any of the filter above in your rule-set. Here is a quick example how to filter number:
var rules = [
['--first-option NUMBER', 'Takes a number as argument'],
['--second-option [NUMBER]', 'Takes an optional number as argument']
]
You can add your own set of filter by calling the *parser_instance.filter* method:
parser.filter('single_char', function(value) {
if(value.length != 1) throw "Filter mismatch.";
return value;
});
OPTION PARSER
-------------
The OptionParser class has the following properties and methods:
### string banner
An optional usage banner. This text is included when calling ´´toString´´. Default value is: "Usage: [Options]".
### string options_title
An optional title for the options list. This text is included when calling ´´toString´´. Default value is: "Available options:".
### function on(switch_or_arg_index, callback)
Adds a callback for a switch or an argument (defined by index). Switch hooks MUST be typed without the leading ´´--´´. This example shows how to hook a switch:
parser.on('help', function(optional_argument) {
// Show help section
});
And this example shows how to hook an argument (an option without the leading - or --):
parser.on(0, function(opt) {
puts('The first non-switch option is:' + opt);
});
It's also possible to define a default handler. The default handler is called when no rules are met. Here is an example of how to add a ´default handler´:
parser.on(function(opt) {
puts('No handler was defined for option:' + opt);
});
Use the wildcard handler to build a custom ´´on´´ handler.
parser.on('*', function(opt, value) {
puts('option=' + opt + ', value=' + value);
});
### function filter(name, callback)
Adds a new filter extension to the OptionParser instance. The first argument is the name of the filter (trigger). The second argument is the actual filter. See the ´OPTION FILTERS´ section for more info.
It's possible to override the default filters by passing the value "_DEFAULT" to the ´´name´´ argument. The name of the filter is automatically transformed into
upper case.
### function halt([callback])
Interrupts further parsing. This function should be called from an ´on´ callback to cancel the parsing. This can be useful when the program should ignore all other arguments (when displaying help or version information).
The function also takes an optional callback argument. If the callback argument is specified, a ´halt´ callback will be added (instead of executing the ´halt´ command).
Here is an example how to add an ´on_halt´ callback:
parser.halt(function() {
puts('An option callback interrupted the parser');
});
### function parse(arguments)
Starts parsing of arguments. This should be the last thing you do.
### function options()
Returns an Array with all defined option rules
### function toString()
Returns a string representation of this OptionParser instance (a formatted help section).
MORE EXAMPLES
-------------
See examples/nodejs-test.js and examples/browser-test.html for more info on how to
use the library.
SUGGESTIONS
-----------
All suggestions on how to improve this library are very welcome. Feel free to post suggestions to the [Issue tracker](http://github.com/jfd/optparse-js/issues), or even better, fork the repository to implement your own features.
LICENSE
-------
Released under a MIT-style license.
COPYRIGHT
---------
Copyright (c) 2009 Johan Dahlberg

1
vendor/optparse-js/TODO vendored Normal file
View file

@ -0,0 +1 @@
- Support for Argument lists (for switches)

View file

@ -0,0 +1,75 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
"http://www.w3.org/TR/html4/strict.dtd">
<!-- Browser demo for optparse.js: parses a hard-coded argument list on page
     load and appends the results to the document body. -->
<html>
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8">
<title>optparse.js example</title>
<script type="text/javascript" charset="utf-8" src="../src/optparse.js"></script>
<script>
// Arguments to be passed to the parser
var ARGS = ['-p', 'This is a message', '-i', 'test.html', '--debug'];
// Define some options
var SWITCHES = [
['-i', '--include-file FILE', "Includes a file"],
['-p', '--print [MESSAGE]', "Prints a message on screen"],
['-d', '--debug', "Enables debug mode"],
];
// Appends `msg` to the page inside a <pre> element. The message text is
// script-controlled here, never user input.
function puts(msg) {
var body = document.getElementById('body');
var pre = document.createElement('pre');
pre.innerHTML = msg;
body.appendChild(pre);
}
// Entry point, wired to <body onload>: configures the parser, runs it over
// ARGS, and prints a summary.
function onLoad() {
puts("optparse.js");
// Create a new OptionParser with defined switches
var parser = new optparse.OptionParser(SWITCHES);
// Internal variable to store options.
var options = {
debug: false,
files: []
};
// Handle the first argument (switches excluded)
parser.on(0, function(value) {
puts("First non-switch argument is: " + value);
});
// Handle the --include-file switch
parser.on('include-file', function(value) {
options.files.push(value);
});
// Handle the --print switch
parser.on('print', function(value) {
puts('PRINT: ' + value);
});
// Handle the --debug switch
parser.on('debug', function() {
options.debug = true;
});
// Parse command line arguments
parser.parse(ARGS);
// Output all files that was included.
puts("No of files to include: " + options.files.length);
for(var i = 0; i < options.files.length; i++) {
puts("File [" + (i + 1) + "]:" + options.files[i]);
}
// Is debug-mode enabled?
puts("Debug mode is set to: " + options.debug);
}
</script>
</head>
<body id="body" onload="onLoad()">
</body>
</html>

View file

@ -0,0 +1,88 @@
// Example script: exercises optparse from Node.js.
// Import the optparse script
var optparse = require('../src/optparse');
process.mixin(require("utils"));
// The switch table that the parser is configured with.
var SWITCHES = [
  ['-i', '--include-file FILE', "Includes a file"],
  ['-p', '--print [MESSAGE]', "Prints an optional message on screen"],
  ['-d', '--debug', "Enables debug mode"],
  ['-H', '--help', "Shows this help section"],
  ['--date DATE', "A date. A date is expected E.G. 2009-01-14"],
  ['--number NUMBER', "A Number. Supported formats are 123, 123.123, 0xA123"],
  ['--other NAME', "No handler defined for this option. Will be handled by the wildcard handler."],
];
// Build the parser and the state the handlers below will populate.
var parser = new optparse.OptionParser(SWITCHES);
var print_summary = true;
var first_arg;
parser.banner = 'Usage: nodejs-test.js [options]';
// Option values collected while parsing.
var options = {
  debug: false,
  files: [],
  number: undefined,
  date: undefined
};
// First non-switch argument (index 0).
parser.on(0, function(val) {
  first_arg = val;
});
// --include-file: collect every file mentioned.
parser.on('include-file', function(val) {
  options.files.push(val);
});
// --print: echo the (optional) message immediately.
parser.on('print', function(val) {
  puts('PRINT: ' + (val || 'No message entered'));
});
// --date: remember the parsed date.
parser.on('date', function(val) {
  options.date = val;
});
// --number: remember the parsed number.
parser.on('number', function(val) {
  options.number = val;
});
// --debug: toggle debug mode on.
parser.on('debug', function() {
  options.debug = true;
});
// --help: print usage and suppress the summary below.
parser.on('help', function() {
  puts(parser.toString());
  print_summary = false;
});
// Wildcard handler for switches without a dedicated callback.
parser.on('*', function(opt, val) {
  puts('wild handler for ' + opt + ', value=' + val);
});
// Parse command line arguments
parser.parse(process.ARGV);
if(print_summary) {
  puts("First non-switch argument is: " + first_arg);
  // Output all files that was included.
  puts("No of files to include: " + options.files.length);
  options.files.forEach(function(file, index) {
    puts("File [" + (index + 1) + "]:" + file);
  });
  // Is debug-mode enabled?
  puts("Debug mode is set to: " + options.debug);
  puts("Number value is: " + options.number);
  puts("Date value is: " + options.date);
}

309
vendor/optparse-js/src/optparse.js vendored Executable file
View file

@ -0,0 +1,309 @@
// Optparse.js 1.0 - Option Parser for Javascript
//
// Copyright (c) 2009 Johan Dahlberg
//
// See README.md for license.
//
// The module body runs inside an IIFE; `optparse` doubles as the browser
// global and, when `exports` exists (Node/CommonJS), as the module exports.
var optparse = {};
try{ optparse = exports } catch(e) {}; // Try to export the lib for node.js
(function(self) {
var VERSION = '1.0';
// Token classification: long switches ("--name") and short switches ("-n").
var LONG_SWITCH_RE = /^--\w/;
var SHORT_SWITCH_RE = /^-\w/;
// Argument-filter patterns: numbers (hex "0x..", decimal, or integer),
// dates in yyyy-mm-dd form, and email addresses.
var NUMBER_RE = /^(0x[A-Fa-f0-9]+)|([0-9]+\.[0-9]+)|(\d+)$/;
var DATE_RE = /^\d{4}-(0[0-9]|1[0,1,2])-([0,1,2][0-9]|3[0,1])$/;
var EMAIL_RE = /^([0-9a-zA-Z]+([_.-]?[0-9a-zA-Z]+)*@[0-9a-zA-Z]+[0-9,a-z,A-Z,.,-]*(.){1}[a-zA-Z]{2,4})+$/;
// Splits a rule declaration like "--name ARG" into the switch (group 1/3)
// and its optional argument spec (group 2); ARG_OPTIONAL_RE detects the
// bracketed "[ARG]" form meaning the argument is optional.
var EXT_RULE_RE = /(\-\-[\w_-]+)\s+([\w\[\]_-]+)|(\-\-[\w_-]+)/;
var ARG_OPTIONAL_RE = /\[(.+)\]/;
// The default switch argument filter to use, when the argument name doesn't
// match any other names.
var DEFAULT_FILTER = '_DEFAULT';
var PREDEFINED_FILTERS = {};
// Default switch-argument filter: passes the raw argument string through
// unmodified.
function filter_text(value) {
  var result = value;
  return result;
}
// Switch argument filter that expects an integer, HEX or a decimal value. An
// exception is thrown if the criteria is not matched.
// Valid input formats are: 0xFFFFFFF, 12345 and 1234.1234
function filter_number(value) {
  // NOTE: the original code invoked the regex as a function (NUMBER_RE(value)),
  // a non-standard old-engine extension; .exec() is the portable equivalent.
  var m = NUMBER_RE.exec(value);
  if(m == null) throw OptError('Expected a number representative');
  if(m[1]) {
    // The number is in HEX format. Convert with an explicit radix of 16.
    return parseInt(m[1], 16);
  } else {
    // The number is in regular- or decimal form. Just run it through
    // the float caster.
    return parseFloat(m[2] || m[3]);
  }
};
// Switch argument filter that expects a Date expression. The date string MUST
// be formated as: "yyyy-mm-dd". An exception is thrown if the criteria is not
// matched. A Date object is returned on success.
function filter_date(value) {
  // .exec() replaces the non-standard regex-as-function call of the original.
  var m = DATE_RE.exec(value);
  if(m == null) throw OptError('Expected a date representation in the "yyyy-mm-dd" format.');
  // The year is the leading four characters of the value (the original used
  // parseInt on the whole match, which only worked by accident); m[1] is the
  // month and m[2] the day. An explicit radix of 10 keeps "08"/"09" from
  // being read as invalid octal on older engines, and Date months are
  // 0-indexed, so the parsed month is shifted down by one.
  var year = parseInt(value.substr(0, 4), 10);
  var month = parseInt(m[1], 10) - 1;
  var day = parseInt(m[2], 10);
  return new Date(year, month, day);
};
// Switch argument filter that expects an email address. An exception is
// thrown if the criteria doesn't match. Returns the matched address.
function filter_email(value) {
  // .exec() replaces the non-standard regex-as-function call; the error
  // message typo ("Excpeted") is also corrected.
  var m = EMAIL_RE.exec(value);
  if(m == null) throw OptError('Expected an email address.');
  return m[1];
}
// Register all predefined filters. This dict is used by each OptionParser
// instance, when parsing arguments. Custom filters can be added to a parser
// instance by calling its "filter" method.
PREDEFINED_FILTERS[DEFAULT_FILTER] = filter_text;
PREDEFINED_FILTERS['TEXT'] = filter_text;
PREDEFINED_FILTERS['NUMBER'] = filter_number;
PREDEFINED_FILTERS['DATE'] = filter_date;
PREDEFINED_FILTERS['EMAIL'] = filter_email;
// Builds rules from a switches collection. The switches collection is defined
// when constructing a new OptionParser object. Entries may be 1-, 2- or
// 3-element arrays; empty entries are skipped.
function build_rules(filters, arr) {
  var rules = [];
  for(var i = 0; i < arr.length; i++) {
    var r = arr[i], rule;
    if(!contains_expr(r)) throw OptError('Rule MUST contain an option.');
    switch(r.length) {
      case 1:
        rule = build_rule(filters, r[0]);
        break;
      case 2:
        // Work out which element is the long-switch expression; the other
        // (if any) is either the short alias or the description. `.test()`
        // replaces the non-standard regex-as-function call of the original.
        var expr = LONG_SWITCH_RE.test(r[0]) ? 0 : 1;
        var alias = expr == 0 ? -1 : 0;
        var desc = alias == -1 ? 1 : -1;
        rule = build_rule(filters, r[alias], r[expr], r[desc]);
        break;
      case 3:
        rule = build_rule(filters, r[0], r[1], r[2]);
        break;
      default:
      case 0:
        continue;
    }
    rules.push(rule);
  }
  return rules;
}
// Builds a rule with specified expression, short style switch and help. This
// function expects a dict with filters to work correctly.
//
// Return format:
//  name          The name of the switch.
//  short         The short style switch
//  long          The long style switch
//  decl          The declaration expression (the input expression)
//  desc          The optional help section for the switch
//  optional_arg  Indicates that switch argument is optional
//  filter        The filter to use when parsing the arg. An
//                <<undefined>> value means that the switch does
//                not take an argument.
function build_rule(filters, short, expr, desc) {
  var optional, filter;
  var m = expr.match(EXT_RULE_RE);
  if(m == null) throw OptError('The switch is not well-formed.');
  var long = m[1] || m[3];
  if(m[2] != undefined) {
    // A switch argument is expected. Check if the argument is optional,
    // then find a filter that suits. `.exec()` replaces the non-standard
    // regex-as-function call of the original code.
    var optional_match = ARG_OPTIONAL_RE.exec(m[2]);
    var filter_name = optional_match === null ? m[2] : optional_match[1];
    optional = optional_match !== null;
    filter = filters[filter_name];
    if(filter === undefined) filter = filters[DEFAULT_FILTER];
  }
  return {
    name: long.substr(2),
    short: short,
    long: long,
    decl: expr,
    desc: desc,
    optional_arg: optional,
    filter: filter
  }
}
// Loops through all elements of an array and checks if there is a valid
// option expression within. A valid option is a token that starts with
// double dashes, E.G. --my_option. Returns false for missing/empty arrays.
function contains_expr(arr) {
  if(!arr || !arr.length) return false;
  var l = arr.length;
  // `.test()` replaces the non-standard regex-as-function call.
  while(l-- > 0) if(LONG_SWITCH_RE.test(arr[l])) return true;
  return false;
}
// Copies every enumerable member of src onto dest and returns dest.
function extend(dest, src) {
  for(var key in src) {
    dest[key] = src[key];
  }
  return dest;
}
// Padding helper. With one numeric argument, returns that many spaces.
// With (string, width), returns the string right-padded with spaces to
// the given width (returned unchanged if already that long).
function spaces(arg1, arg2) {
  var pad, prefix = '';
  if(arg1.constructor === Number) {
    pad = arg1;
  } else {
    if(arg1.length == arg2) return arg1;
    pad = arg2 - arg1.length;
    prefix = arg1;
  }
  var chunks = [prefix];
  while(pad-- > 0) chunks.push(' ');
  return chunks.join('');
}
// Convenience factory: creates a new OptionParser that can be used to parse
// command line arguments, without requiring callers to use `new` themselves.
//
function Parser(rules) {
  return new OptionParser(rules);
}
// Creates a lightweight error object carrying the specified message; its
// toString() yields the message so it prints cleanly.
function OptError(msg) {
  var err = {};
  err.msg = msg;
  err.toString = function() {
    return this.msg;
  };
  return err;
}
// Constructs an option parser from a collection of switch rules. The raw
// rules are kept as-is here; they are compiled into rule objects each time
// parse()/options()/toString() runs.
function OptionParser(rules) {
  this._rules = rules;
  this._halt = false;
  this.banner = 'Usage: [Options]';
  this.options_title = 'Available options:';
  this.filters = extend({}, PREDEFINED_FILTERS);
  this.on_args = {};
  this.on_switches = {};
  this.on_halt = function() {};
  this.default_handler = function() {};
}
OptionParser.prototype = {
  // Registers a handler: a Function installs the default handler, a Number
  // binds to the non-switch argument at that index, anything else is treated
  // as a switch name (without the leading dashes; '*' is the wildcard).
  on: function(value, fn) {
    if(value.constructor === Function ) {
      this.default_handler = value;
    } else if(value.constructor === Number) {
      this.on_args[value] = fn;
    } else {
      this.on_switches[value] = fn;
    }
  },
  // Adds a custom filter to the parser. It's possible to override the
  // default filter by passing the value "_DEFAULT" to the ´´name´´
  // argument. The name of the filter is automatically transformed into
  // upper case.
  filter: function(name, fn) {
    this.filters[name.toUpperCase()] = fn;
  },
  // Parses specified args. Returns remaining (non-switch) arguments, or the
  // result of the on_halt callback when parsing was interrupted.
  parse: function(args) {
    // `token` was an implicit global in the original; declared local here.
    var result = [], callback, token;
    var rules = build_rules(this.filters, this._rules);
    var tokens = args.concat([]);
    while((token = tokens.shift()) && this._halt == false) {
      // `.test()` replaces the non-standard regex-as-function calls used
      // throughout the original implementation.
      if(LONG_SWITCH_RE.test(token) || SHORT_SWITCH_RE.test(token)) {
        var arg = undefined;
        // The token is a long or a short switch. Get the corresponding
        // rule, filter and handle it. Pass the switch to the default
        // handler if no rule matched.
        for(var i = 0; i < rules.length; i++) {
          var rule = rules[i];
          if(rule.long == token || rule.short == token) {
            if(rule.filter !== undefined) {
              arg = tokens.shift();
              // End of input or a following switch means the argument is
              // missing: put the token back for optional arguments, fail
              // for required ones. (The original passed `undefined` into
              // the filter when the input ran out.)
              if(arg === undefined || LONG_SWITCH_RE.test(arg) || SHORT_SWITCH_RE.test(arg)) {
                if(rule.optional_arg) {
                  if(arg !== undefined) tokens.unshift(arg);
                  arg = undefined;
                } else {
                  throw OptError('Expected switch argument.');
                }
              } else {
                try {
                  arg = rule.filter(arg);
                } catch(e) {
                  throw OptError(token + ': ' + e.toString());
                }
              }
            }
            callback = this.on_switches[rule.name];
            if (!callback) callback = this.on_switches['*'];
            if(callback) callback.apply(this, [rule.name, arg]);
            break;
          }
        }
        if(i == rules.length) this.default_handler.apply(this, [token]);
      } else {
        // Did not match long or short switch. Parse the token as a
        // normal argument.
        callback = this.on_args[result.length];
        result.push(token);
        if(callback) callback.apply(this, [token]);
      }
    }
    return this._halt ? this.on_halt.apply(this, []) : result;
  },
  // Returns an Array with all defined option rules.
  options: function() {
    return build_rules(this.filters, this._rules);
  },
  // Adds an on_halt callback if argument ´´fn´´ is specified; otherwise
  // halts parsing immediately. on_switch handlers can call instance.halt()
  // to abort the argument parsing, e.g. after displaying help or version
  // information.
  halt: function(fn) {
    this._halt = fn === undefined;
    if(fn) this.on_halt = fn;
  },
  // Returns a string representation of this OptionParser instance
  // (a formatted help section).
  toString: function() {
    var builder = [this.banner, '', this.options_title],
        shorts = false, longest = 0, rule;
    var rules = build_rules(this.filters, this._rules);
    for(var i = 0; i < rules.length; i++) {
      rule = rules[i];
      // Quick-analyze the options.
      if(rule.short) shorts = true;
      if(rule.decl.length > longest) longest = rule.decl.length;
    }
    for(var i = 0; i < rules.length; i++) {
      // Initialized to '' — the original left `text` undefined when no rule
      // had a short alias, producing lines starting with "undefined".
      var text = '';
      rule = rules[i];
      if(shorts) {
        if(rule.short) text = spaces(2) + rule.short + ', ';
        else text = spaces(6);
      }
      text += spaces(rule.decl, longest) + spaces(3);
      text += rule.desc;
      builder.push(text);
    }
    return builder.join('\n');
  }
};
self.VERSION = VERSION;
self.OptionParser = OptionParser;
})(optparse);