(function(){
  var lexer, parser, path;
  // Set up for both the browser and the server.
  if (typeof process !== "undefined" && process !== null) {
    process.mixin(require('nodes'));
    path = require('path');
    lexer = new (require('lexer').Lexer)();
    parser = require('parser').parser;
  } else {
    // In the browser, lexer.js and parser.js are assumed to have been loaded
    // as plain scripts; aliasing 'exports' to the global object lets their
    // top-level definitions be reached through it.
    this.exports = this;
    lexer = new Lexer();
    parser = exports.parser;
  }

  // Thin wrapper for Jison compatibility around the real lexer.
  parser.lexer = {
    // Return the tag of the next token, exposing its value and line number
    // to the parser through yytext and yylineno, as Jison expects.
    lex: function lex() {
      var token;
      token = this.tokens[this.pos] || [""];
      this.pos += 1;
      this.yylineno = token[2];
      this.yytext = token[1];
      return token[0];
    },
    // Receive the pre-lexed token stream and rewind to the first token.
    setInput: function setInput(tokens) {
      this.tokens = tokens;
      return this.pos = 0;
    },
    upcomingInput: function upcomingInput() {
      return "";
    },
    showPosition: function showPosition() {
      return this.pos;
    }
  };

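  // For illustration only: Jison calls setInput once with the token stream,
  // then lex repeatedly until it sees the empty end-of-input tag. Tokens are
  // assumed to be [tag, value, line] triples, matching how lex reads them:
  //
  //   parser.lexer.setInput([['NUMBER', '42', 1], ['TERMINATOR', "\n", 1]]);
  //   parser.lexer.lex();    // 'NUMBER'      -- yytext: '42', yylineno: 1
  //   parser.lexer.lex();    // 'TERMINATOR'
  //   parser.lexer.lex();    // ''            -- the [""] fallback marks the end
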
  // Improved error messages (currently disabled; the commented-out
  // CoffeeScript below shows the intended implementation):
  // parser.parseError: (message, hash) ->
  //   throw new Error 'Unexpected ' + parser.terminals_[hash.token] + ' on line ' + hash.line

  exports.VERSION = '0.5.0';

  // Compile CoffeeScript to JavaScript, using the Coffee/Jison compiler.
  exports.compile = function compile(code, options) {
    return (parser.parse(lexer.tokenize(code))).compile(options);
  };

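  // A usage sketch (the module name and sample source are assumptions):
  //
  //   var coffee = require('coffee-script');
  //   coffee.compile("alert 'hello'");    // => the compiled JavaScript, as a string
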
  // Just the tokens.
  exports.tokenize = function tokenize(code) {
    return lexer.tokenize(code);
  };

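  // Sketch of the return shape, inferred from how parser.lexer reads tokens --
  // an array of [tag, value, line] triples (tag names illustrative):
  //
  //   exports.tokenize("alert 'hello'")
  //   // => roughly [['IDENTIFIER', 'alert', 1], ['STRING', "'hello'", 1], ...]
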
  // Just the nodes.
  exports.tree = function tree(code) {
    return parser.parse(lexer.tokenize(code));
  };

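  // The returned root node is the same object compile() above calls
  // .compile(options) on, so by construction:
  //
  //   exports.tree(code).compile(options) === exports.compile(code, options)
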
  // Pretty-print a token stream.
  exports.print_tokens = function print_tokens(tokens) {
    var _a, _b, _c, strings, token;
    strings = (function() {
      _a = []; _b = tokens;
      for (_c = 0; _c < _b.length; _c++) {
        token = _b[_c];
        _a.push('[' + token[0] + ' ' + token[1].toString().replace(/\n/g, '\\n') + ']');
      }
      return _a;
    }).call(this);
    // Note: relies on early Node exposing sys.puts as a global 'puts'.
    return puts(strings.join(' '));
  };

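  // Given the format above, the output is one space-separated line, e.g.:
  //
  //   [IDENTIFIER alert] [STRING 'hello'] [TERMINATOR \n]
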
})();