Mirror of https://github.com/jashkenas/coffeescript.git

commit 5e498ca395
Jeremy Ashkenas  2013-02-25 17:41:27 +13:00
17 changed files with 849 additions and 393 deletions


@@ -176,9 +176,11 @@ runTests = (CoffeeScript) ->
       fn.call(fn)
       ++passedTests
     catch e
-      e.description = description if description?
-      e.source = fn.toString() if fn.toString?
-      failures.push filename: currentFile, error: e
+      failures.push
+        filename: currentFile
+        error: e
+        description: description if description?
+        source: fn.toString() if fn.toString?
 # See http://wiki.ecmascript.org/doku.php?id=harmony:egal
 egal = (a, b) ->
@@ -206,16 +208,16 @@ runTests = (CoffeeScript) ->
     return log(message, green) unless failures.length
     log "failed #{failures.length} and #{message}", red
     for fail in failures
-      {error, filename} = fail
+      {error, filename, description, source} = fail
       jsFilename = filename.replace(/\.coffee$/,'.js')
       match = error.stack?.match(new RegExp(fail.file+":(\\d+):(\\d+)"))
       match = error.stack?.match(/on line (\d+):/) unless match
       [match, line, col] = match if match
       console.log ''
-      log "  #{error.description}", red if error.description
+      log "  #{description}", red if description
       log "  #{error.stack}", red
       log "  #{jsFilename}: line #{line ? 'unknown'}, column #{col ? 'unknown'}", red
-      console.log "  #{error.source}" if error.source
+      console.log "  #{source}" if source
       return
 # Run every test in the `test` folder, recording failures.


@@ -152,8 +152,14 @@
   parser.lexer = {
     lex: function() {
-      var tag, _ref;
-      _ref = this.tokens[this.pos++] || [''], tag = _ref[0], this.yytext = _ref[1], this.yylineno = _ref[2];
+      var tag, token;
+      token = this.tokens[this.pos++];
+      if (token) {
+        tag = token[0], this.yytext = token[1], this.yylloc = token[2];
+        this.yylineno = this.yylloc.first_line;
+      } else {
+        tag = '';
+      }
       return tag;
     },
     setInput: function(tokens) {
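
Note: the third slot of every lexer token changes here from a bare line number to a full location-data object, which this wrapper hands to jison as yylloc. A minimal sketch of the shape change (the source text and offsets below are assumed, illustrative values):

    // Before: for the source `count = 42` (offsets assumed)
    var oldToken = ['NUMBER', '42', 0];  // third slot: line number only
    // After: zero-based line/column span of the token's characters
    var newToken = ['NUMBER', '42', {
      first_line: 0, first_column: 8,
      last_line: 0, last_column: 9
    }];
    // The wrapper above feeds jison: yytext gets the value, yylloc the
    // location object, and yylineno is derived from yylloc.first_line.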


@@ -431,14 +431,16 @@
   };
   printTokens = function(tokens) {
-    var strings, tag, token, value;
+    var locationData, strings, tag, token, value;
     strings = (function() {
-      var _i, _len, _ref1, _results;
+      var _i, _len, _results;
       _results = [];
       for (_i = 0, _len = tokens.length; _i < _len; _i++) {
         token = tokens[_i];
-        _ref1 = [token[0], token[1].toString().replace(/\n/, '\\n')], tag = _ref1[0], value = _ref1[1];
-        _results.push("[" + tag + " " + value + "]");
+        tag = token[0];
+        value = token[1].toString().replace(/\n/, '\\n');
+        locationData = helpers.locationDataToString(token[2]);
+        _results.push("[" + tag + " " + value + " " + locationData + "]");
       }
       return _results;
     })();
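
Note: with locationDataToString (added to the helpers module below), the printed token dump now carries one-based line:column ranges. Roughly, for the source `a = 1`, the output would look like this (illustrative, offsets assumed):

    // [IDENTIFIER a 1:1-1:1] [= = 1:3-1:3] [NUMBER 1 1:5-1:5]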


@@ -7,8 +7,9 @@
 unwrap = /^function\s*\(\)\s*\{\s*return\s*([\s\S]*);\s*\}/;
 o = function(patternString, action, options) {
-  var match;
+  var addLocationDataFn, match, patternCount;
   patternString = patternString.replace(/\s{2,}/g, ' ');
+  patternCount = patternString.split(' ').length;
   if (!action) {
     return [patternString, '$$ = $1;', options];
   }
@@ -16,7 +17,16 @@
   action = action.replace(/\bnew /g, '$&yy.');
   action = action.replace(/\b(?:Block\.wrap|extend)\b/g, 'yy.$&');
   action = action.replace(/\b(Op|Value\.(create|wrap))\b/g, 'yy.$&');
-  return [patternString, "$$ = " + action + ";", options];
+  addLocationDataFn = function(first, last) {
+    if (!last) {
+      return "yy.addLocationDataFn(@" + first + ")";
+    } else {
+      return "yy.addLocationDataFn(@" + first + ", @" + last + ")";
+    }
+  };
+  action = action.replace(/LOCDATA\(([0-9]*)\)/g, addLocationDataFn('$1'));
+  action = action.replace(/LOCDATA\(([0-9]*),\s*([0-9]*)\)/g, addLocationDataFn('$1', '$2'));
+  return [patternString, "$$ = " + (addLocationDataFn(1, patternCount)) + "(" + action + ");", options];
 };
 grammar = {
@@ -86,10 +96,10 @@
   o('ObjAssignable', function() {
     return Value.wrap($1);
   }), o('ObjAssignable : Expression', function() {
-    return new Assign(Value.wrap($1), $3, 'object');
+    return new Assign(LOCDATA(1)(Value.wrap($1)), $3, 'object');
   }), o('ObjAssignable :\
 INDENT Expression OUTDENT', function() {
-    return new Assign(Value.wrap($1), $4, 'object');
+    return new Assign(LOCDATA(1)(Value.wrap($1)), $4, 'object');
   }), o('Comment')
 ],
 ObjAssignable: [o('Identifier'), o('AlphaNumeric'), o('ThisProperty')],
@@ -179,7 +189,7 @@
   }), o('?. Identifier', function() {
     return new Access($2, 'soak');
   }), o(':: Identifier', function() {
-    return [new Access(new Literal('prototype')), new Access($2)];
+    return [LOCDATA(1)(new Access(new Literal('prototype'))), LOCDATA(2)(new Access($2))];
   }), o('::', function() {
     return new Access(new Literal('prototype'));
   }), o('Index')
@@ -271,7 +281,7 @@
 ],
 ThisProperty: [
   o('@ Identifier', function() {
-    return Value.wrap(new Literal('this'), [new Access($2)], 'this');
+    return Value.wrap(LOCDATA(1)(new Literal('this')), [LOCDATA(2)(new Access($2))], 'this');
   })
 ],
 Array: [
@@ -338,7 +348,7 @@
   o('CATCH Identifier Block', function() {
     return [$2, $3];
   }), o('CATCH Object Block', function() {
-    return [Value.wrap($2), $3];
+    return [LOCDATA(2)(Value.wrap($2)), $3];
   })
 ],
 Throw: [
@@ -375,18 +385,18 @@
   o('WhileSource Block', function() {
     return $1.addBody($2);
   }), o('Statement WhileSource', function() {
-    return $2.addBody(Block.wrap([$1]));
+    return $2.addBody(LOCDATA(1)(Block.wrap([$1])));
   }), o('Expression WhileSource', function() {
-    return $2.addBody(Block.wrap([$1]));
+    return $2.addBody(LOCDATA(1)(Block.wrap([$1])));
   }), o('Loop', function() {
     return $1;
   })
 ],
 Loop: [
   o('LOOP Block', function() {
-    return new While(new Literal('true')).addBody($2);
+    return new While(LOCDATA(1)(new Literal('true'))).addBody($2);
   }), o('LOOP Expression', function() {
-    return new While(new Literal('true')).addBody(Block.wrap([$2]));
+    return new While(LOCDATA(1)(new Literal('true'))).addBody(LOCDATA(2)(Block.wrap([$2])));
   })
 ],
 For: [
@@ -401,7 +411,7 @@
 ForBody: [
   o('FOR Range', function() {
     return {
-      source: Value.wrap($2)
+      source: LOCDATA(2)(Value.wrap($2))
     };
   }), o('ForStart ForSource', function() {
     $2.own = $1.own;
@@ -510,12 +520,12 @@
   o('IfBlock'), o('IfBlock ELSE Block', function() {
     return $1.addElse($3);
   }), o('Statement POST_IF Expression', function() {
-    return new If($3, Block.wrap([$1]), {
+    return new If($3, LOCDATA(1)(Block.wrap([$1])), {
       type: $2,
       statement: true
     });
   }), o('Expression POST_IF Expression', function() {
-    return new If($3, Block.wrap([$1]), {
+    return new If($3, LOCDATA(1)(Block.wrap([$1])), {
       type: $2,
       statement: true
     });
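
Note: the o() helper now wraps every rule's action so its result is stamped with the location of the whole production, and LOCDATA(n) inside an action marks a sub-node with the location of the n-th grammar symbol only. An illustrative expansion (not verbatim from the diff):

    // For the rule 'ObjAssignable : Expression' (patternCount = 3), the
    // action above compiles to roughly:
    //   $$ = yy.addLocationDataFn(@1, @3)(
    //          new yy.Assign(yy.addLocationDataFn(@1)(yy.Value.wrap($1)),
    //                        $3, 'object'));
    // jison then rewrites @1/@3 into its _$ location table entries, e.g.
    // _$[$0-2] and _$[$0], as seen in the regenerated parser below.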


@@ -1,6 +1,6 @@
 // Generated by CoffeeScript 1.5.0-pre
 (function() {
-  var extend, flatten, _ref;
+  var buildLocationData, extend, flatten, _ref;
   exports.starts = function(string, literal, start) {
     return literal === string.substr(start, literal.length);
@@ -85,4 +85,40 @@
     return false;
   };
+  buildLocationData = function(first, last) {
+    if (!last) {
+      return first;
+    } else {
+      return {
+        first_line: first.first_line,
+        first_column: first.first_column,
+        last_line: last.last_line,
+        last_column: last.last_column
+      };
+    }
+  };
+  exports.addLocationDataFn = function(first, last) {
+    return function(obj) {
+      if (((typeof obj) === 'object') && (!!obj['updateLocationDataIfMissing'])) {
+        obj.updateLocationDataIfMissing(buildLocationData(first, last));
+      }
+      return obj;
+    };
+  };
+  exports.locationDataToString = function(obj) {
+    var locationData;
+    if (("2" in obj) && ("first_line" in obj[2])) {
+      locationData = obj[2];
+    } else if ("first_line" in obj) {
+      locationData = obj;
+    }
+    if (locationData) {
+      return ("" + (locationData.first_line + 1) + ":" + (locationData.first_column + 1) + "-") + ("" + (locationData.last_line + 1) + ":" + (locationData.last_column + 1));
+    } else {
+      return "No location data";
+    }
+  };
 }).call(this);
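
Note: a minimal sketch of how these helpers combine, with assumed values (require path assumed to match the tree above):

    var helpers = require('./helpers');
    var first = {first_line: 0, first_column: 4, last_line: 0, last_column: 9};
    var last  = {first_line: 2, first_column: 0, last_line: 2, last_column: 7};
    // Returns a function that stamps any AST node implementing
    // updateLocationDataIfMissing with the merged span first..last;
    // plain values pass through untouched.
    var stamp = helpers.addLocationDataFn(first, last);
    // stamp(node) -> node, spanning "1:5-3:8" per locationDataToString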


@@ -1,33 +1,36 @@
 // Generated by CoffeeScript 1.5.0-pre
 (function() {
-  var BOM, BOOL, CALLABLE, CODE, COFFEE_ALIASES, COFFEE_ALIAS_MAP, COFFEE_KEYWORDS, COMMENT, COMPARE, COMPOUND_ASSIGN, HEREDOC, HEREDOC_ILLEGAL, HEREDOC_INDENT, HEREGEX, HEREGEX_OMIT, IDENTIFIER, INDEXABLE, INVERSES, JSTOKEN, JS_FORBIDDEN, JS_KEYWORDS, LINE_BREAK, LINE_CONTINUER, LITERATE, LOGIC, Lexer, MATH, MULTILINER, MULTI_DENT, NOT_REGEX, NOT_SPACED_REGEX, NUMBER, OPERATOR, REGEX, RELATION, RESERVED, Rewriter, SHIFT, SIMPLESTR, STRICT_PROSCRIBED, TRAILING_SPACES, UNARY, WHITESPACE, compact, count, key, last, starts, _ref, _ref1,
+  var BOM, BOOL, CALLABLE, CODE, COFFEE_ALIASES, COFFEE_ALIAS_MAP, COFFEE_KEYWORDS, COMMENT, COMPARE, COMPOUND_ASSIGN, HEREDOC, HEREDOC_ILLEGAL, HEREDOC_INDENT, HEREGEX, HEREGEX_OMIT, IDENTIFIER, INDEXABLE, INVERSES, JSTOKEN, JS_FORBIDDEN, JS_KEYWORDS, LINE_BREAK, LINE_CONTINUER, LITERATE, LOGIC, Lexer, MATH, MULTILINER, MULTI_DENT, NOT_REGEX, NOT_SPACED_REGEX, NUMBER, OPERATOR, REGEX, RELATION, RESERVED, Rewriter, SHIFT, SIMPLESTR, STRICT_PROSCRIBED, TRAILING_SPACES, UNARY, WHITESPACE, compact, count, key, last, locationDataToString, starts, _ref, _ref1,
   __indexOf = [].indexOf || function(item) { for (var i = 0, l = this.length; i < l; i++) { if (i in this && this[i] === item) return i; } return -1; };
   _ref = require('./rewriter'), Rewriter = _ref.Rewriter, INVERSES = _ref.INVERSES;
-  _ref1 = require('./helpers'), count = _ref1.count, starts = _ref1.starts, compact = _ref1.compact, last = _ref1.last;
+  _ref1 = require('./helpers'), count = _ref1.count, starts = _ref1.starts, compact = _ref1.compact, last = _ref1.last, locationDataToString = _ref1.locationDataToString;
   exports.Lexer = Lexer = (function() {
     function Lexer() {}
     Lexer.prototype.tokenize = function(code, opts) {
-      var i, tag;
+      var consumed, i, tag, _ref2;
       if (opts == null) {
        opts = {};
       }
       this.literate = opts.literate;
       code = this.clean(code);
-      this.line = opts.line || 0;
       this.indent = 0;
       this.indebt = 0;
       this.outdebt = 0;
       this.indents = [];
       this.ends = [];
       this.tokens = [];
+      this.chunkLine = opts.line || 0;
+      this.chunkColumn = opts.column || 0;
       i = 0;
       while (this.chunk = code.slice(i)) {
-        i += this.identifierToken() || this.commentToken() || this.whitespaceToken() || this.lineToken() || this.heredocToken() || this.stringToken() || this.numberToken() || this.regexToken() || this.jsToken() || this.literalToken();
+        consumed = this.identifierToken() || this.commentToken() || this.whitespaceToken() || this.lineToken() || this.heredocToken() || this.stringToken() || this.numberToken() || this.regexToken() || this.jsToken() || this.literalToken();
+        _ref2 = this.getLineAndColumnFromChunk(consumed), this.chunkLine = _ref2[0], this.chunkColumn = _ref2[1];
+        i += consumed;
       }
       this.closeIndentation();
       if (tag = this.ends.pop()) {
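
Note: instead of a single this.line counter bumped ad hoc inside each token method, the lexer now tracks the line and column at which the current chunk starts, advancing both after every matched token. A standalone sketch of the advance logic (function name illustrative; it mirrors getLineAndColumnFromChunk, which appears later in this file's diff):

    function advance(line, column, consumedText) {
      var lines = consumedText.split('\n');
      if (lines.length > 1) {
        // Newlines consumed: move down, column restarts after the last one.
        return [line + lines.length - 1, lines[lines.length - 1].length];
      }
      return [line, column + consumedText.length];
    }
    // advance(0, 0, 'foo = ')   -> [0, 6]
    // advance(0, 6, '"a\nbb"')  -> [1, 3]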
@@ -69,11 +72,13 @@
     };
     Lexer.prototype.identifierToken = function() {
-      var colon, forcedIdentifier, id, input, match, prev, tag, _ref2, _ref3;
+      var colon, colonOffset, forcedIdentifier, id, idLength, input, match, poppedToken, prev, tag, tagToken, _ref2, _ref3, _ref4;
       if (!(match = IDENTIFIER.exec(this.chunk))) {
         return 0;
       }
       input = match[0], id = match[1], colon = match[2];
+      idLength = id.length;
+      poppedToken = void 0;
       if (id === 'own' && this.tag() === 'FOR') {
         this.token('OWN', id);
         return id.length;
@@ -97,7 +102,7 @@
       } else {
         tag = 'RELATION';
         if (this.value() === '!') {
-          this.tokens.pop();
+          poppedToken = this.tokens.pop();
           id = '!' + id;
         }
       }
@@ -137,9 +142,13 @@
         }
         })();
       }
-      this.token(tag, id);
+      tagToken = this.token(tag, id, 0, idLength);
+      if (poppedToken) {
+        _ref4 = [poppedToken[2].first_line, poppedToken[2].first_column], tagToken[2].first_line = _ref4[0], tagToken[2].first_column = _ref4[1];
+      }
       if (colon) {
-        this.token(':', ':');
+        colonOffset = input.lastIndexOf(':');
+        this.token(':', ':', colonOffset, colon.length);
       }
       return input.length;
     };
@@ -166,7 +175,7 @@
       if (binaryLiteral = /^0b([01]+)/.exec(number)) {
         number = '0x' + (parseInt(binaryLiteral[1], 2)).toString(16);
       }
-      this.token('NUMBER', number);
+      this.token('NUMBER', number, 0, lexedLength);
       return lexedLength;
     };
@@ -177,16 +186,20 @@
           if (!(match = SIMPLESTR.exec(this.chunk))) {
             return 0;
           }
-          this.token('STRING', (string = match[0]).replace(MULTILINER, '\\\n'));
+          string = match[0];
+          this.token('STRING', string.replace(MULTILINER, '\\\n'), 0, string.length);
           break;
         case '"':
           if (!(string = this.balancedString(this.chunk, '"'))) {
             return 0;
           }
           if (0 < string.indexOf('#{', 1)) {
-            this.interpolateString(string.slice(1, -1));
+            this.interpolateString(string.slice(1, -1), {
+              strOffset: 1,
+              lexedLength: string.length
+            });
           } else {
-            this.token('STRING', this.escapeLines(string));
+            this.token('STRING', this.escapeLines(string, 0, string.length));
           }
           break;
         default:
@@ -195,7 +208,6 @@
       if (octalEsc = /^(?:\\.|[^\\])*\\(?:0[0-7]|[1-7])/.test(string)) {
         this.error("octal escape sequences " + string + " are not allowed");
       }
-      this.line += count(string, '\n');
       return string.length;
     };
@@ -212,12 +224,13 @@
       });
       if (quote === '"' && 0 <= doc.indexOf('#{')) {
         this.interpolateString(doc, {
-          heredoc: true
+          heredoc: true,
+          strOffset: 3,
+          lexedLength: heredoc.length
         });
       } else {
-        this.token('STRING', this.makeString(doc, quote, true));
+        this.token('STRING', this.makeString(doc, quote, true), 0, heredoc.length);
       }
-      this.line += count(heredoc, '\n');
       return heredoc.length;
     };
@@ -231,9 +244,8 @@
         this.token('HERECOMMENT', this.sanitizeHeredoc(here, {
           herecomment: true,
           indent: Array(this.indent + 1).join(' ')
-        }));
+        }), 0, comment.length);
       }
-      this.line += count(comment, '\n');
       return comment.length;
     };
@@ -242,8 +254,7 @@
       if (!(this.chunk.charAt(0) === '`' && (match = JSTOKEN.exec(this.chunk)))) {
         return 0;
       }
-      this.token('JS', (script = match[0]).slice(1, -1));
-      this.line += count(script, '\n');
+      this.token('JS', (script = match[0]).slice(1, -1), 0, script.length);
       return script.length;
     };
@@ -254,7 +265,6 @@
       }
       if (match = HEREGEX.exec(this.chunk)) {
         length = this.heregexToken(match);
-        this.line += count(match[0], '\n');
         return length;
       }
       prev = last(this.tokens);
@@ -271,49 +281,60 @@
       if (regex === '//') {
         regex = '/(?:)/';
       }
-      this.token('REGEX', "" + regex + flags);
+      this.token('REGEX', "" + regex + flags, 0, match.length);
       return match.length;
     };
     Lexer.prototype.heregexToken = function(match) {
-      var body, flags, heregex, re, tag, tokens, value, _i, _len, _ref2, _ref3, _ref4, _ref5;
+      var body, flags, flagsOffset, heregex, plusToken, prev, re, tag, token, tokens, value, _i, _len, _ref2, _ref3, _ref4;
       heregex = match[0], body = match[1], flags = match[2];
       if (0 > body.indexOf('#{')) {
         re = body.replace(HEREGEX_OMIT, '').replace(/\//g, '\\/');
         if (re.match(/^\*/)) {
           this.error('regular expressions cannot begin with `*`');
         }
-        this.token('REGEX', "/" + (re || '(?:)') + "/" + flags);
+        this.token('REGEX', "/" + (re || '(?:)') + "/" + flags, 0, heregex.length);
         return heregex.length;
       }
-      this.token('IDENTIFIER', 'RegExp');
-      this.tokens.push(['CALL_START', '(']);
+      this.token('IDENTIFIER', 'RegExp', 0, 0);
+      this.token('CALL_START', '(', 0, 0);
       tokens = [];
       _ref2 = this.interpolateString(body, {
         regex: true
       });
       for (_i = 0, _len = _ref2.length; _i < _len; _i++) {
-        _ref3 = _ref2[_i], tag = _ref3[0], value = _ref3[1];
+        token = _ref2[_i];
+        tag = token[0], value = token[1];
         if (tag === 'TOKENS') {
           tokens.push.apply(tokens, value);
-        } else {
+        } else if (tag === 'NEOSTRING') {
           if (!(value = value.replace(HEREGEX_OMIT, ''))) {
             continue;
           }
           value = value.replace(/\\/g, '\\\\');
-          tokens.push(['STRING', this.makeString(value, '"', true)]);
+          token[0] = 'STRING';
+          token[1] = this.makeString(value, '"', true);
+          tokens.push(token);
+        } else {
+          this.error("Unexpected " + tag);
         }
-        tokens.push(['+', '+']);
+        prev = last(this.tokens);
+        plusToken = ['+', '+'];
+        plusToken[2] = prev[2];
+        tokens.push(plusToken);
       }
       tokens.pop();
-      if (((_ref4 = tokens[0]) != null ? _ref4[0] : void 0) !== 'STRING') {
-        this.tokens.push(['STRING', '""'], ['+', '+']);
+      if (((_ref3 = tokens[0]) != null ? _ref3[0] : void 0) !== 'STRING') {
+        this.token('STRING', '""', 0, 0);
+        this.token('+', '+', 0, 0);
       }
-      (_ref5 = this.tokens).push.apply(_ref5, tokens);
+      (_ref4 = this.tokens).push.apply(_ref4, tokens);
       if (flags) {
-        this.tokens.push([',', ','], ['STRING', '"' + flags + '"']);
+        flagsOffset = heregex.lastIndexOf(flags);
+        this.token(',', ',', flagsOffset, 0);
+        this.token('STRING', '"' + flags + '"', flagsOffset, flags.length);
       }
-      this.token(')', ')');
+      this.token(')', ')', heregex.length - 1, 0);
      return heregex.length;
    };
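
Note: an interpolated heregex is emitted as a RegExp constructor call, and the change above keeps each piece's location data while giving the synthetic glue tokens zero-width positions. Roughly (illustrative, identifier assumed):

    // ///^#{prefix}\d+///gi lexes to a stream equivalent to:
    //   RegExp ( "^" + prefix + "\\d+" , "gi" )
    // Each NEOSTRING piece keeps its own span; '+', ',' and ')' get
    // zero-length spans anchored to neighbouring tokens.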
@@ -323,7 +344,6 @@
         return 0;
       }
       indent = match[0];
-      this.line += count(indent, '\n');
       this.seenFor = false;
       size = indent.length - 1 - indent.lastIndexOf('\n');
       noNewlines = this.unfinished();
@@ -331,7 +351,7 @@
         if (noNewlines) {
           this.suppressNewlines();
         } else {
-          this.newlineToken();
+          this.newlineToken(0);
         }
         return indent.length;
       }
@@ -342,19 +362,19 @@
           return indent.length;
         }
         diff = size - this.indent + this.outdebt;
-        this.token('INDENT', diff);
+        this.token('INDENT', diff, 0, indent.length);
         this.indents.push(diff);
         this.ends.push('OUTDENT');
         this.outdebt = this.indebt = 0;
       } else {
         this.indebt = 0;
-        this.outdentToken(this.indent - size, noNewlines);
+        this.outdentToken(this.indent - size, noNewlines, indent.length);
       }
       this.indent = size;
       return indent.length;
     };
-    Lexer.prototype.outdentToken = function(moveOut, noNewlines) {
+    Lexer.prototype.outdentToken = function(moveOut, noNewlines, outdentLength) {
       var dent, len;
       while (moveOut > 0) {
         len = this.indents.length - 1;
@@ -371,7 +391,7 @@
           moveOut -= dent;
           this.outdebt = 0;
           this.pair('OUTDENT');
-          this.token('OUTDENT', dent);
+          this.token('OUTDENT', dent, 0, outdentLength);
         }
       }
       if (dent) {
@@ -381,7 +401,7 @@
         this.tokens.pop();
       }
       if (!(this.tag() === 'TERMINATOR' || noNewlines)) {
-        this.token('TERMINATOR', '\n');
+        this.token('TERMINATOR', '\n', outdentLength, 0);
       }
       return this;
     };
@@ -402,12 +422,12 @@
       }
     };
-    Lexer.prototype.newlineToken = function() {
+    Lexer.prototype.newlineToken = function(offset) {
       while (this.value() === ';') {
         this.tokens.pop();
       }
       if (this.tag() !== 'TERMINATOR') {
-        this.token('TERMINATOR', '\n');
+        this.token('TERMINATOR', '\n', offset, 0);
       }
       return this;
     };
@@ -581,11 +601,18 @@
     };
     Lexer.prototype.interpolateString = function(str, options) {
-      var expr, heredoc, i, inner, interpolated, len, letter, nested, pi, regex, tag, tokens, value, _i, _len, _ref2, _ref3, _ref4;
+      var column, expr, heredoc, i, inner, interpolated, len, letter, lexedLength, line, locationToken, nested, offsetInChunk, pi, plusToken, popped, regex, strOffset, tag, token, tokens, value, _i, _len, _ref2, _ref3, _ref4;
       if (options == null) {
         options = {};
       }
-      heredoc = options.heredoc, regex = options.regex;
+      heredoc = options.heredoc, regex = options.regex, offsetInChunk = options.offsetInChunk, strOffset = options.strOffset, lexedLength = options.lexedLength;
+      offsetInChunk = offsetInChunk || 0;
+      strOffset = strOffset || 0;
+      lexedLength = lexedLength || str.length;
+      if (heredoc && str.length > 0 && str[0] === '\n') {
+        str = str.slice(1);
+        strOffset++;
+      }
       tokens = [];
       pi = 0;
       i = -1;
@@ -598,22 +625,24 @@
           continue;
         }
         if (pi < i) {
-          tokens.push(['NEOSTRING', str.slice(pi, i)]);
+          tokens.push(this.makeToken('NEOSTRING', str.slice(pi, i), strOffset + pi));
         }
         inner = expr.slice(1, -1);
         if (inner.length) {
+          _ref2 = this.getLineAndColumnFromChunk(strOffset + i + 1), line = _ref2[0], column = _ref2[1];
           nested = new Lexer().tokenize(inner, {
-            line: this.line,
+            line: line,
+            column: column,
             rewrite: false
           });
-          nested.pop();
-          if (((_ref2 = nested[0]) != null ? _ref2[0] : void 0) === 'TERMINATOR') {
-            nested.shift();
+          popped = nested.pop();
+          if (((_ref3 = nested[0]) != null ? _ref3[0] : void 0) === 'TERMINATOR') {
+            popped = nested.shift();
           }
           if (len = nested.length) {
             if (len > 1) {
-              nested.unshift(['(', '(', this.line]);
-              nested.push([')', ')', this.line]);
+              nested.unshift(this.makeToken('(', '(', strOffset + i + 1, 0));
+              nested.push(this.makeToken(')', ')', strOffset + i + 1 + inner.length, 0));
             }
             tokens.push(['TOKENS', nested]);
           }
@@ -622,33 +651,47 @@
         pi = i + 1;
       }
       if ((i > pi && pi < str.length)) {
-        tokens.push(['NEOSTRING', str.slice(pi)]);
+        tokens.push(this.makeToken('NEOSTRING', str.slice(pi), strOffset + pi));
       }
       if (regex) {
         return tokens;
       }
       if (!tokens.length) {
-        return this.token('STRING', '""');
+        return this.token('STRING', '""', offsetInChunk, lexedLength);
       }
       if (tokens[0][0] !== 'NEOSTRING') {
-        tokens.unshift(['', '']);
+        tokens.unshift(this.makeToken('NEOSTRING', '', offsetInChunk));
       }
       if (interpolated = tokens.length > 1) {
-        this.token('(', '(');
+        this.token('(', '(', offsetInChunk, 0);
       }
       for (i = _i = 0, _len = tokens.length; _i < _len; i = ++_i) {
-        _ref3 = tokens[i], tag = _ref3[0], value = _ref3[1];
+        token = tokens[i];
+        tag = token[0], value = token[1];
         if (i) {
-          this.token('+', '+');
+          if (i) {
+            plusToken = this.token('+', '+');
+          }
+          locationToken = tag === 'TOKENS' ? value[0] : token;
+          plusToken[2] = {
+            first_line: locationToken[2].first_line,
+            first_column: locationToken[2].first_column,
+            last_line: locationToken[2].first_line,
+            last_column: locationToken[2].first_column
+          };
         }
         if (tag === 'TOKENS') {
           (_ref4 = this.tokens).push.apply(_ref4, value);
+        } else if (tag === 'NEOSTRING') {
+          token[0] = 'STRING';
+          token[1] = this.makeString(value, '"', heredoc);
+          this.tokens.push(token);
         } else {
-          this.token('STRING', this.makeString(value, '"', heredoc));
+          this.error("Unexpected " + tag);
        }
      }
      if (interpolated) {
-        this.token(')', ')');
+        this.token(')', ')', offsetInChunk + lexedLength, 0);
      }
      return tokens;
    };
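
Note: roughly, the double-quoted string "a#{b}c" now lexes to a token stream equivalent to ("a" + b + "c"), where each piece keeps the position it occupied inside the original literal, and each synthetic '+' borrows a zero-width position from the piece that follows it (illustrative reading of the loop above, not verbatim from the diff).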
@@ -666,8 +709,46 @@
       return this.ends.pop();
     };
-    Lexer.prototype.token = function(tag, value) {
-      return this.tokens.push([tag, value, this.line]);
+    Lexer.prototype.getLineAndColumnFromChunk = function(offset) {
+      var column, lineCount, lines, string;
+      if (offset === 0) {
+        return [this.chunkLine, this.chunkColumn];
+      }
+      if (offset >= this.chunk.length) {
+        string = this.chunk;
+      } else {
+        string = this.chunk.slice(0, +(offset - 1) + 1 || 9e9);
+      }
+      lineCount = count(string, '\n');
+      column = this.chunkColumn;
+      if (lineCount > 0) {
+        lines = string.split('\n');
+        column = (last(lines)).length;
+      } else {
+        column += string.length;
+      }
+      return [this.chunkLine + lineCount, column];
+    };
+    Lexer.prototype.makeToken = function(tag, value, offsetInChunk, length) {
+      var lastCharacter, locationData, token, _ref2, _ref3;
+      offsetInChunk = offsetInChunk || 0;
+      if (length === void 0) {
+        length = value.length;
+      }
+      locationData = {};
+      _ref2 = this.getLineAndColumnFromChunk(offsetInChunk), locationData.first_line = _ref2[0], locationData.first_column = _ref2[1];
+      lastCharacter = length > 0 ? length - 1 : 0;
+      _ref3 = this.getLineAndColumnFromChunk(offsetInChunk + (length - 1)), locationData.last_line = _ref3[0], locationData.last_column = _ref3[1];
+      token = [tag, value, locationData];
+      return token;
+    };
+    Lexer.prototype.token = function(tag, value, offsetInChunk, length) {
+      var token;
+      token = this.makeToken(tag, value, offsetInChunk, length);
+      this.tokens.push(token);
+      return token;
     };
     Lexer.prototype.tag = function(index, tag) {
@@ -705,7 +786,7 @@
     };
     Lexer.prototype.error = function(message) {
-      throw SyntaxError("" + message + " on line " + (this.line + 1));
+      throw SyntaxError("" + message + " on line " + (this.chunkLine + 1));
     };
     return Lexer;
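
Note: a self-contained sketch of what makeToken computes, with the helpers inlined and example values assumed. For a token of `length` characters starting at `offset` in the current chunk, first_* is the position of its first character and last_* the position of its last:

    function locate(chunkLine, chunkColumn, chunk, offset, length) {
      function at(off) {
        // Position of the character `off` places into the chunk.
        var s = chunk.slice(0, off), nl = s.split('\n');
        return nl.length > 1
          ? [chunkLine + nl.length - 1, nl[nl.length - 1].length]
          : [chunkLine, chunkColumn + s.length];
      }
      var first = at(offset), last = at(offset + Math.max(length - 1, 0));
      return {first_line: first[0], first_column: first[1],
              last_line: last[0], last_column: last[1]};
    }
    // locate(0, 0, 'count = 42', 8, 2)
    //   -> {first_line: 0, first_column: 8, last_line: 0, last_column: 9}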


@@ -1,6 +1,6 @@
 // Generated by CoffeeScript 1.5.0-pre
 (function() {
-  var Access, Arr, Assign, Base, Block, Call, Class, Closure, Code, Comment, Existence, Extends, For, IDENTIFIER, IDENTIFIER_STR, IS_STRING, If, In, Index, LEVEL_ACCESS, LEVEL_COND, LEVEL_LIST, LEVEL_OP, LEVEL_PAREN, LEVEL_TOP, Literal, METHOD_DEF, NEGATE, NO, Obj, Op, Param, Parens, RESERVED, Range, Return, SIMPLENUM, STRICT_PROSCRIBED, Scope, Slice, Splat, Switch, TAB, THIS, Throw, Try, UTILITIES, Value, While, YES, compact, del, ends, extend, flatten, last, merge, multident, some, starts, unfoldSoak, utility, _ref, _ref1,
+  var Access, Arr, Assign, Base, Block, Call, Class, Closure, Code, Comment, Existence, Extends, For, IDENTIFIER, IDENTIFIER_STR, IS_STRING, If, In, Index, LEVEL_ACCESS, LEVEL_COND, LEVEL_LIST, LEVEL_OP, LEVEL_PAREN, LEVEL_TOP, Literal, METHOD_DEF, NEGATE, NO, Obj, Op, Param, Parens, RESERVED, Range, Return, SIMPLENUM, STRICT_PROSCRIBED, Scope, Slice, Splat, Switch, TAB, THIS, Throw, Try, UTILITIES, Value, While, YES, addLocationDataFn, compact, del, ends, extend, flatten, last, locationDataToString, merge, multident, some, starts, unfoldSoak, utility, _ref, _ref1,
   __hasProp = {}.hasOwnProperty,
   __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
   __indexOf = [].indexOf || function(item) { for (var i = 0, l = this.length; i < l; i++) { if (i in this && this[i] === item) return i; } return -1; };
@@ -9,10 +9,12 @@
   _ref = require('./lexer'), RESERVED = _ref.RESERVED, STRICT_PROSCRIBED = _ref.STRICT_PROSCRIBED;
-  _ref1 = require('./helpers'), compact = _ref1.compact, flatten = _ref1.flatten, extend = _ref1.extend, merge = _ref1.merge, del = _ref1.del, starts = _ref1.starts, ends = _ref1.ends, last = _ref1.last, some = _ref1.some;
+  _ref1 = require('./helpers'), compact = _ref1.compact, flatten = _ref1.flatten, extend = _ref1.extend, merge = _ref1.merge, del = _ref1.del, starts = _ref1.starts, ends = _ref1.ends, last = _ref1.last, some = _ref1.some, addLocationDataFn = _ref1.addLocationDataFn, locationDataToString = _ref1.locationDataToString;
   exports.extend = extend;
+  exports.addLocationDataFn = addLocationDataFn;
   YES = function() {
     return true;
   };
@@ -113,14 +115,15 @@
     };
     Base.prototype.toString = function(idt, name) {
-      var tree;
+      var location, tree;
       if (idt == null) {
         idt = '';
       }
       if (name == null) {
         name = this.constructor.name;
       }
-      tree = '\n' + idt + name;
+      location = this.locationData ? locationDataToString(this.locationData) : "??";
+      tree = '\n' + idt + location + ": " + name;
       if (this.soak) {
         tree += '?';
       }
@@ -191,6 +194,16 @@
     Base.prototype.assigns = NO;
+    Base.prototype.updateLocationDataIfMissing = function(locationData) {
+      if (!this.locationData) {
+        this.locationData = {};
+        extend(this.locationData, locationData);
+      }
+      return this.eachChild(function(child) {
+        return child.updateLocationDataIfMissing(locationData);
+      });
+    };
     return Base;
   })();
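
Note: a minimal standalone sketch of the propagation rule above (names assumed; the real Base uses eachChild rather than a children array). A node stamped by a grammar action keeps its own span, and any child the action did not stamp inherits the parent's span recursively:

    function updateLocationDataIfMissing(node, loc) {
      if (!node.locationData) {
        node.locationData = Object.assign({}, loc);
      }
      (node.children || []).forEach(function(child) {
        updateLocationDataIfMissing(child, loc);
      });
      return node;
    }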


@@ -1,4 +1,4 @@
-/* Jison generated parser */
+/* parser generated by jison 0.4.2 */
 var parser = (function(){
 var parser = {trace: function trace() { },
 yy: {},
@@ -9,15 +9,15 @@ performAction: function anonymous(yytext,yyleng,yylineno,yy,yystate,$$,_$) {
 var $0 = $$.length - 1;
 switch (yystate) {
-case 1:return this.$ = new yy.Block;
+case 1:return this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Block);
 break;
 case 2:return this.$ = $$[$0];
 break;
 case 3:return this.$ = $$[$0-1];
 break;
-case 4:this.$ = yy.Block.wrap([$$[$0]]);
+case 4:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(yy.Block.wrap([$$[$0]]));
 break;
-case 5:this.$ = $$[$0-2].push($$[$0]);
+case 5:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])($$[$0-2].push($$[$0]));
 break;
 case 6:this.$ = $$[$0-1];
 break;
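
Note: an illustrative reading of the regenerated actions (values assumed). _$ is jison's location table, so for a three-symbol rule _$[$0-2] is the first symbol's span and _$[$0] the last:

    // yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.Assign($$[$0-2], $$[$0]))
    // builds the node exactly as before, then stamps it with a span running
    // from the start of _$[$0-2] to the end of _$[$0], unless the node was
    // already stamped by an inner LOCDATA wrapper.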
@@ -29,7 +29,7 @@ case 9:this.$ = $$[$0];
 break;
 case 10:this.$ = $$[$0];
 break;
-case 11:this.$ = new yy.Literal($$[$0]);
+case 11:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Literal($$[$0]));
 break;
 case 12:this.$ = $$[$0];
 break;
@@ -55,41 +55,41 @@ case 22:this.$ = $$[$0];
 break;
 case 23:this.$ = $$[$0];
 break;
-case 24:this.$ = new yy.Block;
+case 24:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Block);
 break;
-case 25:this.$ = $$[$0-1];
+case 25:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])($$[$0-1]);
 break;
-case 26:this.$ = new yy.Literal($$[$0]);
+case 26:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Literal($$[$0]));
 break;
-case 27:this.$ = new yy.Literal($$[$0]);
+case 27:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Literal($$[$0]));
 break;
-case 28:this.$ = new yy.Literal($$[$0]);
+case 28:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Literal($$[$0]));
 break;
 case 29:this.$ = $$[$0];
 break;
-case 30:this.$ = new yy.Literal($$[$0]);
+case 30:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Literal($$[$0]));
 break;
-case 31:this.$ = new yy.Literal($$[$0]);
+case 31:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Literal($$[$0]));
 break;
-case 32:this.$ = new yy.Literal($$[$0]);
+case 32:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Literal($$[$0]));
 break;
-case 33:this.$ = new yy.Undefined;
+case 33:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Undefined);
 break;
-case 34:this.$ = new yy.Null;
+case 34:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Null);
 break;
-case 35:this.$ = new yy.Bool($$[$0]);
+case 35:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Bool($$[$0]));
 break;
-case 36:this.$ = new yy.Assign($$[$0-2], $$[$0]);
+case 36:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.Assign($$[$0-2], $$[$0]));
 break;
-case 37:this.$ = new yy.Assign($$[$0-3], $$[$0]);
+case 37:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])(new yy.Assign($$[$0-3], $$[$0]));
 break;
-case 38:this.$ = new yy.Assign($$[$0-4], $$[$0-1]);
+case 38:this.$ = yy.addLocationDataFn(_$[$0-4], _$[$0])(new yy.Assign($$[$0-4], $$[$0-1]));
 break;
-case 39:this.$ = yy.Value.wrap($$[$0]);
+case 39:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(yy.Value.wrap($$[$0]));
 break;
-case 40:this.$ = new yy.Assign(yy.Value.wrap($$[$0-2]), $$[$0], 'object');
+case 40:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.Assign(yy.addLocationDataFn(_$[$0-2])(yy.Value.wrap($$[$0-2])), $$[$0], 'object'));
 break;
-case 41:this.$ = new yy.Assign(yy.Value.wrap($$[$0-4]), $$[$0-1], 'object');
+case 41:this.$ = yy.addLocationDataFn(_$[$0-4], _$[$0])(new yy.Assign(yy.addLocationDataFn(_$[$0-4])(yy.Value.wrap($$[$0-4])), $$[$0-1], 'object'));
 break;
 case 42:this.$ = $$[$0];
 break;
@@ -99,39 +99,39 @@ case 44:this.$ = $$[$0];
 break;
 case 45:this.$ = $$[$0];
 break;
-case 46:this.$ = new yy.Return($$[$0]);
+case 46:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Return($$[$0]));
 break;
-case 47:this.$ = new yy.Return;
+case 47:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Return);
 break;
-case 48:this.$ = new yy.Comment($$[$0]);
+case 48:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Comment($$[$0]));
 break;
-case 49:this.$ = new yy.Code($$[$0-3], $$[$0], $$[$0-1]);
+case 49:this.$ = yy.addLocationDataFn(_$[$0-4], _$[$0])(new yy.Code($$[$0-3], $$[$0], $$[$0-1]));
 break;
-case 50:this.$ = new yy.Code([], $$[$0], $$[$0-1]);
+case 50:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Code([], $$[$0], $$[$0-1]));
 break;
-case 51:this.$ = 'func';
+case 51:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])('func');
 break;
-case 52:this.$ = 'boundfunc';
+case 52:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])('boundfunc');
 break;
 case 53:this.$ = $$[$0];
 break;
 case 54:this.$ = $$[$0];
 break;
-case 55:this.$ = [];
+case 55:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])([]);
 break;
-case 56:this.$ = [$$[$0]];
+case 56:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])([$$[$0]]);
 break;
-case 57:this.$ = $$[$0-2].concat($$[$0]);
+case 57:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])($$[$0-2].concat($$[$0]));
 break;
-case 58:this.$ = $$[$0-3].concat($$[$0]);
+case 58:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])($$[$0-3].concat($$[$0]));
 break;
-case 59:this.$ = $$[$0-5].concat($$[$0-2]);
+case 59:this.$ = yy.addLocationDataFn(_$[$0-5], _$[$0])($$[$0-5].concat($$[$0-2]));
 break;
-case 60:this.$ = new yy.Param($$[$0]);
+case 60:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Param($$[$0]));
 break;
-case 61:this.$ = new yy.Param($$[$0-1], null, true);
+case 61:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Param($$[$0-1], null, true));
 break;
-case 62:this.$ = new yy.Param($$[$0-2], $$[$0]);
+case 62:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.Param($$[$0-2], $$[$0]));
 break;
 case 63:this.$ = $$[$0];
 break;
@@ -141,129 +141,129 @@ case 65:this.$ = $$[$0];
 break;
 case 66:this.$ = $$[$0];
 break;
-case 67:this.$ = new yy.Splat($$[$0-1]);
+case 67:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Splat($$[$0-1]));
 break;
-case 68:this.$ = yy.Value.wrap($$[$0]);
+case 68:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(yy.Value.wrap($$[$0]));
 break;
-case 69:this.$ = $$[$0-1].add($$[$0]);
+case 69:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])($$[$0-1].add($$[$0]));
 break;
-case 70:this.$ = yy.Value.wrap($$[$0-1], [].concat($$[$0]));
+case 70:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(yy.Value.wrap($$[$0-1], [].concat($$[$0])));
 break;
 case 71:this.$ = $$[$0];
 break;
 case 72:this.$ = $$[$0];
 break;
-case 73:this.$ = yy.Value.wrap($$[$0]);
+case 73:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(yy.Value.wrap($$[$0]));
 break;
-case 74:this.$ = yy.Value.wrap($$[$0]);
+case 74:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(yy.Value.wrap($$[$0]));
 break;
 case 75:this.$ = $$[$0];
 break;
-case 76:this.$ = yy.Value.wrap($$[$0]);
+case 76:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(yy.Value.wrap($$[$0]));
 break;
-case 77:this.$ = yy.Value.wrap($$[$0]);
+case 77:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(yy.Value.wrap($$[$0]));
 break;
-case 78:this.$ = yy.Value.wrap($$[$0]);
+case 78:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(yy.Value.wrap($$[$0]));
 break;
 case 79:this.$ = $$[$0];
 break;
-case 80:this.$ = new yy.Access($$[$0]);
+case 80:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Access($$[$0]));
 break;
-case 81:this.$ = new yy.Access($$[$0], 'soak');
+case 81:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Access($$[$0], 'soak'));
 break;
-case 82:this.$ = [new yy.Access(new yy.Literal('prototype')), new yy.Access($$[$0])];
+case 82:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])([yy.addLocationDataFn(_$[$0-1])(new yy.Access(new yy.Literal('prototype'))), yy.addLocationDataFn(_$[$0])(new yy.Access($$[$0]))]);
 break;
-case 83:this.$ = new yy.Access(new yy.Literal('prototype'));
+case 83:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Access(new yy.Literal('prototype')));
 break;
 case 84:this.$ = $$[$0];
 break;
-case 85:this.$ = $$[$0-1];
+case 85:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])($$[$0-1]);
 break;
-case 86:this.$ = yy.extend($$[$0], {
+case 86:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(yy.extend($$[$0], {
 soak: true
-});
+}));
 break;
-case 87:this.$ = new yy.Index($$[$0]);
+case 87:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Index($$[$0]));
 break;
-case 88:this.$ = new yy.Slice($$[$0]);
+case 88:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Slice($$[$0]));
 break;
-case 89:this.$ = new yy.Obj($$[$0-2], $$[$0-3].generated);
+case 89:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])(new yy.Obj($$[$0-2], $$[$0-3].generated));
 break;
-case 90:this.$ = [];
+case 90:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])([]);
 break;
-case 91:this.$ = [$$[$0]];
+case 91:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])([$$[$0]]);
 break;
-case 92:this.$ = $$[$0-2].concat($$[$0]);
+case 92:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])($$[$0-2].concat($$[$0]));
 break;
-case 93:this.$ = $$[$0-3].concat($$[$0]);
+case 93:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])($$[$0-3].concat($$[$0]));
 break;
-case 94:this.$ = $$[$0-5].concat($$[$0-2]);
+case 94:this.$ = yy.addLocationDataFn(_$[$0-5], _$[$0])($$[$0-5].concat($$[$0-2]));
 break;
-case 95:this.$ = new yy.Class;
+case 95:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Class);
 break;
-case 96:this.$ = new yy.Class(null, null, $$[$0]);
+case 96:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Class(null, null, $$[$0]));
 break;
-case 97:this.$ = new yy.Class(null, $$[$0]);
+case 97:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.Class(null, $$[$0]));
 break;
-case 98:this.$ = new yy.Class(null, $$[$0-1], $$[$0]);
+case 98:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])(new yy.Class(null, $$[$0-1], $$[$0]));
 break;
-case 99:this.$ = new yy.Class($$[$0]);
+case 99:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Class($$[$0]));
 break;
-case 100:this.$ = new yy.Class($$[$0-1], null, $$[$0]);
+case 100:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.Class($$[$0-1], null, $$[$0]));
 break;
-case 101:this.$ = new yy.Class($$[$0-2], $$[$0]);
+case 101:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])(new yy.Class($$[$0-2], $$[$0]));
 break;
-case 102:this.$ = new yy.Class($$[$0-3], $$[$0-1], $$[$0]);
+case 102:this.$ = yy.addLocationDataFn(_$[$0-4], _$[$0])(new yy.Class($$[$0-3], $$[$0-1], $$[$0]));
 break;
-case 103:this.$ = new yy.Call($$[$0-2], $$[$0], $$[$0-1]);
+case 103:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.Call($$[$0-2], $$[$0], $$[$0-1]));
 break;
-case 104:this.$ = new yy.Call($$[$0-2], $$[$0], $$[$0-1]);
+case 104:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.Call($$[$0-2], $$[$0], $$[$0-1]));
 break;
-case 105:this.$ = new yy.Call('super', [new yy.Splat(new yy.Literal('arguments'))]);
+case 105:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Call('super', [new yy.Splat(new yy.Literal('arguments'))]));
 break;
-case 106:this.$ = new yy.Call('super', $$[$0]);
+case 106:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Call('super', $$[$0]));
 break;
-case 107:this.$ = false;
+case 107:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(false);
 break;
-case 108:this.$ = true;
+case 108:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(true);
 break;
-case 109:this.$ = [];
+case 109:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])([]);
 break;
-case 110:this.$ = $$[$0-2];
+case 110:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])($$[$0-2]);
 break;
-case 111:this.$ = yy.Value.wrap(new yy.Literal('this'));
+case 111:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(yy.Value.wrap(new yy.Literal('this')));
 break;
-case 112:this.$ = yy.Value.wrap(new yy.Literal('this'));
+case 112:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(yy.Value.wrap(new yy.Literal('this')));
 break;
-case 113:this.$ = yy.Value.wrap(new yy.Literal('this'), [new yy.Access($$[$0])], 'this');
+case 113:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(yy.Value.wrap(yy.addLocationDataFn(_$[$0-1])(new yy.Literal('this')), [yy.addLocationDataFn(_$[$0])(new yy.Access($$[$0]))], 'this'));
 break;
-case 114:this.$ = new yy.Arr([]);
+case 114:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Arr([]));
 break;
-case 115:this.$ = new yy.Arr($$[$0-2]);
+case 115:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])(new yy.Arr($$[$0-2]));
 break;
-case 116:this.$ = 'inclusive';
+case 116:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])('inclusive');
 break;
-case 117:this.$ = 'exclusive';
+case 117:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])('exclusive');
 break;
-case 118:this.$ = new yy.Range($$[$0-3], $$[$0-1], $$[$0-2]);
+case 118:this.$ = yy.addLocationDataFn(_$[$0-4], _$[$0])(new yy.Range($$[$0-3], $$[$0-1], $$[$0-2]));
 break;
-case 119:this.$ = new yy.Range($$[$0-2], $$[$0], $$[$0-1]);
+case 119:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.Range($$[$0-2], $$[$0], $$[$0-1]));
 break;
-case 120:this.$ = new yy.Range($$[$0-1], null, $$[$0]);
+case 120:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Range($$[$0-1], null, $$[$0]));
 break;
-case 121:this.$ = new yy.Range(null, $$[$0], $$[$0-1]);
+case 121:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Range(null, $$[$0], $$[$0-1]));
 break;
-case 122:this.$ = new yy.Range(null, null, $$[$0]);
+case 122:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(new yy.Range(null, null, $$[$0]));
 break;
-case 123:this.$ = [$$[$0]];
+case 123:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])([$$[$0]]);
 break;
-case 124:this.$ = $$[$0-2].concat($$[$0]);
+case 124:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])($$[$0-2].concat($$[$0]));
 break;
-case 125:this.$ = $$[$0-3].concat($$[$0]);
+case 125:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])($$[$0-3].concat($$[$0]));
 break;
-case 126:this.$ = $$[$0-2];
+case 126:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])($$[$0-2]);
 break;
-case 127:this.$ = $$[$0-5].concat($$[$0-2]);
+case 127:this.$ = yy.addLocationDataFn(_$[$0-5], _$[$0])($$[$0-5].concat($$[$0-2]));
 break;
 case 128:this.$ = $$[$0];
 break;
@@ -271,207 +271,207 @@ case 129:this.$ = $$[$0];
 break;
 case 130:this.$ = $$[$0];
 break;
-case 131:this.$ = [].concat($$[$0-2], $$[$0]);
+case 131:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])([].concat($$[$0-2], $$[$0]));
 break;
-case 132:this.$ = new yy.Try($$[$0]);
+case 132:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Try($$[$0]));
 break;
-case 133:this.$ = new yy.Try($$[$0-1], $$[$0][0], $$[$0][1]);
+case 133:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.Try($$[$0-1], $$[$0][0], $$[$0][1]));
 break;
-case 134:this.$ = new yy.Try($$[$0-2], null, null, $$[$0]);
+case 134:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])(new yy.Try($$[$0-2], null, null, $$[$0]));
 break;
-case 135:this.$ = new yy.Try($$[$0-3], $$[$0-2][0], $$[$0-2][1], $$[$0]);
+case 135:this.$ = yy.addLocationDataFn(_$[$0-4], _$[$0])(new yy.Try($$[$0-3], $$[$0-2][0], $$[$0-2][1], $$[$0]));
 break;
-case 136:this.$ = [$$[$0-1], $$[$0]];
+case 136:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])([$$[$0-1], $$[$0]]);
 break;
-case 137:this.$ = [yy.Value.wrap($$[$0-1]), $$[$0]];
+case 137:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])([yy.addLocationDataFn(_$[$0-1])(yy.Value.wrap($$[$0-1])), $$[$0]]);
 break;
-case 138:this.$ = new yy.Throw($$[$0]);
+case 138:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Throw($$[$0]));
 break;
-case 139:this.$ = new yy.Parens($$[$0-1]);
+case 139:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.Parens($$[$0-1]));
 break;
-case 140:this.$ = new yy.Parens($$[$0-2]);
+case 140:this.$ = yy.addLocationDataFn(_$[$0-4], _$[$0])(new yy.Parens($$[$0-2]));
 break;
-case 141:this.$ = new yy.While($$[$0]);
+case 141:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.While($$[$0]));
 break;
-case 142:this.$ = new yy.While($$[$0-2], {
+case 142:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])(new yy.While($$[$0-2], {
 guard: $$[$0]
-});
+}));
 break;
-case 143:this.$ = new yy.While($$[$0], {
+case 143:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.While($$[$0], {
 invert: true
-});
+}));
 break;
-case 144:this.$ = new yy.While($$[$0-2], {
+case 144:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])(new yy.While($$[$0-2], {
 invert: true,
 guard: $$[$0]
+}));
+break;
+case 145:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])($$[$0-1].addBody($$[$0]));
+break;
+case 146:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])($$[$0].addBody(yy.addLocationDataFn(_$[$0-1])(yy.Block.wrap([$$[$0-1]]))));
+break;
+case 147:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])($$[$0].addBody(yy.addLocationDataFn(_$[$0-1])(yy.Block.wrap([$$[$0-1]]))));
+break;
+case 148:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])($$[$0]);
+break;
+case 149:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.While(yy.addLocationDataFn(_$[$0-1])(new yy.Literal('true'))).addBody($$[$0]));
+break;
+case 150:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.While(yy.addLocationDataFn(_$[$0-1])(new yy.Literal('true'))).addBody(yy.addLocationDataFn(_$[$0])(yy.Block.wrap([$$[$0]]))));
+break;
+case 151:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.For($$[$0-1], $$[$0]));
+break;
+case 152:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.For($$[$0-1], $$[$0]));
+break;
+case 153:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.For($$[$0], $$[$0-1]));
+break;
+case 154:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])({
+source: yy.addLocationDataFn(_$[$0])(yy.Value.wrap($$[$0]))
+});
 break;
-case 145:this.$ = $$[$0-1].addBody($$[$0]);
-break;
-case 146:this.$ = $$[$0].addBody(yy.Block.wrap([$$[$0-1]]));
-break;
-case 147:this.$ = $$[$0].addBody(yy.Block.wrap([$$[$0-1]]));
-break;
-case 148:this.$ = $$[$0];
-break;
-case 149:this.$ = new yy.While(new yy.Literal('true')).addBody($$[$0]);
-break;
-case 150:this.$ = new yy.While(new yy.Literal('true')).addBody(yy.Block.wrap([$$[$0]]));
-break;
-case 151:this.$ = new yy.For($$[$0-1], $$[$0]);
-break;
-case 152:this.$ = new yy.For($$[$0-1], $$[$0]);
-break;
-case 153:this.$ = new yy.For($$[$0], $$[$0-1]);
-break;
-case 154:this.$ = {
-source: yy.Value.wrap($$[$0])
-};
-break;
-case 155:this.$ = (function () {
+case 155:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])((function () {
 $$[$0].own = $$[$0-1].own;
 $$[$0].name = $$[$0-1][0];
 $$[$0].index = $$[$0-1][1];
 return $$[$0];
-}());
+}()));
 break;
-case 156:this.$ = $$[$0];
+case 156:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])($$[$0]);
 break;
-case 157:this.$ = (function () {
+case 157:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])((function () {
 $$[$0].own = true;
 return $$[$0];
-}());
+}()));
 break;
 case 158:this.$ = $$[$0];
 break;
 case 159:this.$ = $$[$0];
 break;
-case 160:this.$ = yy.Value.wrap($$[$0]);
+case 160:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(yy.Value.wrap($$[$0]));
 break;
-case 161:this.$ = yy.Value.wrap($$[$0]);
+case 161:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])(yy.Value.wrap($$[$0]));
 break;
-case 162:this.$ = [$$[$0]];
+case 162:this.$ = yy.addLocationDataFn(_$[$0], _$[$0])([$$[$0]]);
 break;
-case 163:this.$ = [$$[$0-2], $$[$0]];
+case 163:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])([$$[$0-2], $$[$0]]);
 break;
-case 164:this.$ = {
+case 164:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])({
 source: $$[$0]
-};
+});
 break;
-case 165:this.$ = {
+case 165:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])({
 source: $$[$0],
 object: true
-};
+});
 break;
-case 166:this.$ = {
+case 166:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])({
 source: $$[$0-2],
 guard: $$[$0]
-};
+});
 break;
-case 167:this.$ = {
+case 167:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])({
 source: $$[$0-2],
 guard: $$[$0],
 object: true
-};
+});
 break;
-case 168:this.$ = {
+case 168:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])({
 source: $$[$0-2],
 step: $$[$0]
-};
+});
 break;
-case 169:this.$ = {
+case 169:this.$ = yy.addLocationDataFn(_$[$0-5], _$[$0])({
 source: $$[$0-4],
 guard: $$[$0-2],
 step: $$[$0]
-};
+});
 break;
-case 170:this.$ = {
+case 170:this.$ = yy.addLocationDataFn(_$[$0-5], _$[$0])({
 source: $$[$0-4],
 step: $$[$0-2],
 guard: $$[$0]
-};
+});
 break;
-case 171:this.$ = new yy.Switch($$[$0-3], $$[$0-1]);
+case 171:this.$ = yy.addLocationDataFn(_$[$0-4], _$[$0])(new yy.Switch($$[$0-3], $$[$0-1]));
 break;
-case 172:this.$ = new yy.Switch($$[$0-5], $$[$0-3], $$[$0-1]);
+case 172:this.$ = yy.addLocationDataFn(_$[$0-6], _$[$0])(new yy.Switch($$[$0-5], $$[$0-3], $$[$0-1]));
 break;
-case 173:this.$ = new yy.Switch(null, $$[$0-1]);
+case 173:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])(new yy.Switch(null, $$[$0-1]));
 break;
-case 174:this.$ = new yy.Switch(null, $$[$0-3], $$[$0-1]);
+case 174:this.$ = yy.addLocationDataFn(_$[$0-5], _$[$0])(new yy.Switch(null, $$[$0-3], $$[$0-1]));
 break;
 case 175:this.$ = $$[$0];
 break;
-case 176:this.$ = $$[$0-1].concat($$[$0]);
+case 176:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])($$[$0-1].concat($$[$0]));
 break;
-case 177:this.$ = [[$$[$0-1], $$[$0]]];
+case 177:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])([[$$[$0-1], $$[$0]]]);
 break;
-case 178:this.$ = [[$$[$0-2], $$[$0-1]]];
+case 178:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])([[$$[$0-2], $$[$0-1]]]);
 break;
-case 179:this.$ = new yy.If($$[$0-1], $$[$0], {
-type: $$[$0-2]
-});
-break;
-case 180:this.$ = $$[$0-4].addElse(new yy.If($$[$0-1], $$[$0], {
+case 179:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.If($$[$0-1], $$[$0], {
 type: $$[$0-2]
 }));
 break;
+case 180:this.$ = yy.addLocationDataFn(_$[$0-4], _$[$0])($$[$0-4].addElse(new yy.If($$[$0-1], $$[$0], {
+type: $$[$0-2]
+})));
+break;
 case 181:this.$ = $$[$0];
 break;
-case 182:this.$ = $$[$0-2].addElse($$[$0]);
+case 182:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])($$[$0-2].addElse($$[$0]));
 break;
-case 183:this.$ = new yy.If($$[$0], yy.Block.wrap([$$[$0-2]]), {
+case 183:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.If($$[$0], yy.addLocationDataFn(_$[$0-2])(yy.Block.wrap([$$[$0-2]])), {
 type: $$[$0-1],
 statement: true
-});
+}));
 break;
-case 184:this.$ = new yy.If($$[$0], yy.Block.wrap([$$[$0-2]]), {
+case 184:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.If($$[$0], yy.addLocationDataFn(_$[$0-2])(yy.Block.wrap([$$[$0-2]])), {
 type: $$[$0-1],
 statement: true
-});
+}));
 break;
-case 185:this.$ = yy.Op.create($$[$0-1], $$[$0]);
+case 185:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(yy.Op.create($$[$0-1], $$[$0]));
 break;
-case 186:this.$ = yy.Op.create('-', $$[$0]);
+case 186:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(yy.Op.create('-', $$[$0]));
 break;
-case 187:this.$ = yy.Op.create('+', $$[$0]);
+case 187:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(yy.Op.create('+', $$[$0]));
 break;
-case 188:this.$ = yy.Op.create('--', $$[$0]);
+case 188:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(yy.Op.create('--', $$[$0]));
 break;
-case 189:this.$ = yy.Op.create('++', $$[$0]);
+case 189:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(yy.Op.create('++', $$[$0]));
 break;
-case 190:this.$ = yy.Op.create('--', $$[$0-1], null, true);
+case 190:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(yy.Op.create('--', $$[$0-1], null, true));
 break;
-case 191:this.$ = yy.Op.create('++', $$[$0-1], null, true);
+case 191:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(yy.Op.create('++', $$[$0-1], null, true));
 break;
-case 192:this.$ = new yy.Existence($$[$0-1]);
+case 192:this.$ = yy.addLocationDataFn(_$[$0-1], _$[$0])(new yy.Existence($$[$0-1]));
 break;
-case 193:this.$ = yy.Op.create('+', $$[$0-2], $$[$0]);
+case 193:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(yy.Op.create('+', $$[$0-2], $$[$0]));
 break;
-case 194:this.$ = yy.Op.create('-', $$[$0-2], $$[$0]);
+case 194:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(yy.Op.create('-', $$[$0-2], $$[$0]));
 break;
-case 195:this.$ = yy.Op.create($$[$0-1], $$[$0-2], $$[$0]);
+case 195:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(yy.Op.create($$[$0-1], $$[$0-2], $$[$0]));
 break;
-case 196:this.$ = yy.Op.create($$[$0-1], $$[$0-2], $$[$0]);
+case 196:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(yy.Op.create($$[$0-1], $$[$0-2], $$[$0]));
 break;
-case 197:this.$ = yy.Op.create($$[$0-1], $$[$0-2], $$[$0]);
+case 197:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(yy.Op.create($$[$0-1], $$[$0-2], $$[$0]));
 break;
-case 198:this.$ = yy.Op.create($$[$0-1], $$[$0-2], $$[$0]);
+case 198:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(yy.Op.create($$[$0-1], $$[$0-2], $$[$0]));
 break;
-case 199:this.$ = (function () {
+case 199:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])((function () {
 if ($$[$0-1].charAt(0) === '!') {
 return yy.Op.create($$[$0-1].slice(1), $$[$0-2], $$[$0]).invert();
 } else {
 return yy.Op.create($$[$0-1], $$[$0-2], $$[$0]);
} }
}()); }()));
break; break;
case 200:this.$ = new yy.Assign($$[$0-2], $$[$0], $$[$0-1]); case 200:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.Assign($$[$0-2], $$[$0], $$[$0-1]));
break; break;
case 201:this.$ = new yy.Assign($$[$0-4], $$[$0-1], $$[$0-3]); case 201:this.$ = yy.addLocationDataFn(_$[$0-4], _$[$0])(new yy.Assign($$[$0-4], $$[$0-1], $$[$0-3]));
break; break;
case 202:this.$ = new yy.Assign($$[$0-3], $$[$0], $$[$0-2]); case 202:this.$ = yy.addLocationDataFn(_$[$0-3], _$[$0])(new yy.Assign($$[$0-3], $$[$0], $$[$0-2]));
break; break;
case 203:this.$ = new yy.Extends($$[$0-2], $$[$0]); case 203:this.$ = yy.addLocationDataFn(_$[$0-2], _$[$0])(new yy.Extends($$[$0-2], $$[$0]));
break; break;
} }
}, },
@@ -591,19 +591,16 @@ return new Parser;
 if (typeof require !== 'undefined' && typeof exports !== 'undefined') {
 exports.parser = parser;
 exports.Parser = parser.Parser;
-exports.parse = function () { return parser.parse.apply(parser, arguments); }
+exports.parse = function () { return parser.parse.apply(parser, arguments); };
 exports.main = function commonjsMain(args) {
-if (!args[1])
-throw new Error('Usage: '+args[0]+' FILE');
-var source, cwd;
-if (typeof process !== 'undefined') {
-source = require('fs').readFileSync(require('path').resolve(args[1]), "utf8");
-} else {
-source = require("file").path(require("file").cwd()).join(args[1]).read({charset: "utf-8"});
-}
+if (!args[1]) {
+console.log('Usage: '+args[0]+' FILE');
+process.exit(1);
+}
+var source = require('fs').readFileSync(require('path').normalize(args[1]), "utf8");
 return exports.parser.parse(source);
-}
+};
 if (typeof module !== 'undefined' && require.main === module) {
-exports.main(typeof process !== 'undefined' ? process.argv.slice(1) : require("system").args);
+exports.main(process.argv.slice(1));
 }
 }
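Every semantic action above is now wrapped in `yy.addLocationDataFn(...)`, built from jison's `_$` location table. A minimal sketch (not part of this commit) of what that runtime wrapper does, assuming nodes expose the `updateLocationDataIfMissing` method added in src/nodes.coffee further down:

    # Sketch: stamp the span from the production's first symbol to its last
    # onto the resulting AST node, then return the node unchanged otherwise.
    addLocationDataFn = (first, last) -> (node) ->
      if typeof node is 'object' and node.updateLocationDataIfMissing
        node.updateLocationDataIfMissing
          first_line: first.first_line
          first_column: first.first_column
          last_line: last.last_line
          last_column: last.last_column
      node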
@@ -18,6 +18,7 @@
 this.tagPostfixConditionals();
 this.addImplicitBraces();
 this.addImplicitParentheses();
+this.addLocationDataToGeneratedTokens();
 return this.tokens;
 };
@@ -133,7 +134,7 @@
 };
 action = function(token, i) {
 var tok;
-tok = this.generate('}', '}', token[2]);
+tok = this.generate('}', '}');
 return this.tokens.splice(i, 0, tok);
 };
 return this.scanTokens(function(token, i, tokens) {
@@ -160,7 +161,7 @@
 startsLine = !prevTag || (__indexOf.call(LINEBREAKS, prevTag) >= 0);
 value = new String('{');
 value.generated = true;
-tok = this.generate('{', value, token[2]);
+tok = this.generate('{', value);
 tokens.splice(idx, 0, tok);
 this.detectEnd(i + 2, condition, action);
 return 2;
@@ -189,7 +190,7 @@
 return !token.generated && this.tag(i - 1) !== ',' && (__indexOf.call(IMPLICIT_END, tag) >= 0 || (tag === 'INDENT' && !seenControl)) && (tag !== 'INDENT' || (((_ref = this.tag(i - 2)) !== 'CLASS' && _ref !== 'EXTENDS') && (_ref1 = this.tag(i - 1), __indexOf.call(IMPLICIT_BLOCK, _ref1) < 0) && !(callIndex === i - 1 && (post = this.tokens[i + 1]) && post.generated && post[0] === '{')));
 };
 action = function(token, i) {
-return this.tokens.splice(i, 0, this.generate('CALL_END', ')', token[2]));
+return this.tokens.splice(i, 0, this.generate('CALL_END', ')'));
 };
 return this.scanTokens(function(token, i, tokens) {
 var callObject, current, next, prev, tag, _ref, _ref1, _ref2;
@@ -223,6 +224,32 @@
 });
 };
+Rewriter.prototype.addLocationDataToGeneratedTokens = function() {
+return this.scanTokens(function(token, i, tokens) {
+var prevToken, tag;
+tag = token[0];
+if ((token.generated || token.explicit) && (!token[2])) {
+if (i > 0) {
+prevToken = tokens[i - 1];
+token[2] = {
+first_line: prevToken[2].last_line,
+first_column: prevToken[2].last_column,
+last_line: prevToken[2].last_line,
+last_column: prevToken[2].last_column
+};
+} else {
+token[2] = {
+first_line: 0,
+first_column: 0,
+last_line: 0,
+last_column: 0
+};
+}
+}
+return 1;
+});
+};
 Rewriter.prototype.addImplicitIndentation = function() {
 var action, condition, indent, outdent, starter;
 starter = indent = outdent = null;
@@ -292,17 +319,20 @@
 if (implicit == null) {
 implicit = false;
 }
-indent = ['INDENT', 2, token[2]];
-outdent = ['OUTDENT', 2, token[2]];
+indent = ['INDENT', 2];
+outdent = ['OUTDENT', 2];
 if (implicit) {
 indent.generated = outdent.generated = true;
 }
+if (!implicit) {
+indent.explicit = outdent.explicit = true;
+}
 return [indent, outdent];
 };
-Rewriter.prototype.generate = function(tag, value, line) {
+Rewriter.prototype.generate = function(tag, value) {
 var tok;
-tok = [tag, value, line];
+tok = [tag, value];
 tok.generated = true;
 return tok;
 };
@@ -122,7 +122,13 @@ lexer = new Lexer
 # directly as a "Jison lexer".
 parser.lexer =
 lex: ->
-[tag, @yytext, @yylineno] = @tokens[@pos++] or ['']
+token = @tokens[@pos++]
+if token
+[tag, @yytext, @yylloc] = token
+@yylineno = @yylloc.first_line
+else
+tag = ''
 tag
 setInput: (@tokens) ->
 @pos = 0
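For reference, the shim satisfies Jison's one-token-per-call lexer contract. A minimal sketch (illustrative, not part of this commit) of driving it by hand, with the token shape described in src/lexer.coffee below:

    lexer = parser.lexer
    lexer.setInput [['NUMBER', '42', first_line: 0, first_column: 0, last_line: 0, last_column: 1]]
    tag = lexer.lex()   # 'NUMBER'; lexer.yytext is '42', lexer.yylineno is 0
    tag = lexer.lex()   # '' once the stream is exhausted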
@@ -291,8 +291,10 @@ lint = (file, js) ->
 # Pretty-print a stream of tokens.
 printTokens = (tokens) ->
 strings = for token in tokens
-[tag, value] = [token[0], token[1].toString().replace(/\n/, '\\n')]
-"[#{tag} #{value}]"
+tag = token[0]
+value = token[1].toString().replace(/\n/, '\\n')
+locationData = helpers.locationDataToString token[2]
+"[#{tag} #{value} #{locationData}]"
 printLine strings.join(' ')
 # Use the [OptionParser module](optparse.html) to extract all options from
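With the extra field, each printed token gains a 1-based `line:col-line:col` range from `locationDataToString` (defined in src/helpers.coffee below). Illustrative output for `coffee --tokens` on `a = 79`, using the positions asserted by test/location.coffee in this commit; the TERMINATOR range is a guess:

    [IDENTIFIER a 1:1-1:1] [= = 1:3-1:3] [NUMBER 79 1:5-1:6] [TERMINATOR \n 1:7-1:7]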
@@ -32,12 +32,28 @@ unwrap = /^function\s*\(\)\s*\{\s*return\s*([\s\S]*);\s*\}/
 # previous nonterminal.
 o = (patternString, action, options) ->
 patternString = patternString.replace /\s{2,}/g, ' '
+patternCount = patternString.split(' ').length
 return [patternString, '$$ = $1;', options] unless action
 action = if match = unwrap.exec action then match[1] else "(#{action}())"
+
+# All runtime functions we need are defined on "yy"
 action = action.replace /\bnew /g, '$&yy.'
 action = action.replace /\b(?:Block\.wrap|extend)\b/g, 'yy.$&'
 action = action.replace /\b(Op|Value\.(create|wrap))\b/g, 'yy.$&'
-[patternString, "$$ = #{action};", options]
+
+# Returns a function which adds location data to the first parameter passed
+# in, and returns the parameter. If the parameter is not a node, it will
+# just be passed through unaffected.
+addLocationDataFn = (first, last) ->
+if not last
+"yy.addLocationDataFn(@#{first})"
+else
+"yy.addLocationDataFn(@#{first}, @#{last})"
+
+action = action.replace /LOCDATA\(([0-9]*)\)/g, addLocationDataFn('$1')
+action = action.replace /LOCDATA\(([0-9]*),\s*([0-9]*)\)/g, addLocationDataFn('$1', '$2')
+
+[patternString, "$$ = #{addLocationDataFn(1, patternCount)}(#{action});", options]
 # Grammatical Rules
 # -----------------
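To make the wrapping concrete, a trimmed restatement (illustrative only) of the string rewriting `o` now performs on an action body:

    wrapAction = (patternString, action) ->
      patternCount = patternString.split(' ').length
      locFn = (first, last) -> "yy.addLocationDataFn(@#{first}, @#{last})"
      "$$ = #{locFn 1, patternCount}(#{action});"

    console.log wrapAction 'Expression MATH Expression', '(yy.Op.create($2, $1, $3))'
    # => $$ = yy.addLocationDataFn(@1, @3)((yy.Op.create($2, $1, $3)));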
@@ -144,9 +160,9 @@ grammar =
 # the ordinary **Assign** is that these allow numbers and strings as keys.
 AssignObj: [
 o 'ObjAssignable', -> Value.wrap $1
-o 'ObjAssignable : Expression', -> new Assign Value.wrap($1), $3, 'object'
+o 'ObjAssignable : Expression', -> new Assign LOCDATA(1)(Value.wrap($1)), $3, 'object'
 o 'ObjAssignable :
-INDENT Expression OUTDENT', -> new Assign Value.wrap($1), $4, 'object'
+INDENT Expression OUTDENT', -> new Assign LOCDATA(1)(Value.wrap($1)), $4, 'object'
 o 'Comment'
 ]
@@ -248,7 +264,7 @@
 Accessor: [
 o '. Identifier', -> new Access $2
 o '?. Identifier', -> new Access $2, 'soak'
-o ':: Identifier', -> [(new Access new Literal 'prototype'), new Access $2]
+o ':: Identifier', -> [LOCDATA(1)(new Access new Literal 'prototype'), LOCDATA(2)(new Access $2)]
 o '::', -> new Access new Literal 'prototype'
 o 'Index'
 ]
@@ -320,7 +336,7 @@
 # A reference to a property on *this*.
 ThisProperty: [
-o '@ Identifier', -> Value.wrap new Literal('this'), [new Access($2)], 'this'
+o '@ Identifier', -> Value.wrap LOCDATA(1)(new Literal('this')), [LOCDATA(2)(new Access($2))], 'this'
 ]
 # The array literal.
@@ -384,7 +400,7 @@
 # A catch clause names its error and runs a block of code.
 Catch: [
 o 'CATCH Identifier Block', -> [$2, $3]
-o 'CATCH Object Block', -> [Value.wrap($2), $3]
+o 'CATCH Object Block', -> [LOCDATA(2)(Value.wrap($2)), $3]
 ]
 # Throw an exception object.
@@ -413,14 +429,14 @@
 # or postfix, with a single expression. There is no do..while.
 While: [
 o 'WhileSource Block', -> $1.addBody $2
-o 'Statement WhileSource', -> $2.addBody Block.wrap [$1]
-o 'Expression WhileSource', -> $2.addBody Block.wrap [$1]
+o 'Statement WhileSource', -> $2.addBody LOCDATA(1) Block.wrap([$1])
+o 'Expression WhileSource', -> $2.addBody LOCDATA(1) Block.wrap([$1])
 o 'Loop', -> $1
 ]
 Loop: [
-o 'LOOP Block', -> new While(new Literal 'true').addBody $2
-o 'LOOP Expression', -> new While(new Literal 'true').addBody Block.wrap [$2]
+o 'LOOP Block', -> new While(LOCDATA(1) new Literal 'true').addBody $2
+o 'LOOP Expression', -> new While(LOCDATA(1) new Literal 'true').addBody LOCDATA(2) Block.wrap [$2]
 ]
 # Array, object, and range comprehensions, at the most generic level.
@@ -433,7 +449,7 @@
 ]
 ForBody: [
-o 'FOR Range', -> source: Value.wrap($2)
+o 'FOR Range', -> source: LOCDATA(2) Value.wrap($2)
 o 'ForStart ForSource', -> $2.own = $1.own; $2.name = $1[0]; $2.index = $1[1]; $2
 ]
@@ -503,8 +519,8 @@
 If: [
 o 'IfBlock'
 o 'IfBlock ELSE Block', -> $1.addElse $3
-o 'Statement POST_IF Expression', -> new If $3, Block.wrap([$1]), type: $2, statement: true
-o 'Expression POST_IF Expression', -> new If $3, Block.wrap([$1]), type: $2, statement: true
+o 'Statement POST_IF Expression', -> new If $3, LOCDATA(1)(Block.wrap [$1]), type: $2, statement: true
+o 'Expression POST_IF Expression', -> new If $3, LOCDATA(1)(Block.wrap [$1]), type: $2, statement: true
 ]
 # Arithmetic and logical operators, working on one or more operands.
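The `LOCDATA(n)` calls above are plain text inside the action bodies; the replacements in `o` expand them before the grammar is handed to Jison. An illustrative run of that expansion, using the same regex as the diff:

    action = "new Assign LOCDATA(1)(Value.wrap($1)), $3, 'object'"
    action = action.replace /LOCDATA\(([0-9]*)\)/g, 'yy.addLocationDataFn(@$1)'
    console.log action
    # => new Assign yy.addLocationDataFn(@1)(Value.wrap($1)), $3, 'object'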
@@ -59,3 +59,37 @@ exports.last = (array, back) -> array[array.length - (back or 0) - 1]
 exports.some = Array::some ? (fn) ->
 return true for e in this when fn e
 false
+
+# Merge two jison-style location data objects together.
+# If `last` is not provided, this will simply return `first`.
+buildLocationData = (first, last) ->
+if not last
+first
+else
+first_line: first.first_line
+first_column: first.first_column
+last_line: last.last_line
+last_column: last.last_column
+
+# This returns a function which takes an object as a parameter, and if that object is an AST node,
+# updates that object's locationData. The object is returned either way.
+exports.addLocationDataFn = (first, last) ->
+(obj) ->
+if ((typeof obj) is 'object') and (!!obj['updateLocationDataIfMissing'])
+obj.updateLocationDataIfMissing buildLocationData(first, last)
+return obj
+
+# Convert jison location data to a string.
+# `obj` can be a token, or a locationData.
+exports.locationDataToString = (obj) ->
+if ("2" of obj) and ("first_line" of obj[2]) then locationData = obj[2]
+else if "first_line" of obj then locationData = obj
+if locationData
+"#{locationData.first_line + 1}:#{locationData.first_column + 1}-" +
+"#{locationData.last_line + 1}:#{locationData.last_column + 1}"
+else
+"No location data"
@@ -3,14 +3,16 @@
 # a token is produced, we consume the match, and start again. Tokens are in the
 # form:
 #
-# [tag, value, lineNumber]
+# [tag, value, locationData]
 #
-# Which is a format that can be fed directly into [Jison](http://github.com/zaach/jison).
+# where locationData is {first_line, first_column, last_line, last_column}, which is a
+# format that can be fed directly into [Jison](http://github.com/zaach/jison). These
+# are read by jison in the `parser.lexer` function defined in coffee-script.coffee.
 {Rewriter, INVERSES} = require './rewriter'
 # Import the helpers we need.
-{count, starts, compact, last} = require './helpers'
+{count, starts, compact, last, locationDataToString} = require './helpers'
 # The Lexer Class
 # ---------------
@@ -34,7 +36,6 @@ exports.Lexer = class Lexer
 tokenize: (code, opts = {}) ->
 @literate = opts.literate # Are we lexing literate CoffeeScript?
 code = @clean code # The stripped, cleaned original source code.
-@line = opts.line or 0 # The current line.
 @indent = 0 # The current indentation level.
 @indebt = 0 # The over-indentation at the current level.
 @outdebt = 0 # The under-outdentation at the current level.
@@ -42,12 +43,18 @@
 @ends = [] # The stack for pairing up tokens.
 @tokens = [] # Stream of parsed tokens in the form `['TYPE', value, line]`.
+@chunkLine =
+opts.line or 0 # The start line for the current @chunk.
+@chunkColumn =
+opts.column or 0 # The start column of the current @chunk.
 # At every position, run through this list of attempted matches,
 # short-circuiting if any of them succeed. Their order determines precedence:
 # `@literalToken` is the fallback catch-all.
 i = 0
 while @chunk = code[i..]
-i += @identifierToken() or
+consumed = \
+@identifierToken() or
 @commentToken() or
 @whitespaceToken() or
 @lineToken() or
@@ -58,6 +65,11 @@
 @jsToken() or
 @literalToken()
+
+# Update position
+[@chunkLine, @chunkColumn] = @getLineAndColumnFromChunk consumed
+i += consumed
+
 @closeIndentation()
 @error "missing #{tag}" if tag = @ends.pop()
 return @tokens if opts.rewrite is off
@@ -92,6 +104,10 @@
 return 0 unless match = IDENTIFIER.exec @chunk
 [input, id, colon] = match
+
+# Preserve length of id for location data
+idLength = id.length
+poppedToken = undefined
 if id is 'own' and @tag() is 'FOR'
 @token 'OWN', id
 return id.length
@@ -117,7 +133,7 @@
 else
 tag = 'RELATION'
 if @value() is '!'
-@tokens.pop()
+poppedToken = @tokens.pop()
 id = '!' + id
 if id in JS_FORBIDDEN
@@ -138,8 +154,14 @@
 when 'break', 'continue' then 'STATEMENT'
 else tag
-@token tag, id
-@token ':', ':' if colon
+tagToken = @token tag, id, 0, idLength
+if poppedToken
+[tagToken[2].first_line, tagToken[2].first_column] =
+[poppedToken[2].first_line, poppedToken[2].first_column]
+if colon
+colonOffset = input.lastIndexOf ':'
+@token ':', ':', colonOffset, colon.length
 input.length
 # Matches numbers, including decimals, hex, and exponential notation.
@@ -160,7 +182,7 @@
 number = '0x' + (parseInt octalLiteral[1], 8).toString 16
 if binaryLiteral = /^0b([01]+)/.exec number
 number = '0x' + (parseInt binaryLiteral[1], 2).toString 16
-@token 'NUMBER', number
+@token 'NUMBER', number, 0, lexedLength
 lexedLength
 # Matches strings, including multi-line strings. Ensures that quotation marks
@@ -169,18 +191,18 @@
 switch @chunk.charAt 0
 when "'"
 return 0 unless match = SIMPLESTR.exec @chunk
-@token 'STRING', (string = match[0]).replace MULTILINER, '\\\n'
+string = match[0]
+@token 'STRING', string.replace(MULTILINER, '\\\n'), 0, string.length
 when '"'
 return 0 unless string = @balancedString @chunk, '"'
 if 0 < string.indexOf '#{', 1
-@interpolateString string[1...-1]
+@interpolateString string[1...-1], strOffset: 1, lexedLength: string.length
 else
-@token 'STRING', @escapeLines string
+@token 'STRING', @escapeLines string, 0, string.length
 else
 return 0
 if octalEsc = /^(?:\\.|[^\\])*\\(?:0[0-7]|[1-7])/.test string
 @error "octal escape sequences #{string} are not allowed"
-@line += count string, '\n'
 string.length
 # Matches heredocs, adjusting indentation to the correct level, as heredocs
@@ -191,10 +213,9 @@
 quote = heredoc.charAt 0
 doc = @sanitizeHeredoc match[2], quote: quote, indent: null
 if quote is '"' and 0 <= doc.indexOf '#{'
-@interpolateString doc, heredoc: yes
+@interpolateString doc, heredoc: yes, strOffset: 3, lexedLength: heredoc.length
 else
-@token 'STRING', @makeString doc, quote, yes
-@line += count heredoc, '\n'
+@token 'STRING', @makeString(doc, quote, yes), 0, heredoc.length
 heredoc.length
 # Matches and consumes comments.
@@ -202,16 +223,16 @@
 return 0 unless match = @chunk.match COMMENT
 [comment, here] = match
 if here
-@token 'HERECOMMENT', @sanitizeHeredoc here,
-herecomment: true, indent: Array(@indent + 1).join(' ')
-@line += count comment, '\n'
+@token 'HERECOMMENT',
+(@sanitizeHeredoc here,
+herecomment: true, indent: Array(@indent + 1).join(' ')),
+0, comment.length
 comment.length
 # Matches JavaScript interpolated directly into the source via backticks.
 jsToken: ->
 return 0 unless @chunk.charAt(0) is '`' and match = JSTOKEN.exec @chunk
-@token 'JS', (script = match[0])[1...-1]
-@line += count script, '\n'
+@token 'JS', (script = match[0])[1...-1], 0, script.length
 script.length
 # Matches regular expression literals. Lexing regular expressions is difficult
@@ -221,7 +242,6 @@
 return 0 if @chunk.charAt(0) isnt '/'
 if match = HEREGEX.exec @chunk
 length = @heregexToken match
-@line += count match[0], '\n'
 return length
 prev = last @tokens
@@ -230,7 +250,7 @@
 [match, regex, flags] = match
 if regex[..1] is '/*' then @error 'regular expressions cannot begin with `*`'
 if regex is '//' then regex = '/(?:)/'
-@token 'REGEX', "#{regex}#{flags}"
+@token 'REGEX', "#{regex}#{flags}", 0, match.length
 match.length
 # Matches multiline extended regular expressions.
@@ -239,24 +259,45 @@
 if 0 > body.indexOf '#{'
 re = body.replace(HEREGEX_OMIT, '').replace(/\//g, '\\/')
 if re.match /^\*/ then @error 'regular expressions cannot begin with `*`'
-@token 'REGEX', "/#{ re or '(?:)' }/#{flags}"
+@token 'REGEX', "/#{ re or '(?:)' }/#{flags}", 0, heregex.length
 return heregex.length
-@token 'IDENTIFIER', 'RegExp'
-@tokens.push ['CALL_START', '(']
+@token 'IDENTIFIER', 'RegExp', 0, 0
+@token 'CALL_START', '(', 0, 0
 tokens = []
-for [tag, value] in @interpolateString(body, regex: yes)
+for token in @interpolateString(body, regex: yes)
+[tag, value] = token
 if tag is 'TOKENS'
 tokens.push value...
-else
+else if tag is 'NEOSTRING'
 continue unless value = value.replace HEREGEX_OMIT, ''
+# Convert NEOSTRING into STRING
 value = value.replace /\\/g, '\\\\'
-tokens.push ['STRING', @makeString(value, '"', yes)]
-tokens.push ['+', '+']
+token[0] = 'STRING'
+token[1] = @makeString(value, '"', yes)
+tokens.push token
+else
+@error "Unexpected #{tag}"
+prev = last @tokens
+plusToken = ['+', '+']
+plusToken[2] = prev[2] # Copy location data
+tokens.push plusToken
+
+# Remove the extra "+"
 tokens.pop()
-@tokens.push ['STRING', '""'], ['+', '+'] unless tokens[0]?[0] is 'STRING'
+
+unless tokens[0]?[0] is 'STRING'
+@token 'STRING', '""', 0, 0
+@token '+', '+', 0, 0
 @tokens.push tokens...
-@tokens.push [',', ','], ['STRING', '"' + flags + '"'] if flags
-@token ')', ')'
+
+if flags
+# Find the flags in the heregex
+flagsOffset = heregex.lastIndexOf flags
+@token ',', ',', flagsOffset, 0
+@token 'STRING', '"' + flags + '"', flagsOffset, flags.length
+
+@token ')', ')', heregex.length-1, 0
+
 heregex.length
 # Matches newlines, indents, and outdents, and determines which is which.
@@ -272,32 +313,32 @@
 lineToken: ->
 return 0 unless match = MULTI_DENT.exec @chunk
 indent = match[0]
-@line += count indent, '\n'
 @seenFor = no
 size = indent.length - 1 - indent.lastIndexOf '\n'
 noNewlines = @unfinished()
 if size - @indebt is @indent
-if noNewlines then @suppressNewlines() else @newlineToken()
+if noNewlines then @suppressNewlines() else @newlineToken 0
 return indent.length
 if size > @indent
 if noNewlines
 @indebt = size - @indent
 @suppressNewlines()
 return indent.length
 diff = size - @indent + @outdebt
-@token 'INDENT', diff
+@token 'INDENT', diff, 0, indent.length
 @indents.push diff
 @ends.push 'OUTDENT'
 @outdebt = @indebt = 0
 else
 @indebt = 0
-@outdentToken @indent - size, noNewlines
+@outdentToken @indent - size, noNewlines, indent.length
 @indent = size
 indent.length
 # Record an outdent token or multiple tokens, if we happen to be moving back
 # inwards past several recorded indents.
-outdentToken: (moveOut, noNewlines) ->
+outdentToken: (moveOut, noNewlines, outdentLength) ->
 while moveOut > 0
 len = @indents.length - 1
 if @indents[len] is undefined
@@ -313,10 +354,11 @@
 moveOut -= dent
 @outdebt = 0
 @pair 'OUTDENT'
-@token 'OUTDENT', dent
+@token 'OUTDENT', dent, 0, outdentLength
 @outdebt -= moveOut if dent
 @tokens.pop() while @value() is ';'
-@token 'TERMINATOR', '\n' unless @tag() is 'TERMINATOR' or noNewlines
+
+@token 'TERMINATOR', '\n', outdentLength, 0 unless @tag() is 'TERMINATOR' or noNewlines
 this
 # Matches and consumes non-meaningful whitespace. Tag the previous token
@@ -329,9 +371,9 @@
 if match then match[0].length else 0
 # Generate a newline token. Consecutive newlines get merged together.
-newlineToken: ->
+newlineToken: (offset) ->
 @tokens.pop() while @value() is ';'
-@token 'TERMINATOR', '\n' unless @tag() is 'TERMINATOR'
+@token 'TERMINATOR', '\n', offset, 0 unless @tag() is 'TERMINATOR'
 this
 # Use a `\` at a line-ending to suppress the newline.
@@ -467,8 +509,28 @@
 # If it encounters an interpolation, this method will recursively create a
 # new Lexer, tokenize the interpolated contents, and merge them into the
 # token stream.
+#
+# - `str` is the start of the string contents (IE with the " or """ stripped
+# off.)
+# - `options.offsetInChunk` is the start of the interpolated string in the
+# current chunk, including the " or """, etc... If not provided, this is
+# assumed to be 0. `options.lexedLength` is the length of the
+# interpolated string, including both the start and end quotes. Both of these
+# values are ignored if `options.regex` is true.
+# - `options.strOffset` is the offset of str, relative to the start of the
+# current chunk.
 interpolateString: (str, options = {}) ->
-{heredoc, regex} = options
+{heredoc, regex, offsetInChunk, strOffset, lexedLength} = options
+offsetInChunk = offsetInChunk || 0
+strOffset = strOffset || 0
+lexedLength = lexedLength || str.length
+
+# Clip leading \n from heredoc
+if heredoc and str.length > 0 and str[0] == '\n'
+str = str[1...]
+strOffset++
+
+# Parse the string.
 tokens = []
 pi = 0
 i = -1
@@ -479,31 +541,58 @@
 unless letter is '#' and str.charAt(i+1) is '{' and
 (expr = @balancedString str[i + 1..], '}')
 continue
-tokens.push ['NEOSTRING', str[pi...i]] if pi < i
+# NEOSTRING is a fake token. This will be converted to a string below.
+tokens.push @makeToken('NEOSTRING', str[pi...i], strOffset + pi) if pi < i
 inner = expr[1...-1]
 if inner.length
-nested = new Lexer().tokenize inner, line: @line, rewrite: off
-nested.pop()
-nested.shift() if nested[0]?[0] is 'TERMINATOR'
+[line, column] = @getLineAndColumnFromChunk(strOffset + i + 1)
+nested = new Lexer().tokenize inner, line: line, column: column, rewrite: off
+popped = nested.pop()
+popped = nested.shift() if nested[0]?[0] is 'TERMINATOR'
 if len = nested.length
 if len > 1
-nested.unshift ['(', '(', @line]
-nested.push [')', ')', @line]
+nested.unshift @makeToken '(', '(', strOffset + i + 1, 0
+nested.push @makeToken ')', ')', strOffset + i + 1 + inner.length, 0
+# Push a fake 'TOKENS' token, which will get turned into real tokens below.
 tokens.push ['TOKENS', nested]
 i += expr.length
 pi = i + 1
-tokens.push ['NEOSTRING', str[pi..]] if i > pi < str.length
+tokens.push @makeToken('NEOSTRING', str[pi..], strOffset + pi) if i > pi < str.length
+
+# If regex, then return now and let the regex code deal with all these fake tokens
 return tokens if regex
-return @token 'STRING', '""' unless tokens.length
-tokens.unshift ['', ''] unless tokens[0][0] is 'NEOSTRING'
-@token '(', '(' if interpolated = tokens.length > 1
-for [tag, value], i in tokens
-@token '+', '+' if i
+
+# If we didn't find any tokens, then just return an empty string.
+return @token 'STRING', '""', offsetInChunk, lexedLength unless tokens.length
+
+# If the first token is not a string, add a fake empty string to the beginning.
+tokens.unshift @makeToken('NEOSTRING', '', offsetInChunk) unless tokens[0][0] is 'NEOSTRING'
+
+@token '(', '(', offsetInChunk, 0 if interpolated = tokens.length > 1
+
+# Push all the tokens
+for token, i in tokens
+[tag, value] = token
+if i
+# Create a 0-length "+" token.
+plusToken = @token '+', '+' if i
+locationToken = if tag == 'TOKENS' then value[0] else token
+plusToken[2] =
+first_line: locationToken[2].first_line
+first_column: locationToken[2].first_column
+last_line: locationToken[2].first_line
+last_column: locationToken[2].first_column
 if tag is 'TOKENS'
+# Push all the tokens in the fake 'TOKENS' token. These already have
+# sane location data.
 @tokens.push value...
+else if tag is 'NEOSTRING'
+# Convert NEOSTRING into STRING
+token[0] = 'STRING'
+token[1] = @makeString value, '"', heredoc
+@tokens.push token
 else
-@token 'STRING', @makeString value, '"', heredoc
-@token ')', ')' if interpolated
+@error "Unexpected #{tag}"
+
+@token ')', ')', offsetInChunk + lexedLength, 0 if interpolated
 tokens
 # Pairs up a closing token, ensuring that all listed pairs of tokens are
@@ -524,9 +613,59 @@
 # Helpers
 # -------
-# Add a token to the results, taking note of the line number.
-token: (tag, value) ->
-@tokens.push [tag, value, @line]
+# Returns the line and column number from an offset into the current chunk.
+#
+# `offset` is a number of characters into @chunk.
+getLineAndColumnFromChunk: (offset) ->
+if offset is 0
+return [@chunkLine, @chunkColumn]
+
+if offset >= @chunk.length
+string = @chunk
+else
+string = @chunk[..offset-1]
+
+lineCount = count string, '\n'
+
+column = @chunkColumn
+if lineCount > 0
+lines = string.split '\n'
+column = (last lines).length
+else
+column += string.length
+
+return [@chunkLine + lineCount, column]
+
+# Same as "token", except this just returns the token without adding it
+# to the results.
+makeToken: (tag, value, offsetInChunk, length) ->
+offsetInChunk = offsetInChunk || 0
+if length is undefined then length = value.length
+
+locationData = {}
+[locationData.first_line, locationData.first_column] =
+@getLineAndColumnFromChunk offsetInChunk
+
+# Use length - 1 for the final offset - we're supplying the last_line and the last_column,
+# so if last_column == first_column, then we're looking at a character of length 1.
+lastCharacter = if length > 0 then (length - 1) else 0
+[locationData.last_line, locationData.last_column] =
+@getLineAndColumnFromChunk offsetInChunk + (length - 1)
+
+token = [tag, value, locationData]
+
+return token
+
+# Add a token to the results.
+# `offset` is the offset into the current @chunk where the token starts.
+# `length` is the length of the token in the @chunk, after the offset. If
+# not specified, the length of `value` will be used.
+#
+# Returns the new token.
+token: (tag, value, offsetInChunk, length) ->
+token = @makeToken tag, value, offsetInChunk, length
+@tokens.push token
+return token
 # Peek at a tag in the current token stream.
 tag: (index, tag) ->
@@ -556,7 +695,9 @@
 # Throws a syntax error on the current `@line`.
 error: (message) ->
-throw SyntaxError "#{message} on line #{ @line + 1}"
+# TODO: Are there some cases we could improve the error line number by
+# passing the offset in the chunk where the error happened?
+throw SyntaxError "#{message} on line #{ @chunkLine + 1 }"
 # Constants
 # ---------
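A standalone restatement (illustrative) of the position arithmetic in `getLineAndColumnFromChunk`, taking the consumed text directly instead of an offset into `@chunk`:

    advance = (line, column, consumed) ->
      lineCount = consumed.split('\n').length - 1
      if lineCount > 0
        column = consumed[consumed.lastIndexOf('\n') + 1 ..].length
      else
        column += consumed.length
      [line + lineCount, column]

    console.log advance 3, 10, 'ab\ncd' # => [ 4, 2 ]  (a newline resets the column)
    console.log advance 3, 10, 'abcd'   # => [ 3, 14 ]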
@@ -7,9 +7,11 @@
 {RESERVED, STRICT_PROSCRIBED} = require './lexer'
 # Import the helpers we plan to use.
-{compact, flatten, extend, merge, del, starts, ends, last, some} = require './helpers'
+{compact, flatten, extend, merge, del, starts, ends, last, some, addLocationDataFn, locationDataToString} = require './helpers'
-exports.extend = extend # for parser
+# Functions required by parser
+exports.extend = extend
+exports.addLocationDataFn = addLocationDataFn
 # Constant functions for nodes that don't need customization.
 YES = -> yes
@@ -101,7 +103,8 @@ exports.Base = class Base
 # `toString` representation of the node, for inspecting the parse tree.
 # This is what `coffee --nodes` prints out.
 toString: (idt = '', name = @constructor.name) ->
-tree = '\n' + idt + name
+location = if @locationData then locationDataToString @locationData else "??"
+tree = '\n' + idt + location + ": " + name
 tree += '?' if @soak
 @eachChild (node) -> tree += node.toString idt + TAB
 tree
@@ -143,6 +146,16 @@
 # Is this node used to assign a certain variable?
 assigns: NO
+
+# For this node and all descendants, set the location data to `locationData`
+# if the location data is not already set.
+updateLocationDataIfMissing: (locationData) ->
+if not @locationData
+@locationData = {}
+extend @locationData, locationData
+
+@eachChild (child) ->
+child.updateLocationDataIfMissing locationData
 #### Block
 # The block is the list of expressions that forms the body of an
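Because `updateLocationDataIfMissing` recurses through `eachChild`, a node stamped by the parser passes its span down to any child the grammar never positioned. A minimal sketch (the `Literal` and `Op.create` names are taken from elsewhere in this commit; the exact constructor signatures are assumptions, so treat the snippet as illustrative):

    {Literal, Op} = require './nodes'

    node = Op.create '+', new Literal('a'), new Literal('b')
    node.updateLocationDataIfMissing
      first_line: 0, first_column: 0, last_line: 0, last_column: 4
    # Both Literal children now report 1:1-1:5 via locationDataToString.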
@@ -26,6 +26,7 @@ class exports.Rewriter
 @tagPostfixConditionals()
 @addImplicitBraces()
 @addImplicitParentheses()
+@addLocationDataToGeneratedTokens()
 @tokens
 # Rewrite the token stream, looking one token ahead and behind.
@@ -122,7 +123,7 @@
 )
 action = (token, i) ->
-tok = @generate '}', '}', token[2]
+tok = @generate '}', '}'
 @tokens.splice i, 0, tok
 @scanTokens (token, i, tokens) ->
@@ -143,7 +144,7 @@
 startsLine = not prevTag or (prevTag in LINEBREAKS)
 value = new String('{')
 value.generated = yes
-tok = @generate '{', value, token[2]
+tok = @generate '{', value
 tokens.splice idx, 0, tok
 @detectEnd i + 2, condition, action
 2
@@ -169,7 +170,7 @@
 not (callIndex is i - 1 and (post = @tokens[i + 1]) and post.generated and post[0] is '{')))
 action = (token, i) ->
-@tokens.splice i, 0, @generate 'CALL_END', ')', token[2]
+@tokens.splice i, 0, @generate 'CALL_END', ')'
 @scanTokens (token, i, tokens) ->
 tag = token[0]
@@ -192,12 +193,31 @@
 prev[0] = 'FUNC_EXIST' if prev[0] is '?'
 2
+
+# Add location data to all tokens generated by the rewriter.
+addLocationDataToGeneratedTokens: ->
+@scanTokens (token, i, tokens) ->
+tag = token[0]
+if (token.generated or token.explicit) and (not token[2])
+if i > 0
+prevToken = tokens[i-1]
+token[2] =
+first_line: prevToken[2].last_line
+first_column: prevToken[2].last_column
+last_line: prevToken[2].last_line
+last_column: prevToken[2].last_column
+else
+token[2] =
+first_line: 0
+first_column: 0
+last_line: 0
+last_column: 0
+return 1
+
 # Because our grammar is LALR(1), it can't handle some single-line
 # expressions that lack ending delimiters. The **Rewriter** adds the implicit
 # blocks, so it doesn't need to. ')' can close a single-line block,
 # but we need to make sure it's balanced.
 addImplicitIndentation: ->
 starter = indent = outdent = null
 condition = (token, i) ->
@@ -250,14 +270,15 @@
 # Generate the indentation tokens, based on another token on the same line.
 indentation: (token, implicit = no) ->
-indent = ['INDENT', 2, token[2]]
-outdent = ['OUTDENT', 2, token[2]]
+indent = ['INDENT', 2]
+outdent = ['OUTDENT', 2]
 indent.generated = outdent.generated = yes if implicit
+indent.explicit = outdent.explicit = yes if not implicit
 [indent, outdent]
 # Create a generated token: one that exists due to a use of implicit syntax.
-generate: (tag, value, line) ->
-tok = [tag, value, line]
+generate: (tag, value) ->
+tok = [tag, value]
 tok.generated = yes
 tok
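Concretely, the pass above gives a generated or explicit token a zero-width position borrowed from its predecessor. A worked illustration of the same assignment:

    prev = ['NUMBER', '79', first_line: 0, first_column: 4, last_line: 0, last_column: 5]
    tok = ['CALL_END', ')']
    tok.generated = yes
    tok[2] =
      first_line: prev[2].last_line
      first_column: prev[2].last_column
      last_line: prev[2].last_line
      last_column: prev[2].last_column
    # tok now sits as a zero-width marker at line 0, column 5.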
test/location.coffee (new file)
@@ -0,0 +1,46 @@
+testScript = '''
+  if true
+    x = 6
+    console.log "A console #{x + 7} log"
+
+  foo = "bar"
+  z = /// ^ (a#{foo}) ///
+
+  x = () ->
+    try
+      console.log "foo"
+    catch err
+      # Rewriter will generate explicit indentation here.
+
+      return null
+'''
+
+test "Verify location of generated tokens", ->
+  tokens = CoffeeScript.tokens "a = 79"
+  eq tokens.length, 4
+
+  aToken = tokens[0]
+  eq aToken[2].first_line, 0
+  eq aToken[2].first_column, 0
+  eq aToken[2].last_line, 0
+  eq aToken[2].last_column, 0
+
+  equalsToken = tokens[1]
+  eq equalsToken[2].first_line, 0
+  eq equalsToken[2].first_column, 2
+  eq equalsToken[2].last_line, 0
+  eq equalsToken[2].last_column, 2
+
+  numberToken = tokens[2]
+  eq numberToken[2].first_line, 0
+  eq numberToken[2].first_column, 4
+  eq numberToken[2].last_line, 0
+  eq numberToken[2].last_column, 5
+
+test "Verify all tokens get a location", ->
+  doesNotThrow ->
+    tokens = CoffeeScript.tokens testScript
+    for token in tokens
+      ok !!token[2]
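The assertions above can be reproduced by hand; a quick check (illustrative, assuming the compiler is on the load path as `coffee-script`):

    CoffeeScript = require 'coffee-script'
    [identifier] = CoffeeScript.tokens 'a = 79'
    console.log identifier[2]
    # => { first_line: 0, first_column: 0, last_line: 0, last_column: 0 }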