Mirror of https://github.com/jashkenas/coffeescript.git, synced 2022-11-09 12:23:24 -05:00
[CS2] Destructuring (#4478)
* Output simple array destructuring assignments to ES2015
* Output simple object destructured assignments to ES2015
* Compile shorthand object properties to ES2015 shorthand properties. This dramatically improves the appearance of destructured imports.
* Compile default values in destructured assignment to ES2015
* Rename `wrapInBraces` to `wrapInParentheses`, and `compilePatternMatch` to `compileDestructuring`, for clarity; style improvements (no `==` or `!=`, etc.)
* Don’t confuse the syntax highlighter
* Comment Assign::compilePatternMatch a bit
* Assignment expressions in conditionals are a bad practice
* Optional check for existence that only checks `!== undefined`, not `!= null`, to follow ES convention that default values only apply when a variable is undefined, not falsy
* Add comments; remove unnecessary array splats in function tests
* The fallback destructuring code should apply default values only if `undefined`, not falsy, to follow ES spec
* Support destructuring in function parameters (first pass); catch destructured reserved words
* Destructured variables in function parameter lists shouldn’t be added to the function body with `var` declarations; treat splat array function parameters the legacy way to avoid rethinking #4005
* Remove redundancy in undefined-only check for existence; fix passing option to check
* Fix undefined redundancy
* Simplify getting the variable name
* Reimplement “check for existence if not undefined” without creating a new operator
* `Obj::isAssignable` should not mutate; pass `lhs` property in from `Assign` or `Code` to child arrays and objects so that those child nodes are set as allowable for destructuring
* Revert changes to tests
* Restore revised test for empty destructuring assignment
This commit is contained in:
parent 5e1d978946
commit b192e215a5

17 changed files with 953 additions and 646 deletions
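For illustration, here is roughly what this change does to the compiled output. This is a standalone sketch, not code from the commit: the snippet and variable names are invented, but the compiled forms follow the patterns visible in the generated lexer code in the diff below (CoffeeScript input is shown in comments; the JavaScript is the ES2015-style output).

    // destructuring-example.js — a hypothetical, self-contained sketch of the
    // output style this commit targets; the names below are made up.
    var a, b, first, obj, pair, second;

    // CoffeeScript:      {a, b} = obj
    // 1.x-style output:  ref = obj, a = ref.a, b = ref.b;
    // Output after this commit — wrapped in parentheses, because a JavaScript
    // expression statement may not begin with '{':
    obj = {a: 1, b: 2};
    ({a, b} = obj);

    // CoffeeScript:      [first, second] = pair
    // 1.x-style output:  first = pair[0], second = pair[1];
    // Output after this commit:
    pair = [10, 20];
    [first, second] = pair;

    console.log(a, b, first, second); // -> 1 2 10 20

The first hunk of the diff below shows exactly this shift for the compiled lexer's own imports, where `({Rewriter, INVERSES} = require('./rewriter'));` replaces the old `ref = require(...)` chain of assignments.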
@@ -1,15 +1,15 @@
 // Generated by CoffeeScript 2.0.0-alpha1
 (function() {
-var BOM, BOOL, CALLABLE, CODE, COFFEE_ALIASES, COFFEE_ALIAS_MAP, COFFEE_KEYWORDS, COMMENT, COMPARE, COMPOUND_ASSIGN, HERECOMMENT_ILLEGAL, HEREDOC_DOUBLE, HEREDOC_INDENT, HEREDOC_SINGLE, HEREGEX, HEREGEX_OMIT, HERE_JSTOKEN, IDENTIFIER, INDENTABLE_CLOSERS, INDEXABLE, INVALID_ESCAPE, INVERSES, JSTOKEN, JS_KEYWORDS, LEADING_BLANK_LINE, LINE_BREAK, LINE_CONTINUER, Lexer, MATH, MULTI_DENT, NOT_REGEX, NUMBER, OPERATOR, POSSIBLY_DIVISION, REGEX, REGEX_FLAGS, REGEX_ILLEGAL, RELATION, RESERVED, Rewriter, SHIFT, SIMPLE_STRING_OMIT, STRICT_PROSCRIBED, STRING_DOUBLE, STRING_OMIT, STRING_SINGLE, STRING_START, TRAILING_BLANK_LINE, TRAILING_SPACES, UNARY, UNARY_MATH, VALID_FLAGS, WHITESPACE, compact, count, invertLiterate, isForFrom, isUnassignable, key, locationDataToString, ref, ref1, repeat, starts, throwSyntaxError,
+var BOM, BOOL, CALLABLE, CODE, COFFEE_ALIASES, COFFEE_ALIAS_MAP, COFFEE_KEYWORDS, COMMENT, COMPARE, COMPOUND_ASSIGN, HERECOMMENT_ILLEGAL, HEREDOC_DOUBLE, HEREDOC_INDENT, HEREDOC_SINGLE, HEREGEX, HEREGEX_OMIT, HERE_JSTOKEN, IDENTIFIER, INDENTABLE_CLOSERS, INDEXABLE, INVALID_ESCAPE, INVERSES, JSTOKEN, JS_KEYWORDS, LEADING_BLANK_LINE, LINE_BREAK, LINE_CONTINUER, Lexer, MATH, MULTI_DENT, NOT_REGEX, NUMBER, OPERATOR, POSSIBLY_DIVISION, REGEX, REGEX_FLAGS, REGEX_ILLEGAL, RELATION, RESERVED, Rewriter, SHIFT, SIMPLE_STRING_OMIT, STRICT_PROSCRIBED, STRING_DOUBLE, STRING_OMIT, STRING_SINGLE, STRING_START, TRAILING_BLANK_LINE, TRAILING_SPACES, UNARY, UNARY_MATH, VALID_FLAGS, WHITESPACE, compact, count, invertLiterate, isForFrom, isUnassignable, key, locationDataToString, repeat, starts, throwSyntaxError,
 indexOf = [].indexOf || function(item) { for (var i = 0, l = this.length; i < l; i++) { if (i in this && this[i] === item) return i; } return -1; };
 
-ref = require('./rewriter'), Rewriter = ref.Rewriter, INVERSES = ref.INVERSES;
+({Rewriter, INVERSES} = require('./rewriter'));
 
-ref1 = require('./helpers'), count = ref1.count, starts = ref1.starts, compact = ref1.compact, repeat = ref1.repeat, invertLiterate = ref1.invertLiterate, locationDataToString = ref1.locationDataToString, throwSyntaxError = ref1.throwSyntaxError;
+({count, starts, compact, repeat, invertLiterate, locationDataToString, throwSyntaxError} = require('./helpers'));
 
 exports.Lexer = Lexer = class Lexer {
 tokenize(code, opts = {}) {
-var consumed, end, i, ref2;
+var consumed, end, i;
 this.literate = opts.literate;
 this.indent = 0;
 this.baseIndent = 0;
@@ -29,7 +29,7 @@
 i = 0;
 while (this.chunk = code.slice(i)) {
 consumed = this.identifierToken() || this.commentToken() || this.whitespaceToken() || this.lineToken() || this.stringToken() || this.numberToken() || this.regexToken() || this.jsToken() || this.literalToken();
-ref2 = this.getLineAndColumnFromChunk(consumed), this.chunkLine = ref2[0], this.chunkColumn = ref2[1];
+[this.chunkLine, this.chunkColumn] = this.getLineAndColumnFromChunk(consumed);
 i += consumed;
 if (opts.untilBalanced && this.ends.length === 0) {
 return {
@@ -64,11 +64,11 @@
 }
 
 identifierToken() {
-var alias, colon, colonOffset, id, idLength, input, match, poppedToken, prev, ref2, ref3, ref4, ref5, ref6, ref7, ref8, ref9, tag, tagToken;
+var alias, colon, colonOffset, id, idLength, input, match, poppedToken, prev, ref, ref1, ref2, ref3, ref4, ref5, ref6, tag, tagToken;
 if (!(match = IDENTIFIER.exec(this.chunk))) {
 return 0;
 }
-input = match[0], id = match[1], colon = match[2];
+[input, id, colon] = match;
 idLength = id.length;
 poppedToken = void 0;
 if (id === 'own' && this.tag() === 'FOR') {
@@ -82,27 +82,27 @@
 if (id === 'as' && this.seenImport) {
 if (this.value() === '*') {
 this.tokens[this.tokens.length - 1][0] = 'IMPORT_ALL';
-} else if (ref2 = this.value(), indexOf.call(COFFEE_KEYWORDS, ref2) >= 0) {
+} else if (ref = this.value(), indexOf.call(COFFEE_KEYWORDS, ref) >= 0) {
 this.tokens[this.tokens.length - 1][0] = 'IDENTIFIER';
 }
-if ((ref3 = this.tag()) === 'DEFAULT' || ref3 === 'IMPORT_ALL' || ref3 === 'IDENTIFIER') {
+if ((ref1 = this.tag()) === 'DEFAULT' || ref1 === 'IMPORT_ALL' || ref1 === 'IDENTIFIER') {
 this.token('AS', id);
 return id.length;
 }
 }
-if (id === 'as' && this.seenExport && ((ref4 = this.tag()) === 'IDENTIFIER' || ref4 === 'DEFAULT')) {
+if (id === 'as' && this.seenExport && ((ref2 = this.tag()) === 'IDENTIFIER' || ref2 === 'DEFAULT')) {
 this.token('AS', id);
 return id.length;
 }
-if (id === 'default' && this.seenExport && ((ref5 = this.tag()) === 'EXPORT' || ref5 === 'AS')) {
+if (id === 'default' && this.seenExport && ((ref3 = this.tag()) === 'EXPORT' || ref3 === 'AS')) {
 this.token('DEFAULT', id);
 return id.length;
 }
-ref6 = this.tokens, prev = ref6[ref6.length - 1];
-tag = colon || (prev != null) && (((ref7 = prev[0]) === '.' || ref7 === '?.' || ref7 === '::' || ref7 === '?::') || !prev.spaced && prev[0] === '@') ? 'PROPERTY' : 'IDENTIFIER';
+ref4 = this.tokens, prev = ref4[ref4.length - 1];
+tag = colon || (prev != null) && (((ref5 = prev[0]) === '.' || ref5 === '?.' || ref5 === '::' || ref5 === '?::') || !prev.spaced && prev[0] === '@') ? 'PROPERTY' : 'IDENTIFIER';
 if (tag === 'IDENTIFIER' && (indexOf.call(JS_KEYWORDS, id) >= 0 || indexOf.call(COFFEE_KEYWORDS, id) >= 0) && !(this.exportSpecifierList && indexOf.call(COFFEE_KEYWORDS, id) >= 0)) {
 tag = id.toUpperCase();
-if (tag === 'WHEN' && (ref8 = this.tag(), indexOf.call(LINE_BREAK, ref8) >= 0)) {
+if (tag === 'WHEN' && (ref6 = this.tag(), indexOf.call(LINE_BREAK, ref6) >= 0)) {
 tag = 'LEADING_WHEN';
 } else if (tag === 'FOR') {
 this.seenFor = true;
@@ -167,7 +167,7 @@
 tagToken.origin = [tag, alias, tagToken[2]];
 }
 if (poppedToken) {
-ref9 = [poppedToken[2].first_line, poppedToken[2].first_column], tagToken[2].first_line = ref9[0], tagToken[2].first_column = ref9[1];
+[tagToken[2].first_line, tagToken[2].first_column] = [poppedToken[2].first_line, poppedToken[2].first_column];
 }
 if (colon) {
 colonOffset = input.lastIndexOf(':');
@@ -223,8 +223,8 @@
 }
 
 stringToken() {
-var $, attempt, delimiter, doc, end, heredoc, i, indent, indentRegex, match, quote, ref2, ref3, regex, token, tokens;
-quote = (STRING_START.exec(this.chunk) || [])[0];
+var $, attempt, delimiter, doc, end, heredoc, i, indent, indentRegex, match, quote, ref, regex, token, tokens;
+[quote] = STRING_START.exec(this.chunk) || [];
 if (!quote) {
 return 0;
 }
@@ -244,7 +244,10 @@
 }
 })();
 heredoc = quote.length === 3;
-ref2 = this.matchWithInterpolations(regex, quote), tokens = ref2.tokens, end = ref2.index;
+({
+tokens,
+index: end
+} = this.matchWithInterpolations(regex, quote));
 $ = tokens.length - 1;
 delimiter = quote.charAt(0);
 if (heredoc) {
@@ -262,16 +265,14 @@
 })()).join('#{}');
 while (match = HEREDOC_INDENT.exec(doc)) {
 attempt = match[1];
-if (indent === null || (0 < (ref3 = attempt.length) && ref3 < indent.length)) {
+if (indent === null || (0 < (ref = attempt.length) && ref < indent.length)) {
 indent = attempt;
 }
 }
 if (indent) {
 indentRegex = RegExp(`\\n${indent}`, "g");
 }
-this.mergeInterpolationTokens(tokens, {
-delimiter: delimiter
-}, (value, i) => {
+this.mergeInterpolationTokens(tokens, {delimiter}, (value, i) => {
 value = this.formatString(value);
 if (indentRegex) {
 value = value.replace(indentRegex, '\n');
@@ -285,9 +286,7 @@
 return value;
 });
 } else {
-this.mergeInterpolationTokens(tokens, {
-delimiter: delimiter
-}, (value, i) => {
+this.mergeInterpolationTokens(tokens, {delimiter}, (value, i) => {
 value = this.formatString(value);
 value = value.replace(SIMPLE_STRING_OMIT, function(match, offset) {
 if ((i === 0 && offset === 0) || (i === $ && offset + match.length === value.length)) {
@@ -307,7 +306,7 @@
 if (!(match = this.chunk.match(COMMENT))) {
 return 0;
 }
-comment = match[0], here = match[1];
+[comment, here] = match;
 if (here) {
 if (match = HERECOMMENT_ILLEGAL.exec(comment)) {
 this.error(`block comments cannot contain ${match[0]}`, {
@@ -336,7 +335,7 @@
 }
 
 regexToken() {
-var body, closed, end, flags, index, match, origin, prev, ref2, ref3, ref4, regex, tokens;
+var body, closed, end, flags, index, match, origin, prev, ref, ref1, ref2, regex, tokens;
 switch (false) {
 case !(match = REGEX_ILLEGAL.exec(this.chunk)):
 this.error(`regular expressions cannot begin with ${match[2]}`, {
@@ -344,22 +343,22 @@
 });
 break;
 case !(match = this.matchWithInterpolations(HEREGEX, '///')):
-tokens = match.tokens, index = match.index;
+({tokens, index} = match);
 break;
 case !(match = REGEX.exec(this.chunk)):
-regex = match[0], body = match[1], closed = match[2];
+[regex, body, closed] = match;
 this.validateEscapes(body, {
 isRegex: true,
 offsetInChunk: 1
 });
 index = regex.length;
-ref2 = this.tokens, prev = ref2[ref2.length - 1];
+ref = this.tokens, prev = ref[ref.length - 1];
 if (prev) {
-if (prev.spaced && (ref3 = prev[0], indexOf.call(CALLABLE, ref3) >= 0)) {
+if (prev.spaced && (ref1 = prev[0], indexOf.call(CALLABLE, ref1) >= 0)) {
 if (!closed || POSSIBLY_DIVISION.test(regex)) {
 return 0;
 }
-} else if (ref4 = prev[0], indexOf.call(NOT_REGEX, ref4) >= 0) {
+} else if (ref2 = prev[0], indexOf.call(NOT_REGEX, ref2) >= 0) {
 return 0;
 }
 }
@@ -370,7 +369,7 @@
 default:
 return 0;
 }
-flags = REGEX_FLAGS.exec(this.chunk.slice(index))[0];
+[flags] = REGEX_FLAGS.exec(this.chunk.slice(index));
 end = index + flags.length;
 origin = this.makeToken('REGEX', null, 0, end);
 switch (false) {
@@ -469,7 +468,7 @@
 }
 
 outdentToken(moveOut, noNewlines, outdentLength) {
-var decreasedIndent, dent, lastIndent, ref2;
+var decreasedIndent, dent, lastIndent, ref;
 decreasedIndent = this.indent - moveOut;
 while (moveOut > 0) {
 lastIndent = this.indents[this.indents.length - 1];
@@ -483,7 +482,7 @@
 moveOut -= lastIndent;
 } else {
 dent = this.indents.pop() + this.outdebt;
-if (outdentLength && (ref2 = this.chunk[outdentLength], indexOf.call(INDENTABLE_CLOSERS, ref2) >= 0)) {
+if (outdentLength && (ref = this.chunk[outdentLength], indexOf.call(INDENTABLE_CLOSERS, ref) >= 0)) {
 decreasedIndent -= dent - moveOut;
 moveOut = dent;
 }
@@ -508,11 +507,11 @@
 }
 
 whitespaceToken() {
-var match, nline, prev, ref2;
+var match, nline, prev, ref;
 if (!((match = WHITESPACE.exec(this.chunk)) || (nline = this.chunk.charAt(0) === '\n'))) {
 return 0;
 }
-ref2 = this.tokens, prev = ref2[ref2.length - 1];
+ref = this.tokens, prev = ref[ref.length - 1];
 if (prev) {
 prev[match ? 'spaced' : 'newLine'] = true;
 }
@@ -541,9 +540,9 @@
 }
 
 literalToken() {
-var match, message, origin, prev, ref2, ref3, ref4, ref5, ref6, skipToken, tag, token, value;
+var match, message, origin, prev, ref, ref1, ref2, ref3, ref4, skipToken, tag, token, value;
 if (match = OPERATOR.exec(this.chunk)) {
-value = match[0];
+[value] = match;
 if (CODE.test(value)) {
 this.tagParameters();
 }
@@ -551,17 +550,17 @@
 value = this.chunk.charAt(0);
 }
 tag = value;
-ref2 = this.tokens, prev = ref2[ref2.length - 1];
+ref = this.tokens, prev = ref[ref.length - 1];
 if (prev && indexOf.call(['=', ...COMPOUND_ASSIGN], value) >= 0) {
 skipToken = false;
-if (value === '=' && ((ref3 = prev[1]) === '||' || ref3 === '&&') && !prev.spaced) {
+if (value === '=' && ((ref1 = prev[1]) === '||' || ref1 === '&&') && !prev.spaced) {
 prev[0] = 'COMPOUND_ASSIGN';
 prev[1] += '=';
 prev = this.tokens[this.tokens.length - 2];
 skipToken = true;
 }
 if (prev && prev[0] !== 'PROPERTY') {
-origin = (ref4 = prev.origin) != null ? ref4 : prev;
+origin = (ref2 = prev.origin) != null ? ref2 : prev;
 message = isUnassignable(prev[1], origin[1]);
 if (message) {
 this.error(message, origin[2]);
@@ -596,12 +595,12 @@
 } else if (value === '?' && (prev != null ? prev.spaced : void 0)) {
 tag = 'BIN?';
 } else if (prev && !prev.spaced) {
-if (value === '(' && (ref5 = prev[0], indexOf.call(CALLABLE, ref5) >= 0)) {
+if (value === '(' && (ref3 = prev[0], indexOf.call(CALLABLE, ref3) >= 0)) {
 if (prev[0] === '?') {
 prev[0] = 'FUNC_EXIST';
 }
 tag = 'CALL_START';
-} else if (value === '[' && (ref6 = prev[0], indexOf.call(INDEXABLE, ref6) >= 0)) {
+} else if (value === '[' && (ref4 = prev[0], indexOf.call(INDEXABLE, ref4) >= 0)) {
 tag = 'INDEX_START';
 switch (prev[0]) {
 case '?':
@@ -634,7 +633,7 @@
 return this;
 }
 stack = [];
-tokens = this.tokens;
+({tokens} = this);
 i = tokens.length;
 tokens[--i][0] = 'PARAM_END';
 while (tok = tokens[--i]) {
@@ -662,7 +661,7 @@
 }
 
 matchWithInterpolations(regex, delimiter) {
-var close, column, firstToken, index, lastToken, line, nested, offsetInChunk, open, ref2, ref3, ref4, str, strPart, tokens;
+var close, column, firstToken, index, lastToken, line, nested, offsetInChunk, open, ref, str, strPart, tokens;
 tokens = [];
 offsetInChunk = delimiter.length;
 if (this.chunk.slice(0, offsetInChunk) !== delimiter) {
@@ -670,10 +669,10 @@
 }
 str = this.chunk.slice(offsetInChunk);
 while (true) {
-strPart = regex.exec(str)[0];
+[strPart] = regex.exec(str);
 this.validateEscapes(strPart, {
 isRegex: delimiter.charAt(0) === '/',
-offsetInChunk: offsetInChunk
+offsetInChunk
 });
 tokens.push(this.makeToken('NEOSTRING', strPart, offsetInChunk));
 str = str.slice(strPart.length);
@@ -681,18 +680,21 @@
 if (str.slice(0, 2) !== '#{') {
 break;
 }
-ref2 = this.getLineAndColumnFromChunk(offsetInChunk + 1), line = ref2[0], column = ref2[1];
-ref3 = new Lexer().tokenize(str.slice(1), {
+[line, column] = this.getLineAndColumnFromChunk(offsetInChunk + 1);
+({
+tokens: nested,
+index
+} = new Lexer().tokenize(str.slice(1), {
 line: line,
 column: column,
 untilBalanced: true
-}), nested = ref3.tokens, index = ref3.index;
+}));
 index += 1;
 open = nested[0], close = nested[nested.length - 1];
 open[0] = open[1] = '(';
 close[0] = close[1] = ')';
 close.origin = ['', 'end of interpolation', close[2]];
-if (((ref4 = nested[1]) != null ? ref4[0] : void 0) === 'TERMINATOR') {
+if (((ref = nested[1]) != null ? ref[0] : void 0) === 'TERMINATOR') {
 nested.splice(1, 1);
 }
 tokens.push(['TOKENS', nested]);
@@ -716,7 +718,7 @@
 lastToken[2].last_column -= 1;
 }
 return {
-tokens: tokens,
+tokens,
 index: offsetInChunk + delimiter.length
 };
 }
@@ -729,7 +731,7 @@
 firstIndex = this.tokens.length;
 for (i = j = 0, len = tokens.length; j < len; i = ++j) {
 token = tokens[i];
-tag = token[0], value = token[1];
+[tag, value] = token;
 switch (tag) {
 case 'TOKENS':
 if (value.length === 2) {
@@ -787,13 +789,13 @@
 }
 
 pair(tag) {
-var lastIndent, prev, ref2, ref3, wanted;
-ref2 = this.ends, prev = ref2[ref2.length - 1];
+var lastIndent, prev, ref, ref1, wanted;
+ref = this.ends, prev = ref[ref.length - 1];
 if (tag !== (wanted = prev != null ? prev.tag : void 0)) {
 if ('OUTDENT' !== wanted) {
 this.error(`unmatched ${tag}`);
 }
-ref3 = this.indents, lastIndent = ref3[ref3.length - 1];
+ref1 = this.indents, lastIndent = ref1[ref1.length - 1];
 this.outdentToken(lastIndent, true);
 return this.pair(tag);
 }
@@ -801,7 +803,7 @@
 }
 
 getLineAndColumnFromChunk(offset) {
-var column, lastLine, lineCount, ref2, string;
+var column, lastLine, lineCount, ref, string;
 if (offset === 0) {
 return [this.chunkLine, this.chunkColumn];
 }
@@ -813,7 +815,7 @@
 lineCount = count(string, '\n');
 column = this.chunkColumn;
 if (lineCount > 0) {
-ref2 = string.split('\n'), lastLine = ref2[ref2.length - 1];
+ref = string.split('\n'), lastLine = ref[ref.length - 1];
 column = lastLine.length;
 } else {
 column += string.length;
@@ -822,11 +824,11 @@
 }
 
 makeToken(tag, value, offsetInChunk = 0, length = value.length) {
-var lastCharacter, locationData, ref2, ref3, token;
+var lastCharacter, locationData, token;
 locationData = {};
-ref2 = this.getLineAndColumnFromChunk(offsetInChunk), locationData.first_line = ref2[0], locationData.first_column = ref2[1];
+[locationData.first_line, locationData.first_column] = this.getLineAndColumnFromChunk(offsetInChunk);
 lastCharacter = length > 0 ? length - 1 : 0;
-ref3 = this.getLineAndColumnFromChunk(offsetInChunk + lastCharacter), locationData.last_line = ref3[0], locationData.last_column = ref3[1];
+[locationData.last_line, locationData.last_column] = this.getLineAndColumnFromChunk(offsetInChunk + lastCharacter);
 token = [tag, value, locationData];
 return token;
 }
@@ -842,20 +844,20 @@
 }
 
 tag() {
-var ref2, token;
-ref2 = this.tokens, token = ref2[ref2.length - 1];
+var ref, token;
+ref = this.tokens, token = ref[ref.length - 1];
 return token != null ? token[0] : void 0;
 }
 
 value() {
-var ref2, token;
-ref2 = this.tokens, token = ref2[ref2.length - 1];
+var ref, token;
+ref = this.tokens, token = ref[ref.length - 1];
 return token != null ? token[1] : void 0;
 }
 
 unfinished() {
-var ref2;
-return LINE_CONTINUER.test(this.chunk) || ((ref2 = this.tag()) === '\\' || ref2 === '.' || ref2 === '?.' || ref2 === '?::' || ref2 === 'UNARY' || ref2 === 'MATH' || ref2 === 'UNARY_MATH' || ref2 === '+' || ref2 === '-' || ref2 === '**' || ref2 === 'SHIFT' || ref2 === 'RELATION' || ref2 === 'COMPARE' || ref2 === '&' || ref2 === '^' || ref2 === '|' || ref2 === '&&' || ref2 === '||' || ref2 === 'BIN?' || ref2 === 'THROW' || ref2 === 'EXTENDS');
+var ref;
+return LINE_CONTINUER.test(this.chunk) || ((ref = this.tag()) === '\\' || ref === '.' || ref === '?.' || ref === '?::' || ref === 'UNARY' || ref === 'MATH' || ref === 'UNARY_MATH' || ref === '+' || ref === '-' || ref === '**' || ref === 'SHIFT' || ref === 'RELATION' || ref === 'COMPARE' || ref === '&' || ref === '^' || ref === '|' || ref === '&&' || ref === '||' || ref === 'BIN?' || ref === 'THROW' || ref === 'EXTENDS');
 }
 
 formatString(str) {
@@ -867,7 +869,7 @@
 }
 
 validateEscapes(str, options = {}) {
-var before, hex, invalidEscape, match, message, octal, ref2, unicode;
+var before, hex, invalidEscape, match, message, octal, ref, unicode;
 match = INVALID_ESCAPE.exec(str);
 if (!match) {
 return;
@@ -879,7 +881,7 @@
 message = octal ? "octal escape sequences are not allowed" : "invalid escape sequence";
 invalidEscape = `\\${octal || hex || unicode}`;
 return this.error(`${message} ${invalidEscape}`, {
-offset: ((ref2 = options.offsetInChunk) != null ? ref2 : 0) + match.index + before.length,
+offset: ((ref = options.offsetInChunk) != null ? ref : 0) + match.index + before.length,
 length: invalidEscape.length
 });
 }
@@ -922,11 +924,11 @@
 }
 
 error(message, options = {}) {
-var first_column, first_line, location, ref2, ref3, ref4;
-location = 'first_line' in options ? options : ((ref3 = this.getLineAndColumnFromChunk((ref2 = options.offset) != null ? ref2 : 0), first_line = ref3[0], first_column = ref3[1], ref3), {
-first_line: first_line,
-first_column: first_column,
-last_column: first_column + ((ref4 = options.length) != null ? ref4 : 1) - 1
+var first_column, first_line, location, ref, ref1;
+location = 'first_line' in options ? options : ([first_line, first_column] = this.getLineAndColumnFromChunk((ref = options.offset) != null ? ref : 0), {
+first_line,
+first_column,
+last_column: first_column + ((ref1 = options.length) != null ? ref1 : 1) - 1
 });
 return throwSyntaxError(message, location);
 }
@@ -949,7 +951,7 @@
 exports.isUnassignable = isUnassignable;
 
 isForFrom = function(prev) {
-var ref2;
+var ref;
 if (prev[0] === 'IDENTIFIER') {
 if (prev[1] === 'from') {
 prev[1][0] = 'IDENTIFIER';
@@ -958,7 +960,7 @@
 return true;
 } else if (prev[0] === 'FOR') {
 return false;
-} else if ((ref2 = prev[1]) === '{' || ref2 === '[' || ref2 === ',' || ref2 === ':') {
+} else if ((ref = prev[1]) === '{' || ref === '[' || ref === ',' || ref === ':') {
 return false;
 } else {
 return true;
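A closing note on one behavior called out in the commit message, sketched as standalone JavaScript (not code from this commit; the variable names are made up): where a fallback check is still emitted, defaults now key off `!== undefined` rather than CoffeeScript's usual `!= null`, matching the ES2015 rule that only undefined, never null or another falsy value, triggers a default.

    // fallback-default-sketch.js — a hypothetical comparison of the two checks.
    var source, withEsCheck, withNullCheck;

    source = null;

    // CoffeeScript's usual existence test treats null like undefined,
    // so the default would kick in here:
    withNullCheck = source != null ? source : 'default';

    // The ES2015 rule this commit follows: only undefined triggers the default,
    // so null passes through unchanged:
    withEsCheck = source !== undefined ? source : 'default';

    console.log(withNullCheck, withEsCheck); // -> default null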