Improve error messages for generated tokens

This commit is contained in:
xixixao 2014-01-22 02:44:50 +00:00
parent 39cb8815f7
commit f0463e9981
9 changed files with 111 additions and 25 deletions

View File

@ -214,6 +214,7 @@
token = this.tokens[this.pos++]; token = this.tokens[this.pos++];
if (token) { if (token) {
tag = token[0], this.yytext = token[1], this.yylloc = token[2]; tag = token[0], this.yytext = token[1], this.yylloc = token[2];
this.errorToken = token.origin || token;
this.yylineno = this.yylloc.first_line; this.yylineno = this.yylloc.first_line;
} else { } else {
tag = ''; tag = '';
@ -232,10 +233,12 @@
parser.yy = require('./nodes'); parser.yy = require('./nodes');
parser.yy.parseError = function(message, _arg) { parser.yy.parseError = function(message, _arg) {
var token; var errorLoc, errorText, errorToken, ignored, token, tokens, _ref;
token = _arg.token; token = _arg.token;
message = "unexpected " + (token === 1 ? 'end of input' : token); _ref = parser.lexer, errorToken = _ref.errorToken, tokens = _ref.tokens;
return helpers.throwSyntaxError(message, parser.lexer.yylloc); ignored = errorToken[0], errorText = errorToken[1], errorLoc = errorToken[2];
errorText = errorToken === tokens[tokens.length - 1] ? 'end of input' : helpers.nameWhitespaceCharacter(errorText);
return helpers.throwSyntaxError("unexpected " + errorText, errorLoc);
}; };
formatSourcePosition = function(frame, getSourceMapping) { formatSourcePosition = function(frame, getSourceMapping) {

View File

@ -234,4 +234,19 @@
return "" + filename + ":" + (first_line + 1) + ":" + (first_column + 1) + ": error: " + this.message + "\n" + codeLine + "\n" + marker; return "" + filename + ":" + (first_line + 1) + ":" + (first_column + 1) + ": error: " + this.message + "\n" + codeLine + "\n" + marker;
}; };
exports.nameWhitespaceCharacter = function(string) {
switch (string) {
case ' ':
return 'space';
case '\n':
return 'newline';
case '\r':
return 'carriage return';
case '\t':
return 'tab';
default:
return string;
}
};
}).call(this); }).call(this);

View File

@ -588,14 +588,14 @@
}; };
Lexer.prototype.interpolateString = function(str, options) { Lexer.prototype.interpolateString = function(str, options) {
var column, expr, heredoc, i, inner, interpolated, len, letter, lexedLength, line, locationToken, nested, offsetInChunk, pi, plusToken, popped, regex, rparen, strOffset, tag, token, tokens, value, _i, _len, _ref2, _ref3, _ref4; var column, errorToken, expr, heredoc, i, inner, interpolated, len, letter, lexedLength, line, locationToken, nested, offsetInChunk, pi, plusToken, popped, regex, rparen, strOffset, tag, token, tokens, value, _i, _len, _ref2, _ref3, _ref4;
if (options == null) { if (options == null) {
options = {}; options = {};
} }
heredoc = options.heredoc, regex = options.regex, offsetInChunk = options.offsetInChunk, strOffset = options.strOffset, lexedLength = options.lexedLength; heredoc = options.heredoc, regex = options.regex, offsetInChunk = options.offsetInChunk, strOffset = options.strOffset, lexedLength = options.lexedLength;
offsetInChunk = offsetInChunk || 0; offsetInChunk || (offsetInChunk = 0);
strOffset = strOffset || 0; strOffset || (strOffset = 0);
lexedLength = lexedLength || str.length; lexedLength || (lexedLength = str.length);
tokens = []; tokens = [];
pi = 0; pi = 0;
i = -1; i = -1;
@ -610,6 +610,9 @@
if (pi < i) { if (pi < i) {
tokens.push(this.makeToken('NEOSTRING', str.slice(pi, i), strOffset + pi)); tokens.push(this.makeToken('NEOSTRING', str.slice(pi, i), strOffset + pi));
} }
if (!errorToken) {
errorToken = this.makeToken('', 'string interpolation', offsetInChunk + i + 1, 2);
}
inner = expr.slice(1, -1); inner = expr.slice(1, -1);
if (inner.length) { if (inner.length) {
_ref2 = this.getLineAndColumnFromChunk(strOffset + i + 1), line = _ref2[0], column = _ref2[1]; _ref2 = this.getLineAndColumnFromChunk(strOffset + i + 1), line = _ref2[0], column = _ref2[1];
@ -646,7 +649,7 @@
tokens.unshift(this.makeToken('NEOSTRING', '', offsetInChunk)); tokens.unshift(this.makeToken('NEOSTRING', '', offsetInChunk));
} }
if (interpolated = tokens.length > 1) { if (interpolated = tokens.length > 1) {
this.token('(', '(', offsetInChunk, 0); this.token('(', '(', offsetInChunk, 0, errorToken);
} }
for (i = _i = 0, _len = tokens.length; _i < _len; i = ++_i) { for (i = _i = 0, _len = tokens.length; _i < _len; i = ++_i) {
token = tokens[i]; token = tokens[i];
@ -731,9 +734,12 @@
return token; return token;
}; };
Lexer.prototype.token = function(tag, value, offsetInChunk, length) { Lexer.prototype.token = function(tag, value, offsetInChunk, length, origin) {
var token; var token;
token = this.makeToken(tag, value, offsetInChunk, length); token = this.makeToken(tag, value, offsetInChunk, length);
if (origin) {
token.origin = origin;
}
this.tokens.push(token); this.tokens.push(token);
return token; return token;
}; };

View File

@ -4,10 +4,13 @@
__indexOf = [].indexOf || function(item) { for (var i = 0, l = this.length; i < l; i++) { if (i in this && this[i] === item) return i; } return -1; }, __indexOf = [].indexOf || function(item) { for (var i = 0, l = this.length; i < l; i++) { if (i in this && this[i] === item) return i; } return -1; },
__slice = [].slice; __slice = [].slice;
generate = function(tag, value) { generate = function(tag, value, origin) {
var tok; var tok;
tok = [tag, value]; tok = [tag, value];
tok.generated = true; tok.generated = true;
if (origin) {
tok.origin = origin;
}
return tok; return tok;
}; };
@ -212,7 +215,7 @@
ours: true ours: true
} }
]); ]);
tokens.splice(idx, 0, generate('{', generate(new String('{')))); tokens.splice(idx, 0, generate('{', generate(new String('{')), token));
if (j == null) { if (j == null) {
return i += 1; return i += 1;
} }
@ -220,7 +223,7 @@
endImplicitObject = function(j) { endImplicitObject = function(j) {
j = j != null ? j : i; j = j != null ? j : i;
stack.pop(); stack.pop();
tokens.splice(j, 0, generate('}', '}')); tokens.splice(j, 0, generate('}', '}', token));
return i += 1; return i += 1;
}; };
if (inImplicitCall() && (tag === 'IF' || tag === 'TRY' || tag === 'FINALLY' || tag === 'CATCH' || tag === 'CLASS' || tag === 'SWITCH')) { if (inImplicitCall() && (tag === 'IF' || tag === 'TRY' || tag === 'FINALLY' || tag === 'CATCH' || tag === 'CLASS' || tag === 'SWITCH')) {

View File

@ -183,6 +183,7 @@ parser.lexer =
token = @tokens[@pos++] token = @tokens[@pos++]
if token if token
[tag, @yytext, @yylloc] = token [tag, @yytext, @yylloc] = token
@errorToken = token.origin or token
@yylineno = @yylloc.first_line @yylineno = @yylloc.first_line
else else
tag = '' tag = ''
@ -198,12 +199,21 @@ parser.yy = require './nodes'
# Override Jison's default error handling function. # Override Jison's default error handling function.
parser.yy.parseError = (message, {token}) -> parser.yy.parseError = (message, {token}) ->
# Disregard Jison's message, it contains redundant line number information. # Disregard Jison's message, it contains redundant line number information.
message = "unexpected #{if token is 1 then 'end of input' else token}" # Disregard the token, we take its value directly from the lexer in case
# the error is caused by a generated token which might refer to its origin.
{errorToken, tokens} = parser.lexer
[ignored, errorText, errorLoc] = errorToken
errorText = if errorToken is tokens[tokens.length - 1]
'end of input'
else
helpers.nameWhitespaceCharacter errorText
# The second argument has a `loc` property, which should have the location # The second argument has a `loc` property, which should have the location
# data for this token. Unfortunately, Jison seems to send an outdated `loc` # data for this token. Unfortunately, Jison seems to send an outdated `loc`
# (from the previous token), so we take the location information directly # (from the previous token), so we take the location information directly
# from the lexer. # from the lexer.
helpers.throwSyntaxError message, parser.lexer.yylloc helpers.throwSyntaxError "unexpected #{errorText}", errorLoc
# Based on http://v8.googlecode.com/svn/branches/bleeding_edge/src/messages.js # Based on http://v8.googlecode.com/svn/branches/bleeding_edge/src/messages.js
# Modified to handle sourceMap # Modified to handle sourceMap

View File

@ -188,3 +188,11 @@ syntaxErrorToString = ->
#{codeLine} #{codeLine}
#{marker} #{marker}
""" """
# Map an invisible whitespace character to a human-readable name for use in
# syntax-error messages; any other string is returned unchanged.
exports.nameWhitespaceCharacter = (string) ->
  names =
    ' ':  'space'
    '\n': 'newline'
    '\r': 'carriage return'
    '\t': 'tab'
  # Own-property check so inherited keys (e.g. 'toString') fall through to
  # the default, matching the original switch/when behavior.
  if Object::hasOwnProperty.call names, string then names[string] else string

View File

@ -520,9 +520,9 @@ exports.Lexer = class Lexer
# current chunk. # current chunk.
interpolateString: (str, options = {}) -> interpolateString: (str, options = {}) ->
{heredoc, regex, offsetInChunk, strOffset, lexedLength} = options {heredoc, regex, offsetInChunk, strOffset, lexedLength} = options
offsetInChunk = offsetInChunk || 0 offsetInChunk ||= 0
strOffset = strOffset || 0 strOffset ||= 0
lexedLength = lexedLength || str.length lexedLength ||= str.length
# Parse the string. # Parse the string.
tokens = [] tokens = []
@ -537,6 +537,8 @@ exports.Lexer = class Lexer
continue continue
# NEOSTRING is a fake token. This will be converted to a string below. # NEOSTRING is a fake token. This will be converted to a string below.
tokens.push @makeToken('NEOSTRING', str[pi...i], strOffset + pi) if pi < i tokens.push @makeToken('NEOSTRING', str[pi...i], strOffset + pi) if pi < i
unless errorToken
errorToken = @makeToken '', 'string interpolation', offsetInChunk + i + 1, 2
inner = expr[1...-1] inner = expr[1...-1]
if inner.length if inner.length
[line, column] = @getLineAndColumnFromChunk(strOffset + i + 1) [line, column] = @getLineAndColumnFromChunk(strOffset + i + 1)
@ -562,7 +564,9 @@ exports.Lexer = class Lexer
# If the first token is not a string, add a fake empty string to the beginning. # If the first token is not a string, add a fake empty string to the beginning.
tokens.unshift @makeToken('NEOSTRING', '', offsetInChunk) unless tokens[0][0] is 'NEOSTRING' tokens.unshift @makeToken('NEOSTRING', '', offsetInChunk) unless tokens[0][0] is 'NEOSTRING'
@token '(', '(', offsetInChunk, 0 if interpolated = tokens.length > 1 if interpolated = tokens.length > 1
@token '(', '(', offsetInChunk, 0, errorToken
# Push all the tokens # Push all the tokens
for token, i in tokens for token, i in tokens
[tag, value] = token [tag, value] = token
@ -656,8 +660,9 @@ exports.Lexer = class Lexer
# not specified, the length of `value` will be used. # not specified, the length of `value` will be used.
# #
# Returns the new token. # Returns the new token.
token: (tag, value, offsetInChunk, length) -> token: (tag, value, offsetInChunk, length, origin) ->
token = @makeToken tag, value, offsetInChunk, length token = @makeToken tag, value, offsetInChunk, length
token.origin = origin if origin
@tokens.push token @tokens.push token
token token

View File

@ -6,9 +6,10 @@
# parentheses, and generally clean things up. # parentheses, and generally clean things up.
# Create a generated token: one that exists due to a use of implicit syntax. # Create a generated token: one that exists due to a use of implicit syntax.
generate = (tag, value) -> generate = (tag, value, origin) ->
tok = [tag, value] tok = [tag, value]
tok.generated = yes tok.generated = yes
tok.origin = origin if origin
tok tok
# The **Rewriter** class is used by the [Lexer](lexer.html), directly against # The **Rewriter** class is used by the [Lexer](lexer.html), directly against
@ -167,13 +168,13 @@ class exports.Rewriter
startImplicitObject = (j, startsLine = yes) -> startImplicitObject = (j, startsLine = yes) ->
idx = j ? i idx = j ? i
stack.push ['{', idx, sameLine: yes, startsLine: startsLine, ours: yes] stack.push ['{', idx, sameLine: yes, startsLine: startsLine, ours: yes]
tokens.splice idx, 0, generate '{', generate(new String('{')) tokens.splice idx, 0, generate '{', generate(new String('{')), token
i += 1 if not j? i += 1 if not j?
endImplicitObject = (j) -> endImplicitObject = (j) ->
j = j ? i j = j ? i
stack.pop() stack.pop()
tokens.splice j, 0, generate '}', '}' tokens.splice j, 0, generate '}', '}', token
i += 1 i += 1
# Don't end an implicit call on next indent if any of these are in an argument # Don't end an implicit call on next indent if any of these are in an argument

View File

@ -26,7 +26,7 @@ test "parser error formating", ->
foo in bar or in baz foo in bar or in baz
''', ''',
''' '''
[stdin]:1:15: error: unexpected RELATION [stdin]:1:15: error: unexpected in
foo in bar or in baz foo in bar or in baz
^^ ^^
''' '''
@ -58,9 +58,44 @@ test "#2849: compilation error in a require()d file", ->
require './test/syntax-error' require './test/syntax-error'
''', ''',
""" """
#{path.join __dirname, 'syntax-error.coffee'}:1:15: error: unexpected RELATION #{path.join __dirname, 'syntax-error.coffee'}:1:15: error: unexpected in
foo in bar or in baz foo in bar or in baz
^^ ^^
""" """
finally finally
fs.unlink 'test/syntax-error.coffee' fs.unlink 'test/syntax-error.coffee'
# Regression tests for #1096: when a parse error is triggered by a generated
# (implicit) token, the error should point at the originating source token
# with a readable name, instead of an internal token tag like RELATION.
test "#1096: unexpected generated tokens", ->
# Unexpected interpolation
assertErrorFormat '{"#{key}": val}', '''
[stdin]:1:3: error: unexpected string interpolation
{"#{key}": val}
^^
'''
# Implicit ends
assertErrorFormat 'a:, b', '''
[stdin]:1:3: error: unexpected ,
a:, b
^
'''
# Explicit ends
assertErrorFormat '(a:)', '''
[stdin]:1:4: error: unexpected )
(a:)
^
'''
# Unexpected end of file
assertErrorFormat 'a:', '''
[stdin]:1:3: error: unexpected end of input
a:
^
'''
# Unexpected implicit object
assertErrorFormat '''
for i in [1]:
1
''', '''
[stdin]:1:13: error: unexpected :
for i in [1]:
^
'''