Remove unnecessary returns and use default parameters

This commit is contained in:
Demian Ferreiro 2013-02-25 10:44:56 -03:00
parent c39723c053
commit f609036bee
2 changed files with 10 additions and 11 deletions

View File

@ -732,13 +732,15 @@
Lexer.prototype.makeToken = function(tag, value, offsetInChunk, length) {
var lastCharacter, locationData, token, _ref2, _ref3;
offsetInChunk = offsetInChunk || 0;
if (length === void 0) {
if (offsetInChunk == null) {
offsetInChunk = 0;
}
if (length == null) {
length = value.length;
}
locationData = {};
_ref2 = this.getLineAndColumnFromChunk(offsetInChunk), locationData.first_line = _ref2[0], locationData.first_column = _ref2[1];
lastCharacter = length > 0 ? length - 1 : 0;
lastCharacter = Math.max(0, length - 1);
_ref3 = this.getLineAndColumnFromChunk(offsetInChunk + (length - 1)), locationData.last_line = _ref3[0], locationData.last_column = _ref3[1];
token = [tag, value, locationData];
return token;

View File

@ -634,27 +634,24 @@ exports.Lexer = class Lexer
else
column += string.length
return [@chunkLine + lineCount, column]
[@chunkLine + lineCount, column]
# Same as "token", except this just returns the token without adding it
# to the results.
makeToken: (tag, value, offsetInChunk, length) ->
offsetInChunk = offsetInChunk || 0
if length is undefined then length = value.length
makeToken: (tag, value, offsetInChunk = 0, length = value.length) ->
locationData = {}
[locationData.first_line, locationData.first_column] =
@getLineAndColumnFromChunk offsetInChunk
# Use length - 1 for the final offset - we're supplying the last_line and the last_column,
# so if last_column == first_column, then we're looking at a character of length 1.
lastCharacter = if length > 0 then (length - 1) else 0
lastCharacter = Math.max 0, length - 1
[locationData.last_line, locationData.last_column] =
@getLineAndColumnFromChunk offsetInChunk + (length - 1)
token = [tag, value, locationData]
return token
token
# Add a token to the results.
# `offset` is the offset into the current @chunk where the token starts.
@ -665,7 +662,7 @@ exports.Lexer = class Lexer
token: (tag, value, offsetInChunk, length) ->
token = @makeToken tag, value, offsetInChunk, length
@tokens.push token
return token
token
# Peek at a tag in the current token stream.
tag: (index, tag) ->