Clean up TODOs

This commit is contained in:
Jason Walton 2012-11-19 17:37:46 -05:00
parent 12625cc00c
commit df6c497ab0
4 changed files with 283 additions and 25 deletions

View File

@ -21,7 +21,6 @@
}
code = code.replace(/\r/g, '').replace(TRAILING_SPACES, '');
this.code = code;
this.line = opts.line || 0;
this.chunkLine = opts.line || 0;
this.chunkColumn = opts.column || 0;
this.indent = 0;
@ -182,7 +181,6 @@
if (octalEsc = /^(?:\\.|[^\\])*\\(?:0[0-7]|[1-7])/.test(string)) {
this.error("octal escape sequences " + string + " are not allowed");
}
this.line += count(string, '\n');
return string.length;
};
@ -205,7 +203,6 @@
} else {
this.token('STRING', this.makeString(doc, quote, true), 0, heredoc.length);
}
this.line += count(heredoc, '\n');
return heredoc.length;
};
@ -221,7 +218,6 @@
indent: Array(this.indent + 1).join(' ')
}), 0, comment.length);
}
this.line += count(comment, '\n');
return comment.length;
};
@ -231,7 +227,6 @@
return 0;
}
this.token('JS', (script = match[0]).slice(1, -1), 0, script.length);
this.line += count(script, '\n');
return script.length;
};
@ -242,7 +237,6 @@
}
if (match = HEREGEX.exec(this.chunk)) {
length = this.heregexToken(match);
this.line += count(match[0], '\n');
return length;
}
prev = last(this.tokens);
@ -332,10 +326,8 @@
} else {
this.newlineToken(0);
}
this.line += count(indent, '\n');
return indent.length;
}
this.line += count(indent, '\n');
if (size > this.indent) {
if (noNewlines) {
this.indebt = size - this.indent;
@ -582,7 +574,7 @@
};
Lexer.prototype.interpolateString = function(str, options) {
var column, expr, heredoc, i, inner, interpolated, len, letter, lexedLength, line, nested, offsetInChunk, originalOffsetInChunk, pi, popped, regex, tag, token, tokens, value, _i, _len, _ref2, _ref3, _ref4;
var column, expr, heredoc, i, inner, interpolated, len, letter, lexedLength, line, locationToken, nested, offsetInChunk, originalOffsetInChunk, pi, plusToken, popped, regex, tag, token, tokens, value, _i, _len, _ref2, _ref3, _ref4;
if (options == null) {
options = {};
}
@ -650,7 +642,16 @@
token = tokens[i];
tag = token[0], value = token[1];
if (i) {
this.token('+', '+');
if (i) {
plusToken = this.token('+', '+');
}
locationToken = tag === 'TOKENS' ? value[0] : token;
plusToken.locationData = {
first_line: locationToken.locationData.first_line,
first_column: locationToken.locationData.first_column,
last_line: locationToken.locationData.first_line,
last_column: locationToken.locationData.first_column
};
}
if (tag === 'TOKENS') {
(_ref4 = this.tokens).push.apply(_ref4, value);
@ -758,7 +759,7 @@
};
Lexer.prototype.error = function(message) {
throw SyntaxError("" + message + " on line " + (this.line + 1));
throw SyntaxError("" + message + " on line " + (this.chunkLine + 1));
};
return Lexer;

View File

@ -0,0 +1,139 @@
// Generated by CoffeeScript 1.4.0
(function() {
var BASE64_CHARS, LineMapping, MAX_BASE64_VALUE, VLQ_CONTINUATION_BIT, VLQ_MASK, VLQ_SHIFT, encodeBase64Char;
LineMapping = (function() {
  // Holds the column mappings for one line of generated output.
  function LineMapping(generatedLine) {
    this.generatedLine = generatedLine;
    this.columnMap = {};      // generatedColumn -> mapping record (for dedupe)
    this.columnMappings = []; // mapping records in insertion order
  }

  // Record that `generatedColumn` on this line originates from
  // (sourceLine, sourceColumn).  The first mapping registered for a
  // column wins; later calls for the same column are ignored.
  LineMapping.prototype.addMapping = function(generatedColumn, sourceLine, sourceColumn) {
    var mapping;
    if (this.columnMap[generatedColumn]) {
      return;
    }
    mapping = {
      generatedLine: this.generatedLine,
      generatedColumn: generatedColumn,
      sourceLine: sourceLine,
      sourceColumn: sourceColumn
    };
    this.columnMap[generatedColumn] = mapping;
    return this.columnMappings.push(mapping);
  };

  return LineMapping;
})();
exports.SourceMap = (function() {
function SourceMap() {
this.generatedLines = [];
}
SourceMap.prototype.addMapping = function(generatedLine, generatedColumn, sourceLine, sourceColumn) {
var lineArray;
lineArray = this.generatedLines[generatedLine];
if (!lineArray) {
lineArray = this.generatedLines[generatedLine] = LineMapping(generatedLine);
}
return lineArray.addMapping(generatedColumn, sourceLine, sourceColumn);
};
SourceMap.prototype.forEachMapping = function(fn) {
var columnMapping, generatedLineNumber, lineMapping, _i, _len, _ref, _results;
_ref = this.generatedLines;
_results = [];
for (generatedLineNumber = _i = 0, _len = _ref.length; _i < _len; generatedLineNumber = ++_i) {
lineMapping = _ref[generatedLineNumber];
if (lineMapping) {
_results.push((function() {
var _j, _len1, _ref1, _results1;
_ref1 = lineMapping.columnMappings;
_results1 = [];
for (_j = 0, _len1 = _ref1.length; _j < _len1; _j++) {
columnMapping = _ref1[_j];
_results1.push(fn(columnMapping));
}
return _results1;
})());
} else {
_results.push(void 0);
}
}
return _results;
};
return SourceMap;
})();
exports.generateV3SourceMap = function(sourceMap) {
var lastGeneratedColumnWritten, lastSourceColumnWritten, lastSourceLineWritten, mappings, needComma, writingGeneratedLine;
writingGeneratedLine = 0;
lastGeneratedColumnWritten = 0;
lastSourceLineWritten = 0;
lastSourceColumnWritten = 0;
needComma = false;
mappings = "";
return sourceMap.forEachMapping(function(mapping) {
while (writingGeneratedLine < mapping.generatedLine) {
lastGeneratedColumnWritten = 0;
needComma = false;
mappings += ";";
writingGeneratedLine++;
}
if (needComma) {
mappings += ",";
needComma = false;
}
exports.vlqEncodeValue(mapping.generatedColumn - lastGeneratedColumnWritten);
lastGeneratedColumnWritten = mapping.generatedColumn;
exports.vlqEncodeValue(0);
exports.vlqEncodeValue(mapping.sourceLine - lastSourceLineWritten);
lastSourceLineWritten = mapping.sourceLine;
exports.vlqEncodeValue(mapping.sourceColumn - lastSourceColumnWritten);
lastSourceColumnWritten = mapping.sourceColumn;
return needComma = true;
});
};
// Alphabet used for base64 VLQ encoding, per the source-map v3 proposal.
BASE64_CHARS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
MAX_BASE64_VALUE = BASE64_CHARS.length - 1;
// Each VLQ chunk carries 5 payload bits; the 6th bit marks continuation.
VLQ_SHIFT = 5;
VLQ_MASK = 0x1F;              // 0001 1111 - payload bits of a chunk
VLQ_CONTINUATION_BIT = 0x20;  // 0010 0000 - set when more chunks follow
// Convert a 6-bit value (0..MAX_BASE64_VALUE) to its base64 character.
// Throws an Error if `value` is out of range.
encodeBase64Char = function(value) {
  if (value > MAX_BASE64_VALUE) {
    throw Error("Cannot encode value " + value + " > " + MAX_BASE64_VALUE);
  } else if (value < 0) {
    throw Error("Cannot encode value " + value + " < 0");
  }
  return BASE64_CHARS[value];
};
exports.vlqEncodeValue = function(value) {
var answer, nextVlqChunk, _results;
value = value < 0 ? 1 : 0;
value += Math.abs(value) << 1;
answer = "";
_results = [];
while (value) {
nextVlqChunk = value & VLQ_MASK;
value >> VLQ_SHIFT;
if (value) {
nextVlqChunk |= VLQ_CONTINUATION_BIT;
}
_results.push(answer += encodeBase64Char(nextVlqChunk));
}
return _results;
};
}).call(this);

View File

@ -36,7 +36,6 @@ exports.Lexer = class Lexer
code = code.replace(/\r/g, '').replace TRAILING_SPACES, ''
@code = code # The source code.
@line = opts.line or 0 # TODO: Remove
@chunkLine =
opts.line or 0 # The start line for the current chunk.
@chunkColumn =
@ -73,7 +72,6 @@ exports.Lexer = class Lexer
@closeIndentation()
@error "missing #{tag}" if tag = @ends.pop()
return @tokens if opts.rewrite is off
# TODO: deal with Rewriter
(new Rewriter).rewrite @tokens
# Tokenizers
@ -187,7 +185,6 @@ exports.Lexer = class Lexer
return 0
if octalEsc = /^(?:\\.|[^\\])*\\(?:0[0-7]|[1-7])/.test string
@error "octal escape sequences #{string} are not allowed"
@line += count string, '\n'
string.length
# Matches heredocs, adjusting indentation to the correct level, as heredocs
@ -201,7 +198,6 @@ exports.Lexer = class Lexer
@interpolateString doc, heredoc: yes, offsetInChunk: 3
else
@token 'STRING', @makeString(doc, quote, yes), 0, heredoc.length
@line += count heredoc, '\n'
heredoc.length
# Matches and consumes comments.
@ -213,14 +209,12 @@ exports.Lexer = class Lexer
(@sanitizeHeredoc here,
herecomment: true, indent: Array(@indent + 1).join(' ')),
0, comment.length
@line += count comment, '\n'
comment.length
# Matches JavaScript interpolated directly into the source via backticks.
jsToken: ->
return 0 unless @chunk.charAt(0) is '`' and match = JSTOKEN.exec @chunk
@token 'JS', (script = match[0])[1...-1], 0, script.length
@line += count script, '\n'
script.length
# Matches regular expression literals. Lexing regular expressions is difficult
@ -230,7 +224,6 @@ exports.Lexer = class Lexer
return 0 if @chunk.charAt(0) isnt '/'
if match = HEREGEX.exec @chunk
length = @heregexToken match
@line += count match[0], '\n'
return length
prev = last @tokens
@ -307,12 +300,8 @@ exports.Lexer = class Lexer
noNewlines = @unfinished()
if size - @indebt is @indent
if noNewlines then @suppressNewlines() else @newlineToken 0
# Advance @line line after the newlineToken, so the TERMINATOR shows up
# on the right line.
@line += count indent, '\n'
return indent.length
@line += count indent, '\n'
if size > @indent
if noNewlines
@indebt = size - @indent
@ -560,8 +549,15 @@ exports.Lexer = class Lexer
# Push all the tokens
for token, i in tokens
[tag, value] = token
# TODO: this needs location data.
@token '+', '+' if i
if i
# Create a 0-length "+" token.
plusToken = @token '+', '+' if i
locationToken = if tag == 'TOKENS' then value[0] else token
plusToken.locationData =
first_line: locationToken.locationData.first_line
first_column: locationToken.locationData.first_column
last_line: locationToken.locationData.first_line
last_column: locationToken.locationData.first_column
if tag is 'TOKENS'
# Push all the tokens in the fake 'TOKENS' token. These already have
# sane location data.
@ -671,7 +667,9 @@ exports.Lexer = class Lexer
# Throws a syntax error on the current `@line`.
error: (message) ->
throw SyntaxError "#{message} on line #{ @line + 1}"
# TODO: Are there some cases we could improve the error line number by
# passing the offset in the chunk where the error happened?
throw SyntaxError "#{message} on line #{ @chunkLine + 1 }"
# Constants
# ---------

120
src/sourcemap.coffee Normal file
View File

@ -0,0 +1,120 @@
# Hold data about mappings for one line of generated source code.
class LineMapping
  # `generatedLine` is the (0-based) line of generated output described
  # by this object.
  constructor: (@generatedLine) ->
    # Map of generatedColumn -> mapping record, used for de-duplication.
    @columnMap = {}
    # Mapping records in the order they were added.
    @columnMappings = []

  # Record that `generatedColumn` on this line maps back to
  # (`sourceLine`, `sourceColumn`) in the original source.
  addMapping: (generatedColumn, sourceLine, sourceColumn) ->
    if @columnMap[generatedColumn]
      # We already have a mapping for this column. Bail.
      return
    @columnMap[generatedColumn] = {
      generatedLine: @generatedLine
      generatedColumn
      sourceLine
      sourceColumn
    }
    @columnMappings.push @columnMap[generatedColumn]
# Maps positions in generated output back to positions in the original
# source.  Lines and columns are 0-based.
class exports.SourceMap
  constructor: () ->
    # Array of LineMappings, one per generated line.
    @generatedLines = []

  # Adds a mapping to this SourceMap.
  # If there is already a mapping for the specified `generatedLine` and
  # `generatedColumn`, then this will have no effect.
  addMapping: (generatedLine, generatedColumn, sourceLine, sourceColumn) ->
    lineArray = @generatedLines[generatedLine]
    if not lineArray
      # BUG FIX: `new` was missing, so the constructor call returned
      # `undefined` and the `addMapping` call below would crash.
      lineArray = @generatedLines[generatedLine] = new LineMapping(generatedLine)
    lineArray.addMapping generatedColumn, sourceLine, sourceColumn

  # `fn` will be called once for every recorded mapping, in the order in
  # which they occur in the generated source. `fn` will be passed an object
  # with four properties: generatedLine, generatedColumn, sourceLine, and
  # sourceColumn.
  forEachMapping: (fn) ->
    for lineMapping, generatedLineNumber in @generatedLines
      # Lines with no mappings leave holes in the sparse array; skip them.
      if lineMapping
        for columnMapping in lineMapping.columnMappings
          fn(columnMapping)
#### Build a V3 source map from a SourceMap object.
# Returns the "mappings" string: base64 VLQ segments, each field written
# as a delta from the previous mapping, per the Source Map Revision 3
# format.
exports.generateV3SourceMap = (sourceMap) ->
  writingGeneratedLine = 0
  lastGeneratedColumnWritten = 0
  lastSourceLineWritten = 0
  lastSourceColumnWritten = 0
  needComma = no
  mappings = ""
  sourceMap.forEachMapping (mapping) ->
    # ";" separates generated lines; "," separates segments within a line.
    while writingGeneratedLine < mapping.generatedLine
      lastGeneratedColumnWritten = 0
      needComma = no
      mappings += ";"
      writingGeneratedLine++
    if needComma
      mappings += ","
      needComma = no
    # BUG FIX: the encoded VLQ segments were computed and discarded; each
    # must be appended to `mappings`.
    # Add the generated start-column
    mappings += exports.vlqEncodeValue(mapping.generatedColumn - lastGeneratedColumnWritten)
    lastGeneratedColumnWritten = mapping.generatedColumn
    # Add the index into the sources list
    mappings += exports.vlqEncodeValue(0)
    # Add the source start-line
    mappings += exports.vlqEncodeValue(mapping.sourceLine - lastSourceLineWritten)
    lastSourceLineWritten = mapping.sourceLine
    # Add the source start-column
    mappings += exports.vlqEncodeValue(mapping.sourceColumn - lastSourceColumnWritten)
    lastSourceColumnWritten = mapping.sourceColumn
    # TODO: Do we care about symbol names for CoffeeScript?
    needComma = yes
  # BUG FIX: return the accumulated mappings string rather than the
  # (meaningless) return value of forEachMapping.
  mappings
# Alphabet used for base64 VLQ encoding, per the source-map v3 proposal.
BASE64_CHARS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
MAX_BASE64_VALUE = BASE64_CHARS.length - 1
# Each VLQ chunk carries 5 payload bits; the 6th bit marks continuation.
VLQ_SHIFT = 5
VLQ_MASK = 0x1F # 0001 1111
VLQ_CONTINUATION_BIT = 0x20 # 0010 0000

# Convert a 6-bit value (0..MAX_BASE64_VALUE) to its base64 character.
# Throws an Error if `value` is out of range.
encodeBase64Char = (value) ->
  if value > MAX_BASE64_VALUE
    throw Error "Cannot encode value #{value} > #{MAX_BASE64_VALUE}"
  else if value < 0
    throw Error "Cannot encode value #{value} < 0"
  BASE64_CHARS[value]
# Encode a signed integer as a base64 VLQ string (source-map v3 format):
# the least-significant bit of the encoded value carries the sign, the
# magnitude sits above it, and the value is emitted in 5-bit chunks,
# least-significant first, with a continuation bit on all but the last.
exports.vlqEncodeValue = (value) ->
  # Least significant bit represents the sign.
  # BUG FIX: the original overwrote `value` with the sign bit before
  # reading the magnitude, destroying the value being encoded.
  signBit = if value < 0 then 1 else 0
  # Next bits are the actual value.
  value = (Math.abs(value) << 1) + signBit
  answer = ""
  # `loop`/`break` (do-while) so that 0 still emits one chunk ("A").
  loop
    nextVlqChunk = value & VLQ_MASK
    # BUG FIX: `value >> VLQ_SHIFT` discarded its result, so the loop
    # never terminated; the shift must be assigned back.
    value >>= VLQ_SHIFT
    nextVlqChunk |= VLQ_CONTINUATION_BIT if value
    answer += encodeBase64Char(nextVlqChunk)
    break unless value
  # BUG FIX: return the encoded string, not the loop's accumulator array.
  answer