Merging in satyr's rewrite-lexer.coffee

Jeremy Ashkenas 2010-09-26 10:57:03 -04:00
parent bd047cbb4f
commit ecb23d15c4
2 changed files with 16 additions and 19 deletions

View File

@@ -393,7 +393,7 @@
       }
       doc = doc.replace(/^\n/, '').replace(new RegExp("" + (options.quote), "g"), '\\$&');
       if (options.quote === "'") {
-        doc = this.oldline(doc, true);
+        doc = this.escapeLines(doc, true);
       }
       return doc;
     };
@@ -477,8 +477,10 @@
       return !i ? false : str.slice(0, i);
     };
     Lexer.prototype.interpolateString = function(str, options) {
-      var _len, _ref2, _ref3, end, escaped, expr, i, idx, inner, interpolated, lexer, nested, pi, push, quote, s, tag, tok, token, tokens, value;
-      options || (options = {});
+      var _len, _ref2, _ref3, end, escapeQuotes, escaped, expr, heredoc, i, idx, inner, interpolated, lexer, nested, pi, push, quote, s, tag, tok, token, tokens, value;
+      _ref2 = options || {};
+      heredoc = _ref2.heredoc;
+      escapeQuotes = _ref2.escapeQuotes;
       quote = str.charAt(0);
       if (quote !== '"' || str.length < 3) {
         return this.token('STRING', str);
@@ -492,12 +494,12 @@
           i += 1;
         } else if (expr = this.balancedString(str.slice(i), [['#{', '}']])) {
           if (pi < i) {
-            s = quote + this.oldline(str.slice(pi, i), options.heredoc) + quote;
+            s = quote + this.escapeLines(str.slice(pi, i), heredoc) + quote;
             tokens.push(['STRING', s]);
           }
-          inner = expr.slice(2, -1).replace(/^\s+/, '');
+          inner = expr.slice(2, -1).replace(/^[ \t]*\n/, '');
           if (inner.length) {
-            if (options.heredoc) {
+            if (heredoc) {
              inner = inner.replace(RegExp('\\\\' + quote, 'g'), quote);
            }
            nested = lexer.tokenize("(" + (inner) + ")", {
@@ -521,7 +523,7 @@
        i += 1;
      }
      if ((i > pi) && (pi < str.length - 1)) {
-        s = str.slice(pi, i).replace(MULTILINER, options.heredoc ? '\\n' : '');
+        s = str.slice(pi, i).replace(MULTILINER, heredoc ? '\\n' : '');
        tokens.push(['STRING', quote + s + quote]);
      }
      if (tokens[0][0] !== 'STRING') {
@@ -541,7 +543,7 @@
        value = _ref3[1];
        if (tag === 'TOKENS') {
          push.apply(this.tokens, value);
-        } else if (tag === 'STRING' && options.escapeQuotes) {
+        } else if (tag === 'STRING' && escapeQuotes) {
          escaped = value.slice(1, -1).replace(/"/g, '\\"');
          this.token(tag, "\"" + (escaped) + "\"");
        } else {
@@ -582,15 +584,11 @@
    Lexer.prototype.prev = function(index) {
      return this.tokens[this.tokens.length - (index || 1)];
    };
-    Lexer.prototype.match = function(regex, index) {
-      var m;
-      return (m = this.chunk.match(regex)) ? m[index || 0] : false;
-    };
    Lexer.prototype.unfinished = function() {
      var prev, value;
      return (prev = this.prev(2)) && prev[0] !== '.' && (value = this.value()) && NO_NEWLINE.test(value) && !CODE.test(value) && !ASSIGNED.test(this.chunk);
    };
-    Lexer.prototype.oldline = function(str, heredoc) {
+    Lexer.prototype.escapeLines = function(str, heredoc) {
      return str.replace(MULTILINER, heredoc ? '\\n' : '');
    };
    return Lexer;

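The rename of oldline to escapeLines in both changed files leaves the helper's behavior as it was: newlines matched by MULTILINER become literal \n escape sequences when the string is a heredoc, and are dropped otherwise. A standalone CoffeeScript sketch of that behavior, assuming MULTILINER is the lexer's plain-newline pattern /\n/g:

    MULTILINER  = /\n/g
    escapeLines = (str, heredoc) ->
      str.replace MULTILINER, if heredoc then '\\n' else ''

    console.log escapeLines 'line one\nline two', yes   # line one\nline two  (newline kept as an escape)
    console.log escapeLines 'line one\nline two', no    # line oneline two    (newline dropped)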
View File

@@ -340,7 +340,7 @@ exports.Lexer = class Lexer
     doc = doc.replace /\n#{ indent }/g, '\n' if indent
     return doc if herecomment
     doc = doc.replace(/^\n/, '').replace(/#{ options.quote }/g, '\\$&')
-    doc = @oldline doc, on if options.quote is "'"
+    doc = @escapeLines doc, yes if options.quote is "'"
     doc
 
   # A source of ambiguity in our grammar used to be parameter lists in function
@@ -406,8 +406,7 @@ exports.Lexer = class Lexer
     if not i then false else str[0...i]
 
   # Expand variables and expressions inside double-quoted strings using
-  # Ruby-like notation
-  # for substitution of bare variables as well as arbitrary expressions.
+  # Ruby-like notation for substitution of arbitrary expressions.
   #
   # "Hello #{name.capitalize()}."
   #
@@ -427,7 +426,7 @@ exports.Lexer = class Lexer
        i += 1
      else if expr = @balancedString str[i..], [['#{', '}']]
        if pi < i
-          s = quote + @oldline(str[pi...i], heredoc) + quote
+          s = quote + @escapeLines(str[pi...i], heredoc) + quote
          tokens.push ['STRING', s]
        inner = expr.slice(2, -1).replace /^[ \t]*\n/, ''
        if inner.length
@@ -490,8 +489,8 @@ exports.Lexer = class Lexer
      (value = @value()) and NO_NEWLINE.test(value) and not CODE.test(value) and
      not ASSIGNED.test(@chunk)
 
-  # Converts newlines for string literals
-  oldline: (str, heredoc) ->
+  # Converts newlines for string literals.
+  escapeLines: (str, heredoc) ->
     str.replace MULTILINER, if heredoc then '\\n' else ''
 
 # Constants
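For reference, the compiled pattern introduced at the top of interpolateString (_ref2 = options || {}, with heredoc and escapeQuotes then read off _ref2) is roughly what a CoffeeScript destructuring assignment over an optional options object compiles to. A small runnable sketch of the idiom; the function name and body below are illustrative only and are not taken from this commit:

    interpolate = (str, options) ->
      {heredoc, escapeQuotes} = options or {}
      "str=#{str} heredoc=#{heredoc} escapeQuotes=#{escapeQuotes}"

    console.log interpolate '"hi"'                      # both flags come out undefined
    console.log interpolate '"hi"', escapeQuotes: yes   # escapeQuotes is true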