Template fixes
* Fixes #1147: template strings not obeying `-b ascii_only=true`.
* Allow evaluation of template expressions by adding optimizers and walkers.
* Ensure tagged templates are never changed.
* Remove the template tokenizer from the parser and add a template tokenizer to the tokenizer; it uses a brace counter to track the brace nesting position of templates.
* Add the tokens `template_head` and `template_substitution`; parsing of other tokens stays mostly the same.
* Stop outputting plain strings in `AST_TemplateString`; use `AST_TemplateSegment` instead.
* Fix parsing of tagged templates, allowing multiple templates in sequence, as the spec permits.

These changes do not affect tagged templates, because raw content there may influence code execution; however, they are safe to apply to ordinary templates:

* Allow basic string concatenation of templates where possible.
* Allow a custom character-escape style similar to strings, except in tagged templates.

Note that expressions are still compressed inside tagged templates.

Optional things that may be improved later:

* Custom quote style for templates that have no expressions, making them obey the `quote_style` option in that case.
This commit is contained in:
108
lib/parse.js
108
lib/parse.js
@@ -120,7 +120,7 @@ var PUNC_AFTER_EXPRESSION = makePredicate(characters(";]),:"));
|
||||
|
||||
var PUNC_BEFORE_EXPRESSION = makePredicate(characters("[{(,.;:"));
|
||||
|
||||
var PUNC_CHARS = makePredicate(characters("[]{}(),;:`"));
|
||||
var PUNC_CHARS = makePredicate(characters("[]{}(),;:"));
|
||||
|
||||
var REGEXP_MODIFIERS = makePredicate(characters("gmsiy"));
|
||||
|
||||
@@ -269,6 +269,8 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
|
||||
tokcol : 0,
|
||||
newline_before : false,
|
||||
regex_allowed : false,
|
||||
brace_counter : 0,
|
||||
template_braces : [],
|
||||
comments_before : [],
|
||||
directives : {},
|
||||
directive_stack : []
|
||||
@@ -487,6 +489,40 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
|
||||
return tok;
|
||||
});
|
||||
|
||||
var read_template_characters = with_eof_error("SyntaxError: Unterminated template", function(begin){
|
||||
if (begin) {
|
||||
S.template_braces.push(S.brace_counter);
|
||||
}
|
||||
var content = "", raw = "", ch, tok;
|
||||
next();
|
||||
while ((ch = next(true)) !== "`") {
|
||||
if (ch === "$" && peek() === "{") {
|
||||
next();
|
||||
S.brace_counter++;
|
||||
tok = token(begin ? "template_head" : "template_substitution", content);
|
||||
tok.begin = begin;
|
||||
tok.raw = raw;
|
||||
tok.end = false;
|
||||
return tok;
|
||||
}
|
||||
|
||||
raw += ch;
|
||||
if (ch === "\\") {
|
||||
var tmp = S.pos;
|
||||
ch = read_escaped_char();
|
||||
raw += S.text.substr(tmp, S.pos - tmp);
|
||||
}
|
||||
|
||||
content += ch;
|
||||
}
|
||||
S.template_braces.pop();
|
||||
tok = token(begin ? "template_head" : "template_substitution", content);
|
||||
tok.begin = begin;
|
||||
tok.raw = raw;
|
||||
tok.end = true;
|
||||
return tok;
|
||||
});
|
||||
|
||||
function skip_line_comment(type) {
|
||||
var regex_allowed = S.regex_allowed;
|
||||
var i = find_eol(), ret;
|
||||
@@ -688,6 +724,16 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
|
||||
return tok;
|
||||
}
|
||||
case 61: return handle_eq_sign();
|
||||
case 96: return read_template_characters(true);
|
||||
case 123:
|
||||
S.brace_counter++;
|
||||
break;
|
||||
case 125:
|
||||
S.brace_counter--;
|
||||
if (S.template_braces.length > 0
|
||||
&& S.template_braces[S.template_braces.length - 1] === S.brace_counter)
|
||||
return read_template_characters(false);
|
||||
break;
|
||||
}
|
||||
if (is_digit(code)) return read_num();
|
||||
if (PUNC_CHARS(ch)) return token("punc", next());
|
||||
@@ -939,6 +985,7 @@ function parse($TEXT, options) {
|
||||
});
|
||||
}
|
||||
return stat;
|
||||
case "template_head":
|
||||
case "num":
|
||||
case "regexp":
|
||||
case "operator":
|
||||
@@ -960,7 +1007,6 @@ function parse($TEXT, options) {
|
||||
});
|
||||
case "[":
|
||||
case "(":
|
||||
case "`":
|
||||
return simple_statement();
|
||||
case ";":
|
||||
S.in_directives = false;
|
||||
@@ -1600,8 +1646,6 @@ function parse($TEXT, options) {
|
||||
return subscripts(array_(), allow_calls);
|
||||
case "{":
|
||||
return subscripts(object_or_object_destructuring_(), allow_calls);
|
||||
case "`":
|
||||
return subscripts(template_string(), allow_calls);
|
||||
}
|
||||
unexpected();
|
||||
}
|
||||
@@ -1619,6 +1663,9 @@ function parse($TEXT, options) {
|
||||
cls.end = prev();
|
||||
return subscripts(cls, allow_calls);
|
||||
}
|
||||
if (is("template_head")) {
|
||||
return subscripts(template_string(), allow_calls);
|
||||
}
|
||||
if (ATOMIC_START_TOKEN[S.token.type]) {
|
||||
return subscripts(as_atom_node(), allow_calls);
|
||||
}
|
||||
@@ -1626,28 +1673,29 @@ function parse($TEXT, options) {
|
||||
};
|
||||
|
||||
function template_string() {
|
||||
var tokenizer_S = S.input, start = S.token, segments = [], segment = "", ch;
|
||||
var segments = [], start = S.token;
|
||||
|
||||
while ((ch = tokenizer_S.next()) !== "`") {
|
||||
if (ch === "$" && tokenizer_S.peek() === "{") {
|
||||
segments.push(segment); segment = "";
|
||||
tokenizer_S.next();
|
||||
next();
|
||||
segments.push(expression());
|
||||
if (!is("punc", "}")) {
|
||||
// force error message
|
||||
expect("}");
|
||||
}
|
||||
continue;
|
||||
}
|
||||
segment += ch;
|
||||
if (ch === "\\") {
|
||||
segment += tokenizer_S.next();
|
||||
segments.push(new AST_TemplateSegment({
|
||||
start: S.token,
|
||||
raw: S.token.raw,
|
||||
value: S.token.value,
|
||||
end: S.token
|
||||
}));
|
||||
while (S.token.end === false) {
|
||||
next();
|
||||
segments.push(expression());
|
||||
|
||||
if (!is_token("template_substitution")) {
|
||||
unexpected();
|
||||
}
|
||||
|
||||
segments.push(new AST_TemplateSegment({
|
||||
start: S.token,
|
||||
raw: S.token.raw,
|
||||
value: S.token.value,
|
||||
end: S.token
|
||||
}));
|
||||
}
|
||||
|
||||
segments.push(segment);
|
||||
|
||||
next();
|
||||
|
||||
return new AST_TemplateString({
|
||||
@@ -2033,6 +2081,13 @@ function parse($TEXT, options) {
|
||||
end : prev()
|
||||
}), true);
|
||||
}
|
||||
if (is("template_head")) {
|
||||
return subscripts(new AST_PrefixedTemplateString({
|
||||
start: start,
|
||||
prefix: expr,
|
||||
template_string: template_string()
|
||||
}), allow_calls);
|
||||
}
|
||||
return expr;
|
||||
};
|
||||
|
||||
@@ -2189,13 +2244,6 @@ function parse($TEXT, options) {
|
||||
});
|
||||
return arrow_function(expr);
|
||||
}
|
||||
if ((expr instanceof AST_SymbolRef || expr instanceof AST_PropAccess) && is("punc", "`")) {
|
||||
return new AST_PrefixedTemplateString({
|
||||
start: start,
|
||||
prefix: expr,
|
||||
template_string: template_string()
|
||||
})
|
||||
}
|
||||
if (commas && is("punc", ",")) {
|
||||
next();
|
||||
return new AST_Seq({
|
||||
|
||||
Reference in New Issue
Block a user