minor tweaks (#3502)
@@ -91,13 +91,11 @@ function OutputStream(options) {
             comment_filter = function(comment) {
                 return comment.type != "comment5" && comments.test(comment.value);
             };
-        }
-        else if (typeof comments === "function") {
+        } else if (typeof comments === "function") {
             comment_filter = function(comment) {
                 return comment.type != "comment5" && comments(this, comment);
             };
-        }
-        else if (comments === "some") {
+        } else if (comments === "some") {
             comment_filter = is_some_comments;
         } else { // NOTE includes "all" option
             comment_filter = return_true;
@@ -643,8 +641,7 @@ function OutputStream(options) {
         var self = this, generator = self._codegen;
         if (self instanceof AST_Scope) {
             active_scope = self;
-        }
-        else if (!use_asm && self instanceof AST_Directive && self.value == "use asm") {
+        } else if (!use_asm && self instanceof AST_Directive && self.value == "use asm") {
             use_asm = active_scope;
         }
         function doit() {
@@ -1043,11 +1040,9 @@ function OutputStream(options) {
                         return;
                     }
                     b = b.alternative;
-                }
-                else if (b instanceof AST_StatementWithBody) {
+                } else if (b instanceof AST_StatementWithBody) {
                     b = b.body;
-                }
-                else break;
+                } else break;
             }
             force_statement(self.body, output);
         }
lib/parse.js (85 changed lines)
@@ -234,6 +234,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
         directives : {},
         directive_stack : []
     };
+    var prev_was_dot = false;
 
     function peek() {
         return S.text.charAt(S.pos);
@@ -286,16 +287,12 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
         S.tokpos = S.pos;
     }
 
-    var prev_was_dot = false;
     function token(type, value, is_comment) {
-        S.regex_allowed = ((type == "operator" && !UNARY_POSTFIX[value]) ||
-                           (type == "keyword" && KEYWORDS_BEFORE_EXPRESSION[value]) ||
-                           (type == "punc" && PUNC_BEFORE_EXPRESSION[value]));
-        if (type == "punc" && value == ".") {
-            prev_was_dot = true;
-        } else if (!is_comment) {
-            prev_was_dot = false;
-        }
+        S.regex_allowed = type == "operator" && !UNARY_POSTFIX[value]
+            || type == "keyword" && KEYWORDS_BEFORE_EXPRESSION[value]
+            || type == "punc" && PUNC_BEFORE_EXPRESSION[value];
+        if (type == "punc" && value == ".") prev_was_dot = true;
+        else if (!is_comment) prev_was_dot = false;
         var ret = {
             type : type,
             value : value,
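
The un-parenthesized form of S.regex_allowed is equivalent because && binds tighter than || in JavaScript, and prev_was_dot is now declared once next to the rest of the tokenizer state so both branches of token() write to the same flag. A minimal stand-alone sketch of the precedence point (illustrative values, not the library's state):

    // && binds tighter than ||, so dropping the inner parentheses does not
    // change the result (names here are placeholders, not library state):
    var type = "keyword", value = "return";
    var KEYWORDS_BEFORE_EXPRESSION = { "return": true };
    var UNARY_POSTFIX = {}, PUNC_BEFORE_EXPRESSION = {};
    var grouped = (type == "operator" && !UNARY_POSTFIX[value]) ||
                  (type == "keyword" && KEYWORDS_BEFORE_EXPRESSION[value]) ||
                  (type == "punc" && PUNC_BEFORE_EXPRESSION[value]);
    var ungrouped = type == "operator" && !UNARY_POSTFIX[value]
        || type == "keyword" && KEYWORDS_BEFORE_EXPRESSION[value]
        || type == "punc" && PUNC_BEFORE_EXPRESSION[value];
    console.log(grouped === ungrouped); // true
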
@@ -358,12 +355,9 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
             parse_error("Legacy octal literals are not allowed in strict mode");
         }
         var valid = parse_js_number(num);
-        if (!isNaN(valid)) {
-            return token("num", valid);
-        } else {
-            parse_error("Invalid syntax: " + num);
-        }
+        if (!isNaN(valid)) return token("num", valid);
+        parse_error("Invalid syntax: " + num);
     }
 
     function read_escaped_char(in_string) {
         var ch = next(true, in_string);
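
The else around parse_error is redundant once the success path returns early, since parse_error throws. A hypothetical stand-in (parseFloat here replaces the library-internal parse_js_number):

    // Early-return pattern: the throwing call needs no else branch.
    function parseNumberOrThrow(num) {
        var valid = parseFloat(num);
        if (!isNaN(valid)) return valid;
        throw new SyntaxError("Invalid syntax: " + num);
    }
    console.log(parseNumberOrThrow("42")); // 42
    // parseNumberOrThrow("abc");          // throws SyntaxError
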
@@ -463,8 +457,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
                 if (ch == "\\") escaped = backslash = true, next();
                 else if (is_identifier_char(ch)) name += next();
                 else break;
-            }
-            else {
+            } else {
                 if (ch != "u") parse_error("Expecting UnicodeEscapeSequence -- uXXXX");
                 ch = read_escaped_char();
                 if (!is_identifier_char(ch)) parse_error("Unicode char: " + ch.charCodeAt(0) + " is not valid in identifier");
@@ -538,9 +531,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 
     function handle_dot() {
         next();
-        return is_digit(peek().charCodeAt(0))
-            ? read_num(".")
-            : token("punc", ".");
+        return is_digit(peek().charCodeAt(0)) ? read_num(".") : token("punc", ".");
     }
 
     function read_word() {
@@ -592,12 +583,11 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
         switch (code) {
           case 34: case 39: return read_string(ch);
           case 46: return handle_dot();
-          case 47: {
+          case 47:
             var tok = handle_slash();
             if (tok === next_token) continue;
             return tok;
-          }
         }
         if (is_digit(code)) return read_num();
         if (PUNC_CHARS[ch]) return token("punc", next());
         if (OPERATOR_CHARS[ch]) return read_operator();
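
The block braces around case 47 add no scoping, because var declarations are function-scoped rather than block-scoped, so tok behaves identically with or without them. A standalone illustration (hypothetical function, not library code):

    // var is hoisted to the function, not confined to the case block.
    function example(code) {
        switch (code) {
          case 47:
            var tok = "slash";
            return tok;
        }
        return typeof tok; // "undefined": declared but never assigned
    }
    console.log(example(47)); // "slash"
    console.log(example(0));  // "undefined"
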
@@ -614,12 +604,8 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 
     next_token.add_directive = function(directive) {
         S.directive_stack[S.directive_stack.length - 1].push(directive);
-
-        if (S.directives[directive] === undefined) {
-            S.directives[directive] = 1;
-        } else {
-            S.directives[directive]++;
-        }
+        if (S.directives[directive]) S.directives[directive]++;
+        else S.directives[directive] = 1;
     }
 
     next_token.push_directives_stack = function() {
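
The truthiness test covers both the "never seen" case (undefined) and a count that has dropped back to 0, and yields the same totals as the explicit undefined check. A self-contained sketch of that counter logic (not the library API):

    // A falsy count (undefined or 0) is (re)set to 1, anything else is incremented.
    var directives = {};
    function addDirective(name) {
        if (directives[name]) directives[name]++;
        else directives[name] = 1;
    }
    addDirective("use strict");
    addDirective("use strict");
    console.log(directives["use strict"]); // 2
    directives["use asm"] = 0;             // e.g. after a directive scope was popped
    addDirective("use asm");
    console.log(directives["use asm"]);    // 1
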
@@ -627,13 +613,10 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
     }
 
     next_token.pop_directives_stack = function() {
-        var directives = S.directive_stack[S.directive_stack.length - 1];
-
-        for (var i = 0; i < directives.length; i++) {
+        var directives = S.directive_stack.pop();
+        for (var i = directives.length; --i >= 0;) {
             S.directives[directives[i]]--;
         }
-
-        S.directive_stack.pop();
     }
 
     next_token.has_directive = function(directive) {
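
Array.prototype.pop returns the removed element, so the explicit last-index lookup and the separate trailing pop() fold into a single statement; iterating the popped array backwards gives the same counts as iterating forwards because each entry is decremented exactly once. A runnable sketch of the same idea (illustrative data, not the tokenizer state):

    var counts = { "use strict": 2, "use asm": 1 };
    var stack = [["use strict"], ["use strict", "use asm"]];
    var top = stack.pop();                      // ["use strict", "use asm"], stack shrinks by one
    for (var i = top.length; --i >= 0;) counts[top[i]]--;
    console.log(counts);                        // { "use strict": 1, "use asm": 0 }
    console.log(stack.length);                  // 1
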
@@ -645,27 +628,17 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 
 /* -----[ Parser (constants) ]----- */
 
-var UNARY_PREFIX = makePredicate([
-    "typeof",
-    "void",
-    "delete",
-    "--",
-    "++",
-    "!",
-    "~",
-    "-",
-    "+"
-]);
+var UNARY_PREFIX = makePredicate("typeof void delete -- ++ ! ~ - +");
 
-var UNARY_POSTFIX = makePredicate([ "--", "++" ]);
+var UNARY_POSTFIX = makePredicate("-- ++");
 
-var ASSIGNMENT = makePredicate([ "=", "+=", "-=", "/=", "*=", "%=", ">>=", "<<=", ">>>=", "|=", "^=", "&=" ]);
+var ASSIGNMENT = makePredicate("= += -= /= *= %= >>= <<= >>>= |= ^= &=");
 
 var PRECEDENCE = function(a, ret) {
-    for (var i = 0; i < a.length; ++i) {
-        var b = a[i];
-        for (var j = 0; j < b.length; ++j) {
-            ret[b[j]] = i + 1;
+    for (var i = 0; i < a.length;) {
+        var b = a[i++];
+        for (var j = 0; j < b.length; j++) {
+            ret[b[j]] = i;
         }
     }
     return ret;
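
The string arguments assume makePredicate accepts a space-separated word list as well as an array (splitting on spaces); the call sites still index the result like a map, e.g. UNARY_POSTFIX[value]. In the PRECEDENCE builder, reading a[i++] advances i before the inner assignment runs, so ret[b[j]] = i stores the same 1-based level as the previous i + 1. A minimal sketch of such a predicate helper (an assumption about lib/utils.js, not its actual code):

    // Accepts either an array of words or a space-separated string and
    // returns a plain lookup map keyed by word.
    function makePredicateSketch(words) {
        if (typeof words == "string") words = words.split(" ");
        var map = Object.create(null);
        for (var i = 0; i < words.length; i++) map[words[i]] = true;
        return map;
    }
    var POSTFIX = makePredicateSketch("-- ++");
    console.log(!!POSTFIX["++"], !!POSTFIX["--"], !!POSTFIX["+"]); // true true false
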
@@ -682,7 +655,7 @@ var PRECEDENCE = function(a, ret) {
     ["*", "/", "%"]
 ], {});
 
-var ATOMIC_START_TOKEN = makePredicate([ "atom", "num", "string", "regexp", "name" ]);
+var ATOMIC_START_TOKEN = makePredicate("atom num string regexp name");
 
 /* -----[ Parser ]----- */
 
@@ -698,10 +671,9 @@ function parse($TEXT, options) {
     }, true);
 
     var S = {
-        input : (typeof $TEXT == "string"
-                 ? tokenizer($TEXT, options.filename,
-                             options.html5_comments, options.shebang)
-                 : $TEXT),
+        input : typeof $TEXT == "string"
+            ? tokenizer($TEXT, options.filename, options.html5_comments, options.shebang)
+            : $TEXT,
         token : null,
         prev : null,
         peeked : null,
@@ -757,15 +729,12 @@ function parse($TEXT, options) {
     }
 
     function unexpected(token) {
-        if (token == null)
-            token = S.token;
+        if (token == null) token = S.token;
         token_error(token, "Unexpected token: " + token_to_string(token.type, token.value));
     }
 
     function expect_token(type, val) {
-        if (is(type, val)) {
-            return next();
-        }
+        if (is(type, val)) return next();
         token_error(S.token, "Unexpected token: " + token_to_string(S.token.type, S.token.value) + ", expected: " + token_to_string(type, val));
     }
 
@@ -127,8 +127,7 @@ var MAP = (function() {
             } else {
                 top.push(val);
             }
-        }
-        else if (val !== skip) {
+        } else if (val !== skip) {
             if (val instanceof Splice) {
                 ret.push.apply(ret, backwards ? val.v.slice().reverse() : val.v);
             } else {
@@ -145,8 +144,7 @@ var MAP = (function() {
         } else {
             for (i = 0; i < a.length; ++i) if (doit()) break;
         }
-    }
-    else {
+    } else {
         for (i in a) if (HOP(a, i)) if (doit()) break;
     }
     return top.concat(ret);
@@ -4947,7 +4947,7 @@ defun_single_use_loop: {
         unused: true,
     }
     input: {
-        for (var x, i = 2; --i >= 0; ) {
+        for (var x, i = 2; --i >= 0;) {
             var y = x;
             x = f;
             console.log(x === y);
@@ -4955,7 +4955,7 @@ defun_single_use_loop: {
         function f() {};
     }
     expect: {
-        for (var x, i = 2; --i >= 0; ) {
+        for (var x, i = 2; --i >= 0;) {
             var y = x;
             x = f;
             console.log(x === y);