improve performance through makePredicate() (#3048)
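In short, the commit swaps the old code-generating makePredicate() (and linear member() scans over arrays) for plain object maps, so every call site changes from a function call pred(x) to a property lookup pred[x]. A minimal sketch of the call-site pattern, using lazy_op as an example (the word list shown is illustrative, not taken from this diff):

    // before: makePredicate() returned a generated matcher function
    var lazy_op = makePredicate("&& ||");
    if (lazy_op(node.operator)) { /* ... */ }

    // after: makePredicate() returns a plain object used as a set
    var lazy_op = makePredicate("&& ||");
    if (lazy_op[node.operator]) { /* ... */ }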
@@ -435,7 +435,7 @@ merge(Compressor.prototype, {
 if (!d.escaped || d.escaped > depth) d.escaped = depth;
 return;
 } else if (parent instanceof AST_Array
-|| parent instanceof AST_Binary && lazy_op(parent.operator)
+|| parent instanceof AST_Binary && lazy_op[parent.operator]
 || parent instanceof AST_Conditional && node !== parent.condition
 || parent instanceof AST_Sequence && node === parent.tail_node()) {
 mark_escaped(tw, d, scope, parent, parent, level + 1, depth);
@@ -489,7 +489,7 @@ merge(Compressor.prototype, {
 return true;
 });
 def(AST_Binary, function(tw) {
-if (!lazy_op(this.operator)) return;
+if (!lazy_op[this.operator]) return;
 this.left.walk(tw);
 push(tw);
 this.right.walk(tw);
@@ -884,7 +884,7 @@ merge(Compressor.prototype, {
 var global_names = makePredicate("Array Boolean clearInterval clearTimeout console Date decodeURI decodeURIComponent encodeURI encodeURIComponent Error escape eval EvalError Function isFinite isNaN JSON Math Number parseFloat parseInt RangeError ReferenceError RegExp Object setInterval setTimeout String SyntaxError TypeError unescape URIError");
 AST_SymbolRef.DEFMETHOD("is_declared", function(compressor) {
 return !this.definition().undeclared
-|| compressor.option("unsafe") && global_names(this.name);
+|| compressor.option("unsafe") && global_names[this.name];
 });
 
 var identifier_atom = makePredicate("Infinity NaN undefined");
@@ -977,7 +977,7 @@ merge(Compressor.prototype, {
 }
 // Stop only if candidate is found within conditional branches
 if (!stop_if_hit
-&& (parent instanceof AST_Binary && lazy_op(parent.operator) && parent.left !== node
+&& (parent instanceof AST_Binary && lazy_op[parent.operator] && parent.left !== node
 || parent instanceof AST_Conditional && parent.condition !== node
 || parent instanceof AST_If && parent.condition !== node)) {
 stop_if_hit = parent;
@@ -1285,7 +1285,7 @@ merge(Compressor.prototype, {
 return node;
 }
 if (parent instanceof AST_Binary) {
-if (write_only && (!lazy_op(parent.operator) || parent.left === node)) {
+if (write_only && (!lazy_op[parent.operator] || parent.left === node)) {
 return find_stop(parent, level + 1, write_only);
 }
 return node;
@@ -2119,15 +2119,15 @@ merge(Compressor.prototype, {
 
 // methods to determine whether an expression has a boolean result type
 (function(def){
-var unary_bool = [ "!", "delete" ];
-var binary_bool = [ "in", "instanceof", "==", "!=", "===", "!==", "<", "<=", ">=", ">" ];
+var unary_bool = makePredicate("! delete");
+var binary_bool = makePredicate("in instanceof == != === !== < <= >= >");
 def(AST_Node, return_false);
 def(AST_UnaryPrefix, function(){
-return member(this.operator, unary_bool);
+return unary_bool[this.operator];
 });
 def(AST_Binary, function(){
-return member(this.operator, binary_bool)
-|| lazy_op(this.operator)
+return binary_bool[this.operator]
+|| lazy_op[this.operator]
 && this.left.is_boolean()
 && this.right.is_boolean();
 });
@@ -2152,16 +2152,16 @@ merge(Compressor.prototype, {
 def(AST_Number, return_true);
 var unary = makePredicate("+ - ~ ++ --");
 def(AST_Unary, function(){
-return unary(this.operator);
+return unary[this.operator];
 });
 var binary = makePredicate("- * / % & | ^ << >> >>>");
 def(AST_Binary, function(compressor){
-return binary(this.operator) || this.operator == "+"
+return binary[this.operator] || this.operator == "+"
 && this.left.is_number(compressor)
 && this.right.is_number(compressor);
 });
 def(AST_Assign, function(compressor){
-return binary(this.operator.slice(0, -1))
+return binary[this.operator.slice(0, -1)]
 || this.operator == "=" && this.right.is_number(compressor);
 });
 def(AST_Sequence, function(compressor){
@@ -2202,7 +2202,7 @@ merge(Compressor.prototype, {
 var unary_side_effects = makePredicate("delete ++ --");
 
 function is_lhs(node, parent) {
-if (parent instanceof AST_Unary && unary_side_effects(parent.operator)) return parent.expression;
+if (parent instanceof AST_Unary && unary_side_effects[parent.operator]) return parent.expression;
 if (parent instanceof AST_Assign && parent.left === node) return node;
 }
 
@@ -2405,7 +2405,7 @@ merge(Compressor.prototype, {
 } else {
 return this instanceof AST_UnaryPrefix
 && this.expression instanceof AST_Constant
-&& unaryPrefix(this.operator);
+&& unaryPrefix[this.operator];
 }
 });
 def(AST_Statement, function(){
@@ -2475,7 +2475,7 @@ merge(Compressor.prototype, {
 && e.fixed_value() instanceof AST_Lambda)) {
 return typeof function(){};
 }
-if (!non_converting_unary(this.operator)) depth++;
+if (!non_converting_unary[this.operator]) depth++;
 e = e._eval(compressor, cached, depth);
 if (e === this.expression) return this;
 switch (this.operator) {
@@ -2494,7 +2494,7 @@ merge(Compressor.prototype, {
 });
 var non_converting_binary = makePredicate("&& || === !==");
 def(AST_Binary, function(compressor, cached, depth) {
-if (!non_converting_binary(this.operator)) depth++;
+if (!non_converting_binary[this.operator]) depth++;
 var left = this.left._eval(compressor, cached, depth);
 if (left === this.left) return this;
 var right = this.right._eval(compressor, cached, depth);
@@ -2597,7 +2597,8 @@ merge(Compressor.prototype, {
 var exp = this.expression;
 var val;
 if (is_undeclared_ref(exp)) {
-if (!(static_values[exp.name] || return_false)(key)) return this;
+var static_value = static_values[exp.name];
+if (!static_value || !static_value[key]) return this;
 val = global_objs[exp.name];
 } else {
 val = exp._eval(compressor, cached, depth + 1);
@@ -2626,11 +2627,14 @@ merge(Compressor.prototype, {
 var val;
 var e = exp.expression;
 if (is_undeclared_ref(e)) {
-if (!(static_fns[e.name] || return_false)(key)) return this;
+var static_fn = static_fns[e.name];
+if (!static_fn || !static_fn[key]) return this;
 val = global_objs[e.name];
 } else {
 val = e._eval(compressor, cached, depth + 1);
-if (val === e || !(val && native_fns[val.constructor.name] || return_false)(key)) return this;
+if (val === e || !val) return this;
+var native_fn = native_fns[val.constructor.name];
+if (!native_fn || !native_fn[key]) return this;
 }
 var args = [];
 for (var i = 0, len = this.args.length; i < len; i++) {
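The static_values, static_fns and native_fns tables appear to map a global object's name to a makePredicate() set of its properties, so the rewritten checks use two plain lookups with an explicit guard instead of falling back to return_false. A hedged sketch of that shape (is_known_static and the "Math" entry are hypothetical, for illustration only):

    // assumed shape: outer key = global name, inner value = makePredicate() map
    var static_fns = {
        Math: makePredicate("floor max min pow sqrt"),
    };

    function is_known_static(name, key) {
        var static_fn = static_fns[name];       // may be undefined
        return !!(static_fn && static_fn[key]); // two constant-time lookups
    }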
@@ -2738,11 +2742,10 @@ merge(Compressor.prototype, {
 AST_Call.DEFMETHOD("is_expr_pure", function(compressor) {
 if (compressor.option("unsafe")) {
 var expr = this.expression;
-if (is_undeclared_ref(expr) && global_pure_fns(expr.name)) return true;
-if (expr instanceof AST_Dot
-&& is_undeclared_ref(expr.expression)
-&& (static_fns[expr.expression.name] || return_false)(expr.property)) {
-return true;
+if (is_undeclared_ref(expr) && global_pure_fns[expr.name]) return true;
+if (expr instanceof AST_Dot && is_undeclared_ref(expr.expression)) {
+var static_fn = static_fns[expr.expression.name];
+return static_fn && static_fn[expr.property];
 }
 }
 return this.pure || !compressor.pure_funcs(this);
@@ -2751,21 +2754,21 @@ merge(Compressor.prototype, {
 AST_Dot.DEFMETHOD("is_call_pure", function(compressor) {
 if (!compressor.option("unsafe")) return;
 var expr = this.expression;
-var fns = return_false;
+var map;
 if (expr instanceof AST_Array) {
-fns = native_fns.Array;
+map = native_fns.Array;
 } else if (expr.is_boolean()) {
-fns = native_fns.Boolean;
+map = native_fns.Boolean;
 } else if (expr.is_number(compressor)) {
-fns = native_fns.Number;
+map = native_fns.Number;
 } else if (expr instanceof AST_RegExp) {
-fns = native_fns.RegExp;
+map = native_fns.RegExp;
 } else if (expr.is_string(compressor)) {
-fns = native_fns.String;
+map = native_fns.String;
 } else if (!this.may_throw_on_access(compressor)) {
-fns = native_fns.Object;
+map = native_fns.Object;
 }
-return fns(this.property);
+return map && map[this.property];
 });
 
 // determine if expression has side effects
@@ -2830,7 +2833,7 @@ merge(Compressor.prototype, {
 || this.alternative.has_side_effects(compressor);
 });
 def(AST_Unary, function(compressor){
-return unary_side_effects(this.operator)
+return unary_side_effects[this.operator]
 || this.expression.has_side_effects(compressor);
 });
 def(AST_SymbolRef, function(compressor){
@@ -3720,7 +3723,7 @@ merge(Compressor.prototype, {
 def(AST_Binary, function(compressor, first_in_statement){
 var right = this.right.drop_side_effect_free(compressor);
 if (!right) return this.left.drop_side_effect_free(compressor, first_in_statement);
-if (lazy_op(this.operator)) {
+if (lazy_op[this.operator]) {
 if (right === this.right) return this;
 var node = this.clone();
 node.right = right;
@@ -3765,7 +3768,7 @@ merge(Compressor.prototype, {
 return node;
 });
 def(AST_Unary, function(compressor, first_in_statement){
-if (unary_side_effects(this.operator)) {
+if (unary_side_effects[this.operator]) {
 this.write_only = !this.expression.has_side_effects(compressor);
 return this;
 }
@@ -4668,7 +4671,7 @@ merge(Compressor.prototype, {
 if (arg.__unused) continue;
 if (!safe_to_inject
 || catches[arg.name]
-|| identifier_atom(arg.name)
+|| identifier_atom[arg.name]
 || scope.var_names()[arg.name]) {
 return false;
 }
@@ -4686,7 +4689,7 @@ merge(Compressor.prototype, {
 for (var j = stat.definitions.length; --j >= 0;) {
 var name = stat.definitions[j].name;
 if (catches[name.name]
-|| identifier_atom(name.name)
+|| identifier_atom[name.name]
 || scope.var_names()[name.name]) {
 return false;
 }
@@ -5001,7 +5004,7 @@ merge(Compressor.prototype, {
 self.right = tmp;
 }
 }
-if (commutativeOperators(self.operator)) {
+if (commutativeOperators[self.operator]) {
 if (self.right.is_constant()
 && !self.left.is_constant()) {
 // if right is a constant, whatever side effects the
@@ -5394,7 +5397,7 @@ merge(Compressor.prototype, {
 // "x" + (y + "z")==> "x" + y + "z"
 if (self.right instanceof AST_Binary
 && self.right.operator == self.operator
-&& (lazy_op(self.operator)
+&& (lazy_op[self.operator]
 || (self.operator == "+"
 && (self.right.left.is_string(compressor)
 || (self.left.is_string(compressor)
@@ -5645,8 +5648,8 @@ merge(Compressor.prototype, {
 return reachable;
 }
 
-var ASSIGN_OPS = [ '+', '-', '/', '*', '%', '>>', '<<', '>>>', '|', '^', '&' ];
-var ASSIGN_OPS_COMMUTATIVE = [ '*', '|', '^', '&' ];
+var ASSIGN_OPS = makePredicate("+ - / * % >> << >>> | ^ &");
+var ASSIGN_OPS_COMMUTATIVE = makePredicate("* | ^ &");
 OPT(AST_Assign, function(self, compressor){
 var def;
 if (compressor.option("dead_code")
@@ -5675,14 +5678,14 @@ merge(Compressor.prototype, {
 // x = expr1 OP expr2
 if (self.right.left instanceof AST_SymbolRef
 && self.right.left.name == self.left.name
-&& member(self.right.operator, ASSIGN_OPS)) {
+&& ASSIGN_OPS[self.right.operator]) {
 // x = x - 2 ---> x -= 2
 self.operator = self.right.operator + "=";
 self.right = self.right.right;
 }
 else if (self.right.right instanceof AST_SymbolRef
 && self.right.right.name == self.left.name
-&& member(self.right.operator, ASSIGN_OPS_COMMUTATIVE)
+&& ASSIGN_OPS_COMMUTATIVE[self.right.operator]
 && !self.right.left.has_side_effects(compressor)) {
 // x = 2 & x ---> x &= 2
 self.operator = self.right.operator + "=";
@@ -280,7 +280,7 @@ function OutputStream(options) {
 might_need_semicolon = false;
 
 if (prev == ":" && ch == "}" || (!ch || ";}".indexOf(ch) < 0) && prev != ";") {
-if (options.semicolons || requireSemicolonChars(ch)) {
+if (options.semicolons || requireSemicolonChars[ch]) {
 OUTPUT += ";";
 current_col++;
 current_pos++;
@@ -1244,7 +1244,7 @@ function OutputStream(options) {
 var expr = self.expression;
 expr.print(output);
 var prop = self.property;
-if (output.option("ie8") && RESERVED_WORDS(prop)) {
+if (output.option("ie8") && RESERVED_WORDS[prop]) {
 output.print("[");
 output.add_mapping(self.end);
 output.print_string(prop);
@@ -1356,7 +1356,7 @@ function OutputStream(options) {
 output.print_string(key);
 } else if ("" + +key == key && key >= 0) {
 output.print(make_num(key));
-} else if (RESERVED_WORDS(key) ? !output.option("ie8") : is_identifier_string(key)) {
+} else if (RESERVED_WORDS[key] ? !output.option("ie8") : is_identifier_string(key)) {
 if (quote && output.option("keep_quoted_props")) {
 output.print_string(key, quote);
 } else {
lib/parse.js
@@ -165,7 +165,7 @@ function is_unicode_connector_punctuation(ch) {
 };
 
 function is_identifier(name) {
-return !RESERVED_WORDS(name) && /^[a-z_$][a-z0-9_$]*$/i.test(name);
+return !RESERVED_WORDS[name] && /^[a-z_$][a-z0-9_$]*$/i.test(name);
 };
 
 function is_identifier_start(code) {
@@ -245,7 +245,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 var ch = S.text.charAt(S.pos++);
 if (signal_eof && !ch)
 throw EX_EOF;
-if (NEWLINE_CHARS(ch)) {
+if (NEWLINE_CHARS[ch]) {
 S.newline_before = S.newline_before || !in_string;
 ++S.line;
 S.col = 0;
@@ -272,7 +272,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 var text = S.text;
 for (var i = S.pos, n = S.text.length; i < n; ++i) {
 var ch = text[i];
-if (NEWLINE_CHARS(ch))
+if (NEWLINE_CHARS[ch])
 return i;
 }
 return -1;
@@ -292,9 +292,9 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 
 var prev_was_dot = false;
 function token(type, value, is_comment) {
-S.regex_allowed = ((type == "operator" && !UNARY_POSTFIX(value)) ||
-(type == "keyword" && KEYWORDS_BEFORE_EXPRESSION(value)) ||
-(type == "punc" && PUNC_BEFORE_EXPRESSION(value)));
+S.regex_allowed = ((type == "operator" && !UNARY_POSTFIX[value]) ||
+(type == "keyword" && KEYWORDS_BEFORE_EXPRESSION[value]) ||
+(type == "punc" && PUNC_BEFORE_EXPRESSION[value]));
 if (type == "punc" && value == ".") {
 prev_was_dot = true;
 } else if (!is_comment) {
@@ -324,7 +324,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 };
 
 function skip_whitespace() {
-while (WHITESPACE_CHARS(peek()))
+while (WHITESPACE_CHARS[peek()])
 next();
 };
 
@@ -424,7 +424,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 for (;;) {
 var ch = next(true, true);
 if (ch == "\\") ch = read_escaped_char(true);
-else if (NEWLINE_CHARS(ch)) parse_error("Unterminated string constant");
+else if (NEWLINE_CHARS[ch]) parse_error("Unterminated string constant");
 else if (ch == quote) break;
 ret += ch;
 }
@@ -476,7 +476,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 backslash = false;
 }
 }
-if (KEYWORDS(name) && escaped) {
+if (KEYWORDS[name] && escaped) {
 hex = name.charCodeAt(0).toString(16).toUpperCase();
 name = "\\u" + "0000".substr(hex.length) + hex + name.slice(1);
 }
@@ -485,7 +485,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 
 var read_regexp = with_eof_error("Unterminated regular expression", function(source) {
 var prev_backslash = false, ch, in_class = false;
-while ((ch = next(true))) if (NEWLINE_CHARS(ch)) {
+while ((ch = next(true))) if (NEWLINE_CHARS[ch]) {
 parse_error("Unexpected line terminator");
 } else if (prev_backslash) {
 source += "\\" + ch;
@@ -517,7 +517,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 function grow(op) {
 if (!peek()) return op;
 var bigger = op + peek();
-if (OPERATORS(bigger)) {
+if (OPERATORS[bigger]) {
 next();
 return grow(bigger);
 } else {
@@ -550,9 +550,9 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 function read_word() {
 var word = read_name();
 if (prev_was_dot) return token("name", word);
-return KEYWORDS_ATOM(word) ? token("atom", word)
-: !KEYWORDS(word) ? token("name", word)
-: OPERATORS(word) ? token("operator", word)
+return KEYWORDS_ATOM[word] ? token("atom", word)
+: !KEYWORDS[word] ? token("name", word)
+: OPERATORS[word] ? token("operator", word)
 : token("keyword", word);
 };
 
@@ -603,8 +603,8 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 }
 }
 if (is_digit(code)) return read_num();
-if (PUNC_CHARS(ch)) return token("punc", next());
-if (OPERATOR_CHARS(ch)) return read_operator();
+if (PUNC_CHARS[ch]) return token("punc", next());
+if (OPERATOR_CHARS[ch]) return read_operator();
 if (code == 92 || is_identifier_start(code)) return read_word();
 break;
 }
@@ -1321,7 +1321,7 @@ function parse($TEXT, options) {
 func.end = prev();
 return subscripts(func, allow_calls);
 }
-if (ATOMIC_START_TOKEN(S.token.type)) {
+if (ATOMIC_START_TOKEN[S.token.type]) {
 return subscripts(as_atom_node(), allow_calls);
 }
 unexpected();
@@ -1406,7 +1406,7 @@ function parse($TEXT, options) {
 var tmp = S.token;
 switch (tmp.type) {
 case "operator":
-if (!KEYWORDS(tmp.value)) unexpected();
+if (!KEYWORDS[tmp.value]) unexpected();
 case "num":
 case "string":
 case "name":
@@ -1504,7 +1504,7 @@ function parse($TEXT, options) {
 
 var maybe_unary = function(allow_calls) {
 var start = S.token;
-if (is("operator") && UNARY_PREFIX(start.value)) {
+if (is("operator") && UNARY_PREFIX[start.value]) {
 next();
 handle_regexp();
 var ex = make_unary(AST_UnaryPrefix, start, maybe_unary(allow_calls));
@@ -1513,7 +1513,7 @@ function parse($TEXT, options) {
 return ex;
 }
 var val = expr_atom(allow_calls);
-while (is("operator") && UNARY_POSTFIX(S.token.value) && !has_newline_before(S.token)) {
+while (is("operator") && UNARY_POSTFIX[S.token.value] && !has_newline_before(S.token)) {
 val = make_unary(AST_UnaryPostfix, S.token, val);
 val.start = start;
 val.end = S.token;
@@ -1585,7 +1585,7 @@ function parse($TEXT, options) {
 var maybe_assign = function(no_in) {
 var start = S.token;
 var left = maybe_conditional(no_in), val = S.token.value;
-if (is("operator") && ASSIGNMENT(val)) {
+if (is("operator") && ASSIGNMENT[val]) {
 if (is_assignable(left)) {
 next();
 return new AST_Assign({
@@ -344,7 +344,7 @@ function next_mangled_name(scope, options, def) {
 }
 while (true) {
 name = base54(++scope.cname);
-if (in_use[name] || !is_identifier(name) || member(name, options.reserved)) continue;
+if (in_use[name] || !is_identifier(name) || options._reserved[name]) continue;
 if (!names[name]) break;
 holes.push(scope.cname);
 }
@@ -387,6 +387,7 @@ function _default_mangler_options(options) {
 if (!Array.isArray(options.reserved)) options.reserved = [];
 // Never mangle arguments
 push_uniq(options.reserved, "arguments");
+options._reserved = makePredicate(options.reserved);
 return options;
 }
 
@@ -452,10 +453,9 @@ AST_Toplevel.DEFMETHOD("mangle_names", function(options){
 redefined.forEach(mangle);
 
 function mangle(def) {
-if (!member(def.name, options.reserved)) {
+if (options._reserved[def.name]) return;
 def.mangle(options);
-}
 }
 });
 
 AST_Toplevel.DEFMETHOD("find_colliding_names", function(options) {
@@ -504,7 +504,7 @@ AST_Toplevel.DEFMETHOD("expand_names", function(options) {
 function rename(def) {
 if (def.global && options.cache) return;
 if (def.unmangleable(options)) return;
-if (member(def.name, options.reserved)) return;
+if (options._reserved[def.name]) return;
 var d = def.redefined();
 def.name = d ? d.name : next_name();
 def.orig.forEach(function(sym) {
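Here the mangler builds options._reserved once from options.reserved via makePredicate(), so each reserved-name test becomes a constant-time property lookup instead of a member() scan. A small usage sketch under that assumption (the reserved names are illustrative):

    var options = _default_mangler_options({ reserved: [ "$", "jQuery" ] });
    options._reserved["jQuery"];    // true
    options._reserved["arguments"]; // true, always added by push_uniq() above
    options._reserved["foo"];       // undefined, so the name may be mangled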
lib/utils.js
@@ -145,7 +145,7 @@ var MAP = (function(){
 }
 return is_last;
 };
-if (a instanceof Array) {
+if (Array.isArray(a)) {
 if (backwards) {
 for (i = a.length; --i >= 0;) if (doit()) break;
 ret.reverse();
@@ -210,51 +210,14 @@ function mergeSort(array, cmp) {
 return _ms(array);
 };
 
-// this function is taken from Acorn [1], written by Marijn Haverbeke
-// [1] https://github.com/marijnh/acorn
 function makePredicate(words) {
-if (!(words instanceof Array)) words = words.split(" ");
-var f = "", cats = [];
-out: for (var i = 0; i < words.length; ++i) {
-for (var j = 0; j < cats.length; ++j)
-if (cats[j][0].length == words[i].length) {
-cats[j].push(words[i]);
-continue out;
-}
-cats.push([words[i]]);
-}
-function quote(word) {
-return JSON.stringify(word).replace(/[\u2028\u2029]/g, function(s) {
-switch (s) {
-case "\u2028": return "\\u2028";
-case "\u2029": return "\\u2029";
-}
-return s;
-});
-}
-function compareTo(arr) {
-if (arr.length == 1) return f += "return str === " + quote(arr[0]) + ";";
-f += "switch(str){";
-for (var i = 0; i < arr.length; ++i) f += "case " + quote(arr[i]) + ":";
-f += "return true}return false;";
-}
-// When there are more than three length categories, an outer
-// switch first dispatches on the lengths, to save on comparisons.
-if (cats.length > 3) {
-cats.sort(function(a, b) {return b.length - a.length;});
-f += "switch(str.length){";
-for (var i = 0; i < cats.length; ++i) {
-var cat = cats[i];
-f += "case " + cat[0].length + ":";
-compareTo(cat);
-}
-f += "}";
-// Otherwise, simply generate a flat `switch` statement.
-} else {
-compareTo(words);
-}
-return new Function("str", f);
-};
+if (!Array.isArray(words)) words = words.split(" ");
+var map = Object.create(null);
+words.forEach(function(word) {
+map[word] = true;
+});
+return map;
+}
 
 function all(array, predicate) {
 for (var i = array.length; --i >= 0;)
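After this rewrite makePredicate() no longer compiles a matcher with new Function(); it returns a prototype-less object used as a set, which also means keys inherited from Object.prototype can never produce false positives. A quick sketch of how the new return value behaves (assuming the implementation above):

    var RESERVED = makePredicate("if else return");
    RESERVED["return"];   // true
    RESERVED["foo"];      // undefined (falsy)
    RESERVED["toString"]; // undefined, no inherited keys thanks to Object.create(null)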