Compare commits

24 Commits

Author SHA1 Message Date
Alex Lam S.L
59eecb6bf5 v3.3.22 2018-04-20 19:50:16 +00:00
Alex Lam S.L
d83c6490ab fix corner case in strip_func_ids() (#3090) 2018-04-19 04:51:42 +08:00
Alex Lam S.L
7362f57966 improve performance when handling unused variables in collapse_vars (#3084)
fixes #3082
2018-04-15 12:38:31 +08:00
Alex Lam S.L
eaa2c1f6af v3.3.21 2018-04-12 07:08:53 +00:00
Alex Lam S.L
6a916523d4 fix inline of catch-scoped variables (#3077)
fixes #3076
2018-04-11 15:44:43 +08:00
Alex Lam S.L
ba7069d52b suppress hoist_props for embedded assignments (#3074) 2018-04-11 05:19:16 +08:00
Alex Lam S.L
4dd7d0e39b extend hoist_props (#3073)
- handle `AST_Assign` the same way as `AST_VarDef`
- inject `AST_Var` as succeeding statement

fixes #3071
2018-04-11 02:48:15 +08:00
Alex Lam S.L
90199d0a96 extend join_vars on object assignments (#3072) 2018-04-11 01:35:42 +08:00
Alex Lam S.L
b82fd0ad41 handle flow control in loops with reduce_vars (#3069)
fixes #3068
2018-04-10 06:51:03 +08:00
Alex Lam S.L
183da16896 handle pure_funcs under inline & reduce_vars correctly (#3066)
fixes #3065
2018-04-10 02:46:38 +08:00
Alex Lam S.L
87857b0f1b v3.3.20 2018-04-08 03:06:15 +08:00
Alex Lam S.L
e5f6a88233 fix corner case in reuse of mangle options (#3062) 2018-04-08 02:29:37 +08:00
Alex Lam S.L
8d0b00317e v3.3.19 2018-04-07 22:27:55 +08:00
Alex Lam S.L
db49daf365 mangle Object.defineProperty() (#3059)
fixes #869
2018-04-06 17:10:36 +08:00
Alex Lam S.L
923deeff35 support inline source map from multiple files (#3058)
fixes #145
2018-04-06 16:04:15 +08:00
Alex Lam S.L
0b62a28b47 improve usability of includeSources (#3057)
Exclude source contents from input source map if `includeSources=false`

fixes #3041
2018-04-06 13:32:26 +08:00
Alex Lam S.L
44116c6d2b fix AST corruption during inline of simple return (#3056)
fixes #3054
2018-04-06 05:39:07 +08:00
Alex Lam S.L
b5bab254ce speed up has_parens() (take 2) (#3052)
fixes #3050
2018-04-05 04:12:04 +08:00
Alex Lam S.L
81603ecd15 improve performance through makePredicate() (#3048) 2018-04-03 15:15:01 +08:00
Alex Lam S.L
e67553fa55 fix tree traversal on AST_Do (#3047)
fixes #3046
2018-04-02 22:31:23 +08:00
Alex Lam S.L
fcf542f262 v3.3.18 2018-04-02 04:26:28 +00:00
b-fuze
8adfc29f91 Don't load source map until the JS source is fully received (#3040) 2018-03-31 20:26:40 +09:00
Alex Lam S.L
02f47e1713 give sensible error against invalid input source map (#3044) 2018-03-31 18:48:20 +09:00
Alex Lam S.L
07f64d4050 fix escape analysis on AST_New (#3043)
fixes #3042
2018-03-31 15:03:46 +09:00
25 changed files with 1017 additions and 294 deletions

View File

@@ -685,7 +685,8 @@ If you're using the `X-SourceMap` header instead, you can just omit `sourceMap.u
pass `pure_funcs: [ 'Math.floor' ]` to let it know that this
function won't produce any side effect, in which case the whole
statement would get discarded. The current implementation adds some
overhead (compression will be slower).
overhead (compression will be slower). Make sure symbols under `pure_funcs`
are also under `mangle.reserved` to avoid mangling.
- `pure_getters` (default: `"strict"`) -- If you pass `true` for
this, UglifyJS will assume that object property access
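The README passage above recommends listing `pure_funcs` names under `mangle.reserved` so the compressor can still match them after mangling. A minimal sketch of such a configuration via the public `minify()` API (the `myPureFn` name and sample input are hypothetical):

var UglifyJS = require("uglify-js");
// hypothetical input: a function we declare side-effect free
var code = 'function myPureFn(x) { return 2 * x; } myPureFn(21); console.log("kept");';
var result = UglifyJS.minify(code, {
    toplevel: true,
    compress: {
        // statements consisting solely of calls to these names may be discarded
        pure_funcs: [ "Math.floor", "myPureFn" ]
    },
    mangle: {
        // keep the same names unmangled so pure_funcs can still recognize them
        reserved: [ "myPureFn" ]
    }
});
if (result.error) throw result.error;
console.log(result.code); // the bare myPureFn(21) statement is expected to be dropped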

View File

@@ -46,7 +46,7 @@ program.option("--name-cache <file>", "File to hold mangled name mappings.");
program.option("--rename", "Force symbol expansion.");
program.option("--no-rename", "Disable symbol expansion.");
program.option("--self", "Build UglifyJS as a library (implies --wrap UglifyJS)");
program.option("--source-map [options]", "Enable source map/specify source map options.", parse_source_map());
program.option("--source-map [options]", "Enable source map/specify source map options.", parse_js());
program.option("--timings", "Display operations run time on STDERR.")
program.option("--toplevel", "Compress and/or mangle variables in toplevel scope.");
program.option("--verbose", "Print diagnostic messages.");
@@ -176,6 +176,11 @@ function run() {
UglifyJS.AST_Node.warn_function = function(msg) {
print_error("WARN: " + msg);
};
var content = program.sourceMap && program.sourceMap.content;
if (content && content != "inline") {
print_error("INFO: Using input source map: " + content);
options.sourceMap.content = read_file(content, content);
}
if (program.timings) options.timings = true;
try {
if (program.parse) {
@@ -377,19 +382,6 @@ function parse_js(flag) {
}
}
function parse_source_map() {
var parse = parse_js();
return function(value, options) {
var hasContent = options && "content" in options;
var settings = parse(value, options);
if (!hasContent && settings.content && settings.content != "inline") {
print_error("INFO: Using input source map: " + settings.content);
settings.content = read_file(settings.content, settings.content);
}
return settings;
}
}
function skip_key(key) {
return skip_keys.indexOf(key) >= 0;
}

View File

@@ -428,14 +428,14 @@ merge(Compressor.prototype, {
var parent = tw.parent(level);
if (value && value.is_constant()) return;
if (parent instanceof AST_Assign && parent.operator == "=" && node === parent.right
|| parent instanceof AST_Call && node !== parent.expression
|| parent instanceof AST_Call && (node !== parent.expression || parent instanceof AST_New)
|| parent instanceof AST_Exit && node === parent.value && node.scope !== d.scope
|| parent instanceof AST_VarDef && node === parent.value) {
if (depth > 1 && !(value && value.is_constant_expression(scope))) depth = 1;
if (!d.escaped || d.escaped > depth) d.escaped = depth;
return;
} else if (parent instanceof AST_Array
|| parent instanceof AST_Binary && lazy_op(parent.operator)
|| parent instanceof AST_Binary && lazy_op[parent.operator]
|| parent instanceof AST_Conditional && node !== parent.condition
|| parent instanceof AST_Sequence && node === parent.tail_node()) {
mark_escaped(tw, d, scope, parent, parent, level + 1, depth);
@@ -489,7 +489,7 @@ merge(Compressor.prototype, {
return true;
});
def(AST_Binary, function(tw) {
if (!lazy_op(this.operator)) return;
if (!lazy_op[this.operator]) return;
this.left.walk(tw);
push(tw);
this.right.walk(tw);
@@ -535,6 +535,10 @@ merge(Compressor.prototype, {
tw.in_loop = this;
push(tw);
this.body.walk(tw);
if (has_break_or_continue(this)) {
pop(tw);
push(tw);
}
this.condition.walk(tw);
pop(tw);
tw.in_loop = saved_loop;
@@ -544,19 +548,17 @@ merge(Compressor.prototype, {
if (this.init) this.init.walk(tw);
var saved_loop = tw.in_loop;
tw.in_loop = this;
if (this.condition) {
push(tw);
this.condition.walk(tw);
pop(tw);
}
push(tw);
if (this.condition) this.condition.walk(tw);
this.body.walk(tw);
pop(tw);
if (this.step) {
push(tw);
if (has_break_or_continue(this)) {
pop(tw);
push(tw);
}
this.step.walk(tw);
pop(tw);
}
pop(tw);
tw.in_loop = saved_loop;
return true;
});
@@ -714,12 +716,11 @@ merge(Compressor.prototype, {
}
}
});
def(AST_While, function(tw) {
def(AST_While, function(tw, descend) {
var saved_loop = tw.in_loop;
tw.in_loop = this;
push(tw);
this.condition.walk(tw);
this.body.walk(tw);
descend();
pop(tw);
tw.in_loop = saved_loop;
return true;
@@ -895,7 +896,7 @@ merge(Compressor.prototype, {
var global_names = makePredicate("Array Boolean clearInterval clearTimeout console Date decodeURI decodeURIComponent encodeURI encodeURIComponent Error escape eval EvalError Function isFinite isNaN JSON Math Number parseFloat parseInt RangeError ReferenceError RegExp Object setInterval setTimeout String SyntaxError TypeError unescape URIError");
AST_SymbolRef.DEFMETHOD("is_declared", function(compressor) {
return !this.definition().undeclared
|| compressor.option("unsafe") && global_names(this.name);
|| compressor.option("unsafe") && global_names[this.name];
});
var identifier_atom = makePredicate("Infinity NaN undefined");
@@ -988,7 +989,7 @@ merge(Compressor.prototype, {
}
// Stop only if candidate is found within conditional branches
if (!stop_if_hit
&& (parent instanceof AST_Binary && lazy_op(parent.operator) && parent.left !== node
&& (parent instanceof AST_Binary && lazy_op[parent.operator] && parent.left !== node
|| parent instanceof AST_Conditional && parent.condition !== node
|| parent instanceof AST_If && parent.condition !== node)) {
stop_if_hit = parent;
@@ -1296,7 +1297,7 @@ merge(Compressor.prototype, {
return node;
}
if (parent instanceof AST_Binary) {
if (write_only && (!lazy_op(parent.operator) || parent.left === node)) {
if (write_only && (!lazy_op[parent.operator] || parent.left === node)) {
return find_stop(parent, level + 1, write_only);
}
return node;
@@ -1347,8 +1348,9 @@ merge(Compressor.prototype, {
if (expr instanceof AST_VarDef) {
var def = expr.name.definition();
if (!member(expr.name, def.orig)) return;
var declared = def.orig.length - def.eliminated;
var referenced = def.references.length - def.replaced;
if (!referenced) return;
var declared = def.orig.length - def.eliminated;
if (declared > 1 && !(expr.name instanceof AST_SymbolFunarg)
|| (referenced > 1 ? mangleable_var(expr) : !compressor.exposed(def))) {
return make_node(AST_SymbolRef, expr.name, expr.name);
@@ -1881,9 +1883,6 @@ merge(Compressor.prototype, {
}
function join_object_assignments(defn, body) {
if (!(defn instanceof AST_Definitions)) return;
var def = defn.definitions[defn.definitions.length - 1];
if (!(def.value instanceof AST_Object)) return;
var exprs;
if (body instanceof AST_Assign) {
exprs = [ body ];
@@ -1891,6 +1890,23 @@ merge(Compressor.prototype, {
exprs = body.expressions.slice();
}
if (!exprs) return;
if (defn instanceof AST_Definitions) {
var def = defn.definitions[defn.definitions.length - 1];
if (trim_object_assignments(def.name, def.value, exprs)) return exprs;
}
for (var i = exprs.length - 1; --i >= 0;) {
var expr = exprs[i];
if (!(expr instanceof AST_Assign)) continue;
if (expr.operator != "=") continue;
if (!(expr.left instanceof AST_SymbolRef)) continue;
var tail = exprs.slice(i + 1);
if (!trim_object_assignments(expr.left, expr.right, tail)) continue;
return exprs.slice(0, i + 1).concat(tail);
}
}
function trim_object_assignments(name, value, exprs) {
if (!(value instanceof AST_Object)) return;
var trimmed = false;
do {
var node = exprs[0];
@@ -1899,7 +1915,7 @@ merge(Compressor.prototype, {
if (!(node.left instanceof AST_PropAccess)) break;
var sym = node.left.expression;
if (!(sym instanceof AST_SymbolRef)) break;
if (def.name.name != sym.name) break;
if (name.name != sym.name) break;
if (!node.right.is_constant_expression(scope)) break;
var prop = node.left.property;
if (prop instanceof AST_Node) {
@@ -1912,15 +1928,15 @@ merge(Compressor.prototype, {
} : function(node) {
return node.key.name != prop;
};
if (!all(def.value.properties, diff)) break;
def.value.properties.push(make_node(AST_ObjectKeyVal, node, {
if (!all(value.properties, diff)) break;
value.properties.push(make_node(AST_ObjectKeyVal, node, {
key: prop,
value: node.right
}));
exprs.shift();
trimmed = true;
} while (exprs.length);
return trimmed && exprs;
return trimmed;
}
function join_consecutive_vars(statements) {
@@ -2130,15 +2146,15 @@ merge(Compressor.prototype, {
// methods to determine whether an expression has a boolean result type
(function(def){
var unary_bool = [ "!", "delete" ];
var binary_bool = [ "in", "instanceof", "==", "!=", "===", "!==", "<", "<=", ">=", ">" ];
var unary_bool = makePredicate("! delete");
var binary_bool = makePredicate("in instanceof == != === !== < <= >= >");
def(AST_Node, return_false);
def(AST_UnaryPrefix, function(){
return member(this.operator, unary_bool);
return unary_bool[this.operator];
});
def(AST_Binary, function(){
return member(this.operator, binary_bool)
|| lazy_op(this.operator)
return binary_bool[this.operator]
|| lazy_op[this.operator]
&& this.left.is_boolean()
&& this.right.is_boolean();
});
@@ -2163,16 +2179,16 @@ merge(Compressor.prototype, {
def(AST_Number, return_true);
var unary = makePredicate("+ - ~ ++ --");
def(AST_Unary, function(){
return unary(this.operator);
return unary[this.operator];
});
var binary = makePredicate("- * / % & | ^ << >> >>>");
def(AST_Binary, function(compressor){
return binary(this.operator) || this.operator == "+"
return binary[this.operator] || this.operator == "+"
&& this.left.is_number(compressor)
&& this.right.is_number(compressor);
});
def(AST_Assign, function(compressor){
return binary(this.operator.slice(0, -1))
return binary[this.operator.slice(0, -1)]
|| this.operator == "=" && this.right.is_number(compressor);
});
def(AST_Sequence, function(compressor){
@@ -2213,7 +2229,7 @@ merge(Compressor.prototype, {
var unary_side_effects = makePredicate("delete ++ --");
function is_lhs(node, parent) {
if (parent instanceof AST_Unary && unary_side_effects(parent.operator)) return parent.expression;
if (parent instanceof AST_Unary && unary_side_effects[parent.operator]) return parent.expression;
if (parent instanceof AST_Assign && parent.left === node) return node;
}
@@ -2416,7 +2432,7 @@ merge(Compressor.prototype, {
} else {
return this instanceof AST_UnaryPrefix
&& this.expression instanceof AST_Constant
&& unaryPrefix(this.operator);
&& unaryPrefix[this.operator];
}
});
def(AST_Statement, function(){
@@ -2486,7 +2502,7 @@ merge(Compressor.prototype, {
&& e.fixed_value() instanceof AST_Lambda)) {
return typeof function(){};
}
if (!non_converting_unary(this.operator)) depth++;
if (!non_converting_unary[this.operator]) depth++;
e = e._eval(compressor, cached, depth);
if (e === this.expression) return this;
switch (this.operator) {
@@ -2505,7 +2521,7 @@ merge(Compressor.prototype, {
});
var non_converting_binary = makePredicate("&& || === !==");
def(AST_Binary, function(compressor, cached, depth) {
if (!non_converting_binary(this.operator)) depth++;
if (!non_converting_binary[this.operator]) depth++;
var left = this.left._eval(compressor, cached, depth);
if (left === this.left) return this;
var right = this.right._eval(compressor, cached, depth);
@@ -2608,7 +2624,8 @@ merge(Compressor.prototype, {
var exp = this.expression;
var val;
if (is_undeclared_ref(exp)) {
if (!(static_values[exp.name] || return_false)(key)) return this;
var static_value = static_values[exp.name];
if (!static_value || !static_value[key]) return this;
val = global_objs[exp.name];
} else {
val = exp._eval(compressor, cached, depth + 1);
@@ -2637,11 +2654,14 @@ merge(Compressor.prototype, {
var val;
var e = exp.expression;
if (is_undeclared_ref(e)) {
if (!(static_fns[e.name] || return_false)(key)) return this;
var static_fn = static_fns[e.name];
if (!static_fn || !static_fn[key]) return this;
val = global_objs[e.name];
} else {
val = e._eval(compressor, cached, depth + 1);
if (val === e || !(val && native_fns[val.constructor.name] || return_false)(key)) return this;
if (val === e || !val) return this;
var native_fn = native_fns[val.constructor.name];
if (!native_fn || !native_fn[key]) return this;
}
var args = [];
for (var i = 0, len = this.args.length; i < len; i++) {
@@ -2749,11 +2769,10 @@ merge(Compressor.prototype, {
AST_Call.DEFMETHOD("is_expr_pure", function(compressor) {
if (compressor.option("unsafe")) {
var expr = this.expression;
if (is_undeclared_ref(expr) && global_pure_fns(expr.name)) return true;
if (expr instanceof AST_Dot
&& is_undeclared_ref(expr.expression)
&& (static_fns[expr.expression.name] || return_false)(expr.property)) {
return true;
if (is_undeclared_ref(expr) && global_pure_fns[expr.name]) return true;
if (expr instanceof AST_Dot && is_undeclared_ref(expr.expression)) {
var static_fn = static_fns[expr.expression.name];
return static_fn && static_fn[expr.property];
}
}
return this.pure || !compressor.pure_funcs(this);
@@ -2762,21 +2781,21 @@ merge(Compressor.prototype, {
AST_Dot.DEFMETHOD("is_call_pure", function(compressor) {
if (!compressor.option("unsafe")) return;
var expr = this.expression;
var fns = return_false;
var map;
if (expr instanceof AST_Array) {
fns = native_fns.Array;
map = native_fns.Array;
} else if (expr.is_boolean()) {
fns = native_fns.Boolean;
map = native_fns.Boolean;
} else if (expr.is_number(compressor)) {
fns = native_fns.Number;
map = native_fns.Number;
} else if (expr instanceof AST_RegExp) {
fns = native_fns.RegExp;
map = native_fns.RegExp;
} else if (expr.is_string(compressor)) {
fns = native_fns.String;
map = native_fns.String;
} else if (!this.may_throw_on_access(compressor)) {
fns = native_fns.Object;
map = native_fns.Object;
}
return fns(this.property);
return map && map[this.property];
});
// determine if expression has side effects
@@ -2841,7 +2860,7 @@ merge(Compressor.prototype, {
|| this.alternative.has_side_effects(compressor);
});
def(AST_Unary, function(compressor){
return unary_side_effects(this.operator)
return unary_side_effects[this.operator]
|| this.expression.has_side_effects(compressor);
});
def(AST_SymbolRef, function(compressor){
@@ -3621,28 +3640,50 @@ merge(Compressor.prototype, {
var top_retain = self instanceof AST_Toplevel && compressor.top_retain || return_false;
var defs_by_id = Object.create(null);
return self.transform(new TreeTransformer(function(node, descend) {
if (node instanceof AST_VarDef) {
var sym = node.name, def, value;
if (sym.scope === self
&& (def = sym.definition()).escaped != 1
&& !def.assignments
&& !def.direct_access
&& !def.single_use
&& !top_retain(def)
&& (value = sym.fixed_value()) === node.value
&& value instanceof AST_Object) {
descend(node, this);
var defs = new Dictionary();
var assignments = [];
value.properties.forEach(function(prop) {
assignments.push(make_node(AST_VarDef, node, {
name: make_sym(prop.key),
value: prop.value
}));
if (node instanceof AST_Assign
&& node.operator == "="
&& node.write_only
&& can_hoist(node.left, node.right, 1)) {
descend(node, this);
var defs = new Dictionary();
var assignments = [];
var decls = [];
node.right.properties.forEach(function(prop) {
var decl = make_sym(node.left, prop.key);
decls.push(make_node(AST_VarDef, node, {
name: decl,
value: null
}));
var sym = make_node(AST_SymbolRef, node, {
name: decl.name,
scope: self,
thedef: decl.definition()
});
defs_by_id[def.id] = defs;
return MAP.splice(assignments);
}
sym.reference({});
assignments.push(make_node(AST_Assign, node, {
operator: "=",
left: sym,
right: prop.value
}));
});
defs_by_id[node.left.definition().id] = defs;
self.body.splice(self.body.indexOf(this.stack[1]) + 1, 0, make_node(AST_Var, node, {
definitions: decls
}));
return make_sequence(node, assignments);
}
if (node instanceof AST_VarDef && can_hoist(node.name, node.value, 0)) {
descend(node, this);
var defs = new Dictionary();
var var_defs = [];
node.value.properties.forEach(function(prop) {
var_defs.push(make_node(AST_VarDef, node, {
name: make_sym(node.name, prop.key),
value: prop.value
}));
});
defs_by_id[node.name.definition().id] = defs;
return MAP.splice(var_defs);
}
if (node instanceof AST_PropAccess && node.expression instanceof AST_SymbolRef) {
var defs = defs_by_id[node.expression.definition().id];
@@ -3658,8 +3699,20 @@ merge(Compressor.prototype, {
}
}
function make_sym(key) {
var new_var = make_node(sym.CTOR, sym, {
function can_hoist(sym, right, count) {
if (sym.scope !== self) return;
var def = sym.definition();
if (def.assignments != count) return;
if (def.direct_access) return;
if (def.escaped == 1) return;
if (def.single_use) return;
if (top_retain(def)) return;
if (sym.fixed_value() !== right) return;
return right instanceof AST_Object;
}
function make_sym(sym, key) {
var new_var = make_node(AST_SymbolVar, sym, {
name: self.make_var_name(sym.name + "_" + key),
scope: self
});
@@ -3731,7 +3784,7 @@ merge(Compressor.prototype, {
def(AST_Binary, function(compressor, first_in_statement){
var right = this.right.drop_side_effect_free(compressor);
if (!right) return this.left.drop_side_effect_free(compressor, first_in_statement);
if (lazy_op(this.operator)) {
if (lazy_op[this.operator]) {
if (right === this.right) return this;
var node = this.clone();
node.right = right;
@@ -3776,7 +3829,7 @@ merge(Compressor.prototype, {
return node;
});
def(AST_Unary, function(compressor, first_in_statement){
if (unary_side_effects(this.operator)) {
if (unary_side_effects[this.operator]) {
this.write_only = !this.expression.has_side_effects(compressor);
return this;
}
@@ -3845,6 +3898,20 @@ merge(Compressor.prototype, {
return compressor.option("loops") ? make_node(AST_For, self, self).optimize(compressor) : self;
});
function has_break_or_continue(loop, parent) {
var found = false;
var tw = new TreeWalker(function(node) {
if (found || node instanceof AST_Scope) return true;
if (node instanceof AST_LoopControl && tw.loopcontrol_target(node) === loop) {
return found = true;
}
});
if (parent instanceof AST_LabeledStatement) tw.push(parent);
tw.push(loop);
loop.body.walk(tw);
return found;
}
OPT(AST_Do, function(self, compressor){
if (!compressor.option("loops")) return self;
var cond = self.condition.is_truthy() || self.condition.tail_node().evaluate(compressor);
@@ -3859,22 +3926,16 @@ merge(Compressor.prototype, {
]
})
}).optimize(compressor);
var has_loop_control = false;
var tw = new TreeWalker(function(node) {
if (node instanceof AST_Scope || has_loop_control) return true;
if (node instanceof AST_LoopControl && tw.loopcontrol_target(node) === self)
return has_loop_control = true;
});
var parent = compressor.parent();
(parent instanceof AST_LabeledStatement ? parent : self).walk(tw);
if (!has_loop_control) return make_node(AST_BlockStatement, self.body, {
body: [
self.body,
make_node(AST_SimpleStatement, self.condition, {
body: self.condition
})
]
}).optimize(compressor);
if (!has_break_or_continue(self, compressor.parent())) {
return make_node(AST_BlockStatement, self.body, {
body: [
self.body,
make_node(AST_SimpleStatement, self.condition, {
body: self.condition
})
]
}).optimize(compressor);
}
}
if (self.body instanceof AST_SimpleStatement) return make_node(AST_For, self, {
condition: make_sequence(self.condition, [
@@ -4581,16 +4642,22 @@ merge(Compressor.prototype, {
}
}
var stat = is_func && fn.body[0];
if (compressor.option("inline") && stat instanceof AST_Return) {
var can_inline = compressor.option("inline") && !self.is_expr_pure(compressor);
if (can_inline && stat instanceof AST_Return) {
var value = stat.value;
if (!value || value.is_constant_expression()) {
var args = self.args.concat(value || make_node(AST_Undefined, self));
if (value) {
value = value.clone(true);
} else {
value = make_node(AST_Undefined, self);
}
var args = self.args.concat(value);
return make_sequence(self, args).optimize(compressor);
}
}
if (is_func) {
var def, value, scope, in_loop, level = -1;
if (compressor.option("inline")
if (can_inline
&& !fn.uses_arguments
&& !fn.uses_eval
&& !(fn.name && fn instanceof AST_Function)
@@ -4679,7 +4746,7 @@ merge(Compressor.prototype, {
if (arg.__unused) continue;
if (!safe_to_inject
|| catches[arg.name]
|| identifier_atom(arg.name)
|| identifier_atom[arg.name]
|| scope.var_names()[arg.name]) {
return false;
}
@@ -4697,7 +4764,7 @@ merge(Compressor.prototype, {
for (var j = stat.definitions.length; --j >= 0;) {
var name = stat.definitions[j].name;
if (catches[name.name]
|| identifier_atom(name.name)
|| identifier_atom[name.name]
|| scope.var_names()[name.name]) {
return false;
}
@@ -4775,6 +4842,11 @@ merge(Compressor.prototype, {
for (var j = 0, defs = stat.definitions.length; j < defs; j++) {
var var_def = stat.definitions[j];
var name = var_def.name;
var redef = name.definition().redefined();
if (redef) {
name = name.clone();
name.thedef = redef;
}
append_var(decls, expressions, name, var_def.value);
if (in_loop && all(fn.argnames, function(argname) {
return argname.name != name.name;
@@ -5012,7 +5084,7 @@ merge(Compressor.prototype, {
self.right = tmp;
}
}
if (commutativeOperators(self.operator)) {
if (commutativeOperators[self.operator]) {
if (self.right.is_constant()
&& !self.left.is_constant()) {
// if right is a constant, whatever side effects the
@@ -5405,7 +5477,7 @@ merge(Compressor.prototype, {
// "x" + (y + "z")==> "x" + y + "z"
if (self.right instanceof AST_Binary
&& self.right.operator == self.operator
&& (lazy_op(self.operator)
&& (lazy_op[self.operator]
|| (self.operator == "+"
&& (self.right.left.is_string(compressor)
|| (self.left.is_string(compressor)
@@ -5463,11 +5535,12 @@ merge(Compressor.prototype, {
return make_node(AST_Infinity, self).optimize(compressor);
}
}
if (compressor.option("reduce_vars")
&& is_lhs(self, compressor.parent()) !== self) {
var parent = compressor.parent();
if (compressor.option("reduce_vars") && is_lhs(self, parent) !== self) {
var d = self.definition();
var fixed = self.fixed_value();
var single_use = d.single_use;
var single_use = d.single_use
&& !(parent instanceof AST_Call && parent.is_expr_pure(compressor));
if (single_use && fixed instanceof AST_Lambda) {
if (d.scope !== self.scope
&& (!compressor.option("reduce_funcs")
@@ -5656,8 +5729,8 @@ merge(Compressor.prototype, {
return reachable;
}
var ASSIGN_OPS = [ '+', '-', '/', '*', '%', '>>', '<<', '>>>', '|', '^', '&' ];
var ASSIGN_OPS_COMMUTATIVE = [ '*', '|', '^', '&' ];
var ASSIGN_OPS = makePredicate("+ - / * % >> << >>> | ^ &");
var ASSIGN_OPS_COMMUTATIVE = makePredicate("* | ^ &");
OPT(AST_Assign, function(self, compressor){
var def;
if (compressor.option("dead_code")
@@ -5686,14 +5759,14 @@ merge(Compressor.prototype, {
// x = expr1 OP expr2
if (self.right.left instanceof AST_SymbolRef
&& self.right.left.name == self.left.name
&& member(self.right.operator, ASSIGN_OPS)) {
&& ASSIGN_OPS[self.right.operator]) {
// x = x - 2 ---> x -= 2
self.operator = self.right.operator + "=";
self.right = self.right.right;
}
else if (self.right.right instanceof AST_SymbolRef
&& self.right.right.name == self.left.name
&& member(self.right.operator, ASSIGN_OPS_COMMUTATIVE)
&& ASSIGN_OPS_COMMUTATIVE[self.right.operator]
&& !self.right.left.has_side_effects(compressor)) {
// x = 2 & x ---> x &= 2
self.operator = self.right.operator + "=";

View File

@@ -7,15 +7,23 @@ var to_base64 = typeof btoa == "undefined" ? function(str) {
return new Buffer(str).toString("base64");
} : btoa;
function read_source_map(code) {
function read_source_map(name, code) {
var match = /\n\/\/# sourceMappingURL=data:application\/json(;.*?)?;base64,(.*)/.exec(code);
if (!match) {
AST_Node.warn("inline source map not found");
AST_Node.warn("inline source map not found: " + name);
return null;
}
return to_ascii(match[2]);
}
function parse_source_map(content) {
try {
return JSON.parse(content);
} catch (ex) {
throw new Error("invalid input source map: " + content);
}
}
function set_shorthand(name, options, keys) {
if (options[name]) {
keys.forEach(function(key) {
@@ -113,7 +121,7 @@ function minify(files, options) {
};
}
if (timings) timings.parse = Date.now();
var toplevel;
var source_maps, toplevel;
if (files instanceof AST_Toplevel) {
toplevel = files;
} else {
@@ -122,13 +130,23 @@ function minify(files, options) {
}
options.parse = options.parse || {};
options.parse.toplevel = null;
var source_map_content = options.sourceMap && options.sourceMap.content;
if (typeof source_map_content == "string" && source_map_content != "inline") {
source_map_content = parse_source_map(source_map_content);
}
source_maps = source_map_content && Object.create(null);
for (var name in files) if (HOP(files, name)) {
options.parse.filename = name;
options.parse.toplevel = parse(files[name], options.parse);
if (options.sourceMap && options.sourceMap.content == "inline") {
if (Object.keys(files).length > 1)
throw new Error("inline source map only works with singular input");
options.sourceMap.content = read_source_map(files[name]);
if (source_maps) {
if (source_map_content == "inline") {
var inlined_content = read_source_map(name, files[name]);
if (inlined_content) {
source_maps[name] = parse_source_map(inlined_content);
}
} else {
source_maps[name] = source_map_content;
}
}
}
toplevel = options.parse.toplevel;
@@ -164,12 +182,9 @@ function minify(files, options) {
}
if (!HOP(options.output, "code") || options.output.code) {
if (options.sourceMap) {
if (typeof options.sourceMap.content == "string") {
options.sourceMap.content = JSON.parse(options.sourceMap.content);
}
options.output.source_map = SourceMap({
file: options.sourceMap.filename,
orig: options.sourceMap.content,
orig: source_maps,
root: options.sourceMap.root
});
if (options.sourceMap.includeSources) {
@@ -178,6 +193,8 @@ function minify(files, options) {
} else for (var name in files) if (HOP(files, name)) {
options.output.source_map.get().setSourceContent(name, files[name]);
}
} else {
options.output.source_map.get()._sourcesContents = null;
}
}
delete options.output.ast;
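With the changes above, `minify()` collects an input source map per file (`source_maps`) instead of mutating a single `options.sourceMap.content`, so `content: "inline"` now works with multiple input files; inputs without an inline map only trigger the "inline source map not found" warning, as exercised by the updated mocha tests below. A minimal sketch under those assumptions (file names are hypothetical):

var UglifyJS = require("uglify-js");
var fs = require("fs");
// each input may carry its own //# sourceMappingURL=data:application/json;base64,... comment
var files = {
    "dist/a.js": fs.readFileSync("dist/a.js", "utf8"),
    "dist/b.js": fs.readFileSync("dist/b.js", "utf8")
};
var result = UglifyJS.minify(files, {
    sourceMap: {
        content: "inline",  // read the inline map of every input that has one
        url: "inline"       // append the combined map to the minified output
    }
});
if (result.error) throw result.error;
fs.writeFileSync("dist/bundle.min.js", result.code);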

View File

@@ -197,6 +197,7 @@ function OutputStream(options) {
/* -----[ beautification/minification ]----- */
var has_parens = false;
var might_need_space = false;
var might_need_semicolon = false;
var might_add_newline = 0;
@@ -280,7 +281,7 @@ function OutputStream(options) {
might_need_semicolon = false;
if (prev == ":" && ch == "}" || (!ch || ";}".indexOf(ch) < 0) && prev != ";") {
if (options.semicolons || requireSemicolonChars(ch)) {
if (options.semicolons || requireSemicolonChars[ch]) {
OUTPUT += ";";
current_col++;
current_pos++;
@@ -340,6 +341,7 @@ function OutputStream(options) {
}
OUTPUT += str;
has_parens = str[str.length - 1] == "(";
current_pos += str.length;
var a = str.split(/\r?\n/), n = a.length - 1;
current_line += n;
@@ -576,7 +578,7 @@ function OutputStream(options) {
indentation : function() { return indentation },
current_width : function() { return current_col - indentation },
should_break : function() { return options.width && this.current_width() >= options.width },
has_parens : function() { return OUTPUT[OUTPUT.length - 1] == "(" },
has_parens : function() { return has_parens },
newline : newline,
print : print,
space : space,
@@ -1244,7 +1246,7 @@ function OutputStream(options) {
var expr = self.expression;
expr.print(output);
var prop = self.property;
if (output.option("ie8") && RESERVED_WORDS(prop)) {
if (output.option("ie8") && RESERVED_WORDS[prop]) {
output.print("[");
output.add_mapping(self.end);
output.print_string(prop);
@@ -1356,7 +1358,7 @@ function OutputStream(options) {
output.print_string(key);
} else if ("" + +key == key && key >= 0) {
output.print(make_num(key));
} else if (RESERVED_WORDS(key) ? !output.option("ie8") : is_identifier_string(key)) {
} else if (RESERVED_WORDS[key] ? !output.option("ie8") : is_identifier_string(key)) {
if (quote && output.option("keep_quoted_props")) {
output.print_string(key, quote);
} else {

View File

@@ -165,7 +165,7 @@ function is_unicode_connector_punctuation(ch) {
};
function is_identifier(name) {
return !RESERVED_WORDS(name) && /^[a-z_$][a-z0-9_$]*$/i.test(name);
return !RESERVED_WORDS[name] && /^[a-z_$][a-z0-9_$]*$/i.test(name);
};
function is_identifier_start(code) {
@@ -245,7 +245,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
var ch = S.text.charAt(S.pos++);
if (signal_eof && !ch)
throw EX_EOF;
if (NEWLINE_CHARS(ch)) {
if (NEWLINE_CHARS[ch]) {
S.newline_before = S.newline_before || !in_string;
++S.line;
S.col = 0;
@@ -272,7 +272,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
var text = S.text;
for (var i = S.pos, n = S.text.length; i < n; ++i) {
var ch = text[i];
if (NEWLINE_CHARS(ch))
if (NEWLINE_CHARS[ch])
return i;
}
return -1;
@@ -292,9 +292,9 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
var prev_was_dot = false;
function token(type, value, is_comment) {
S.regex_allowed = ((type == "operator" && !UNARY_POSTFIX(value)) ||
(type == "keyword" && KEYWORDS_BEFORE_EXPRESSION(value)) ||
(type == "punc" && PUNC_BEFORE_EXPRESSION(value)));
S.regex_allowed = ((type == "operator" && !UNARY_POSTFIX[value]) ||
(type == "keyword" && KEYWORDS_BEFORE_EXPRESSION[value]) ||
(type == "punc" && PUNC_BEFORE_EXPRESSION[value]));
if (type == "punc" && value == ".") {
prev_was_dot = true;
} else if (!is_comment) {
@@ -324,7 +324,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
};
function skip_whitespace() {
while (WHITESPACE_CHARS(peek()))
while (WHITESPACE_CHARS[peek()])
next();
};
@@ -424,7 +424,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
for (;;) {
var ch = next(true, true);
if (ch == "\\") ch = read_escaped_char(true);
else if (NEWLINE_CHARS(ch)) parse_error("Unterminated string constant");
else if (NEWLINE_CHARS[ch]) parse_error("Unterminated string constant");
else if (ch == quote) break;
ret += ch;
}
@@ -476,7 +476,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
backslash = false;
}
}
if (KEYWORDS(name) && escaped) {
if (KEYWORDS[name] && escaped) {
hex = name.charCodeAt(0).toString(16).toUpperCase();
name = "\\u" + "0000".substr(hex.length) + hex + name.slice(1);
}
@@ -485,7 +485,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
var read_regexp = with_eof_error("Unterminated regular expression", function(source) {
var prev_backslash = false, ch, in_class = false;
while ((ch = next(true))) if (NEWLINE_CHARS(ch)) {
while ((ch = next(true))) if (NEWLINE_CHARS[ch]) {
parse_error("Unexpected line terminator");
} else if (prev_backslash) {
source += "\\" + ch;
@@ -517,7 +517,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
function grow(op) {
if (!peek()) return op;
var bigger = op + peek();
if (OPERATORS(bigger)) {
if (OPERATORS[bigger]) {
next();
return grow(bigger);
} else {
@@ -550,9 +550,9 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
function read_word() {
var word = read_name();
if (prev_was_dot) return token("name", word);
return KEYWORDS_ATOM(word) ? token("atom", word)
: !KEYWORDS(word) ? token("name", word)
: OPERATORS(word) ? token("operator", word)
return KEYWORDS_ATOM[word] ? token("atom", word)
: !KEYWORDS[word] ? token("name", word)
: OPERATORS[word] ? token("operator", word)
: token("keyword", word);
};
@@ -603,8 +603,8 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
}
}
if (is_digit(code)) return read_num();
if (PUNC_CHARS(ch)) return token("punc", next());
if (OPERATOR_CHARS(ch)) return read_operator();
if (PUNC_CHARS[ch]) return token("punc", next());
if (OPERATOR_CHARS[ch]) return read_operator();
if (code == 92 || is_identifier_start(code)) return read_word();
break;
}
@@ -1321,7 +1321,7 @@ function parse($TEXT, options) {
func.end = prev();
return subscripts(func, allow_calls);
}
if (ATOMIC_START_TOKEN(S.token.type)) {
if (ATOMIC_START_TOKEN[S.token.type]) {
return subscripts(as_atom_node(), allow_calls);
}
unexpected();
@@ -1406,7 +1406,7 @@ function parse($TEXT, options) {
var tmp = S.token;
switch (tmp.type) {
case "operator":
if (!KEYWORDS(tmp.value)) unexpected();
if (!KEYWORDS[tmp.value]) unexpected();
case "num":
case "string":
case "name":
@@ -1504,7 +1504,7 @@ function parse($TEXT, options) {
var maybe_unary = function(allow_calls) {
var start = S.token;
if (is("operator") && UNARY_PREFIX(start.value)) {
if (is("operator") && UNARY_PREFIX[start.value]) {
next();
handle_regexp();
var ex = make_unary(AST_UnaryPrefix, start, maybe_unary(allow_calls));
@@ -1513,7 +1513,7 @@ function parse($TEXT, options) {
return ex;
}
var val = expr_atom(allow_calls);
while (is("operator") && UNARY_POSTFIX(S.token.value) && !has_newline_before(S.token)) {
while (is("operator") && UNARY_POSTFIX[S.token.value] && !has_newline_before(S.token)) {
val = make_unary(AST_UnaryPostfix, S.token, val);
val.start = start;
val.end = S.token;
@@ -1585,7 +1585,7 @@ function parse($TEXT, options) {
var maybe_assign = function(no_in) {
var start = S.token;
var left = maybe_conditional(no_in), val = S.token.value;
if (is("operator") && ASSIGNMENT(val)) {
if (is("operator") && ASSIGNMENT[val]) {
if (is_assignable(left)) {
next();
return new AST_Assign({

View File

@@ -150,6 +150,10 @@ function mangle_properties(ast, options) {
else if (node instanceof AST_Sub) {
addStrings(node.property, add);
}
else if (node instanceof AST_Call
&& node.expression.print_to_string() == "Object.defineProperty") {
addStrings(node.args[1], add);
}
}));
// step 2: transform the tree, renaming properties
@@ -167,6 +171,10 @@ function mangle_properties(ast, options) {
else if (!options.keep_quoted && node instanceof AST_Sub) {
node.property = mangleStrings(node.property);
}
else if (node instanceof AST_Call
&& node.expression.print_to_string() == "Object.defineProperty") {
node.args[1] = mangleStrings(node.args[1]);
}
}));
// only function declarations after this line
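This propmangle change mangles the property-name argument of `Object.defineProperty()` together with ordinary property references, so the defined name and its later accesses stay consistent (see the `issue_869_*` test cases further below). A minimal sketch via the public API; the `answer` property is illustrative, and `get` is reserved as in those tests:

var UglifyJS = require("uglify-js");
var code = [
    "var o = {};",
    'Object.defineProperty(o, "answer", {',
    "    get: function() { return 42; }",
    "});",
    "console.log(o.answer);"
].join("\n");
var result = UglifyJS.minify(code, {
    mangle: {
        properties: {
            reserved: [ "get" ]  // keep the descriptor key intact
        }
    }
});
if (result.error) throw result.error;
console.log(result.code);  // "answer" and o.answer are expected to share one mangled name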

View File

@@ -344,7 +344,7 @@ function next_mangled_name(scope, options, def) {
}
while (true) {
name = base54(++scope.cname);
if (in_use[name] || !is_identifier(name) || member(name, options.reserved)) continue;
if (in_use[name] || !is_identifier(name) || options.reserved.has[name]) continue;
if (!names[name]) break;
holes.push(scope.cname);
}
@@ -387,6 +387,7 @@ function _default_mangler_options(options) {
if (!Array.isArray(options.reserved)) options.reserved = [];
// Never mangle arguments
push_uniq(options.reserved, "arguments");
options.reserved.has = makePredicate(options.reserved);
return options;
}
@@ -452,9 +453,8 @@ AST_Toplevel.DEFMETHOD("mangle_names", function(options){
redefined.forEach(mangle);
function mangle(def) {
if (!member(def.name, options.reserved)) {
def.mangle(options);
}
if (options.reserved.has[def.name]) return;
def.mangle(options);
}
});
@@ -504,7 +504,7 @@ AST_Toplevel.DEFMETHOD("expand_names", function(options) {
function rename(def) {
if (def.global && options.cache) return;
if (def.unmangleable(options)) return;
if (member(def.name, options.reserved)) return;
if (options.reserved.has[def.name]) return;
var d = def.redefined();
def.name = d ? d.name : next_name();
def.orig.forEach(function(sym) {

View File

@@ -57,26 +57,26 @@ function SourceMap(options) {
file : options.file,
sourceRoot : options.root
});
var orig_map = options.orig && new MOZ_SourceMap.SourceMapConsumer(options.orig);
if (orig_map && Array.isArray(options.orig.sources)) {
orig_map._sources.toArray().forEach(function(source) {
var sourceContent = orig_map.sourceContentFor(source, true);
if (sourceContent) {
generator.setSourceContent(source, sourceContent);
}
});
var maps = options.orig && Object.create(null);
if (maps) for (var source in options.orig) {
var map = new MOZ_SourceMap.SourceMapConsumer(options.orig[source]);
if (Array.isArray(options.orig[source].sources)) {
map._sources.toArray().forEach(function(source) {
var sourceContent = map.sourceContentFor(source, true);
if (sourceContent) generator.setSourceContent(source, sourceContent);
});
}
maps[source] = map;
}
function add(source, gen_line, gen_col, orig_line, orig_col, name) {
if (orig_map) {
var info = orig_map.originalPositionFor({
var map = maps && maps[source];
if (map) {
var info = map.originalPositionFor({
line: orig_line,
column: orig_col
});
if (info.source === null) {
return;
}
if (info.source === null) return;
source = info.source;
orig_line = info.line;
orig_col = info.column;

View File

@@ -93,7 +93,12 @@ TreeTransformer.prototype = new TreeWalker;
self.body = do_list(self.body, tw);
});
_(AST_DWLoop, function(self, tw){
_(AST_Do, function(self, tw){
self.body = self.body.transform(tw);
self.condition = self.condition.transform(tw);
});
_(AST_While, function(self, tw){
self.condition = self.condition.transform(tw);
self.body = self.body.transform(tw);
});

View File

@@ -145,7 +145,7 @@ var MAP = (function(){
}
return is_last;
};
if (a instanceof Array) {
if (Array.isArray(a)) {
if (backwards) {
for (i = a.length; --i >= 0;) if (doit()) break;
ret.reverse();
@@ -210,51 +210,14 @@ function mergeSort(array, cmp) {
return _ms(array);
};
// this function is taken from Acorn [1], written by Marijn Haverbeke
// [1] https://github.com/marijnh/acorn
function makePredicate(words) {
if (!(words instanceof Array)) words = words.split(" ");
var f = "", cats = [];
out: for (var i = 0; i < words.length; ++i) {
for (var j = 0; j < cats.length; ++j)
if (cats[j][0].length == words[i].length) {
cats[j].push(words[i]);
continue out;
}
cats.push([words[i]]);
}
function quote(word) {
return JSON.stringify(word).replace(/[\u2028\u2029]/g, function(s) {
switch (s) {
case "\u2028": return "\\u2028";
case "\u2029": return "\\u2029";
}
return s;
});
}
function compareTo(arr) {
if (arr.length == 1) return f += "return str === " + quote(arr[0]) + ";";
f += "switch(str){";
for (var i = 0; i < arr.length; ++i) f += "case " + quote(arr[i]) + ":";
f += "return true}return false;";
}
// When there are more than three length categories, an outer
// switch first dispatches on the lengths, to save on comparisons.
if (cats.length > 3) {
cats.sort(function(a, b) {return b.length - a.length;});
f += "switch(str.length){";
for (var i = 0; i < cats.length; ++i) {
var cat = cats[i];
f += "case " + cat[0].length + ":";
compareTo(cat);
}
f += "}";
// Otherwise, simply generate a flat `switch` statement.
} else {
compareTo(words);
}
return new Function("str", f);
};
if (!Array.isArray(words)) words = words.split(" ");
var map = Object.create(null);
words.forEach(function(word) {
map[word] = true;
});
return map;
}
function all(array, predicate) {
for (var i = array.length; --i >= 0;)
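After this rewrite, `makePredicate()` returns a prototype-free dictionary (`Object.create(null)`) instead of compiling a lookup function, which is why call sites throughout `compress.js`, `parse.js` and `output.js` in the hunks above switch from `pred(word)` to `pred[word]`. A minimal sketch of the new behaviour:

function makePredicate(words) {
    if (!Array.isArray(words)) words = words.split(" ");
    var map = Object.create(null);  // no inherited keys, so names like "constructor" cannot leak through
    words.forEach(function(word) {
        map[word] = true;
    });
    return map;
}

var lazy_op = makePredicate("&& ||");
console.log(lazy_op["&&"]);  // true
console.log(lazy_op["+"]);   // undefined (falsy), equivalent to the old lazy_op("+") returning false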

View File

@@ -3,7 +3,7 @@
"description": "JavaScript parser, mangler/compressor and beautifier toolkit",
"author": "Mihai Bazon <mihai.bazon@gmail.com> (http://lisperator.net/)",
"license": "BSD-2-Clause",
"version": "3.3.17",
"version": "3.3.22",
"engines": {
"node": ">=0.8.0"
},

View File

@@ -2237,3 +2237,69 @@ issue_3018: {
}
expect_stdout: "PASS"
}
issue_3054: {
options = {
booleans: true,
collapse_vars: true,
inline: 1,
reduce_vars: true,
toplevel: true,
}
input: {
"use strict";
function f() {
return { a: true };
}
console.log(function(b) {
b = false;
return f();
}().a, f.call().a);
}
expect: {
"use strict";
function f() {
return { a: !0 };
}
console.log(function(b) {
return { a: !(b = !1) };
}().a, f.call().a);
}
expect_stdout: "true true"
}
issue_3076: {
options = {
dead_code: true,
inline: true,
sequences: true,
unused: true,
}
input: {
var c = "PASS";
(function(b) {
var n = 2;
while (--b + function() {
e && (c = "FAIL");
e = 5;
return 1;
try {
var a = 5;
} catch (e) {
var e;
}
}().toString() && --n > 0);
})(2);
console.log(c);
}
expect: {
var c = "PASS";
(function(b) {
var n = 2;
while (--b + (e = void 0, e && (c = "FAIL"), e = 5, 1).toString() && --n > 0);
var e;
})(2),
console.log(c);
}
expect_stdout: "PASS"
}

View File

@@ -716,3 +716,143 @@ issue_3021: {
}
expect_stdout: "2 2"
}
issue_3046: {
options = {
hoist_props: true,
reduce_vars: true,
}
input: {
console.log(function(a) {
do {
var b = {
c: a++
};
} while (b.c && a);
return a;
}(0));
}
expect: {
console.log(function(a) {
do {
var b_c = a++;
} while (b_c && a);
return a;
}(0));
}
expect_stdout: "1"
}
issue_3071_1: {
options = {
evaluate: true,
inline: true,
join_vars: true,
hoist_props: true,
passes: 3,
reduce_vars: true,
sequences: true,
side_effects: true,
toplevel: true,
unused: true,
}
input: {
(function() {
var obj = {};
obj.one = 1;
obj.two = 2;
console.log(obj.one);
})();
}
expect: {
console.log(1);
}
expect_stdout: "1"
}
issue_3071_2: {
options = {
evaluate: true,
inline: true,
join_vars: true,
hoist_props: true,
passes: 3,
reduce_vars: true,
sequences: true,
side_effects: true,
unused: true,
}
input: {
(function() {
obj = {};
obj.one = 1;
obj.two = 2;
console.log(obj.one);
var obj;
})();
}
expect: {
console.log(1);
}
expect_stdout: "1"
}
issue_3071_2_toplevel: {
options = {
evaluate: true,
inline: true,
join_vars: true,
hoist_props: true,
passes: 3,
reduce_vars: true,
sequences: true,
side_effects: true,
toplevel: true,
unused: true,
}
input: {
(function() {
obj = {};
obj.one = 1;
obj.two = 2;
console.log(obj.one);
var obj;
})();
}
expect: {
console.log(1);
}
expect_stdout: "1"
}
issue_3071_3: {
options = {
hoist_props: true,
reduce_vars: true,
}
input: {
var c = 0;
(function(a, b) {
(function f(o) {
var n = 2;
while (--b + (o = {
p: c++,
}) && --n > 0);
})();
})();
console.log(c);
}
expect: {
var c = 0;
(function(a, b) {
(function f(o) {
var n = 2;
while (--b + (o = {
p: c++,
}) && --n > 0);
})();
})();
console.log(c);
}
expect_stdout: "2"
}

View File

@@ -1208,6 +1208,37 @@ join_object_assignments_3: {
expect_stdout: "PASS"
}
join_object_assignments_4: {
options = {
join_vars: true,
sequences: true,
}
input: {
var o;
console.log(o);
o = {};
o.a = "foo";
console.log(o.b);
o.b = "bar";
console.log(o.a);
}
expect: {
var o;
console.log(o),
o = {
a: "foo",
},
console.log(o.b),
o.b = "bar",
console.log(o.a);
}
expect_stdout: [
"undefined",
"undefined",
"foo",
]
}
join_object_assignments_return_1: {
options = {
join_vars: true,
@@ -1640,3 +1671,61 @@ issue_2893_2: {
}
expect_stdout: "PASS"
}
issue_869_1: {
mangle = {
properties: {
reserved: [ "get" ]
},
}
input: {
var o = { p: "FAIL" };
Object.defineProperty(o, "p", {
get: function() {
return "PASS";
}
});
console.log(o.p);
}
expect: {
var o = { o: "FAIL" };
Object.defineProperty(o, "o", {
get: function() {
return "PASS";
}
});
console.log(o.o);
}
expect_stdout: "PASS"
}
issue_869_2: {
mangle = {
properties: {
reserved: [ "get" ]
},
}
input: {
var o = { p: "FAIL" };
Object.defineProperties(o, {
p: {
get: function() {
return "PASS";
}
}
});
console.log(o.p);
}
expect: {
var o = { o: "FAIL" };
Object.defineProperties(o, {
o: {
get: function() {
return "PASS";
}
}
});
console.log(o.o);
}
expect_stdout: "PASS"
}

View File

@@ -535,3 +535,110 @@ issue_2705_6: {
"/* */new(/* */a()||b())(c(),d());",
]
}
issue_3065_1: {
options = {
inline: true,
pure_funcs: [ "pureFunc" ],
reduce_vars: true,
side_effects: true,
toplevel: true,
unused: true,
}
input: {
function modifyWrapper(a, f, wrapper) {
wrapper.a = a;
wrapper.f = f;
return wrapper;
}
function pureFunc(fun) {
return modifyWrapper(1, fun, function(a) {
return fun(a);
});
}
var unused = pureFunc(function(x) {
return x;
});
}
expect: {}
}
issue_3065_2: {
rename = true
options = {
inline: true,
pure_funcs: [ "pureFunc" ],
reduce_vars: true,
side_effects: true,
toplevel: true,
unused: true,
}
mangle = {
reserved: [ "pureFunc" ],
toplevel: true,
}
input: {
function modifyWrapper(a, f, wrapper) {
wrapper.a = a;
wrapper.f = f;
return wrapper;
}
function pureFunc(fun) {
return modifyWrapper(1, fun, function(a) {
return fun(a);
});
}
var unused = pureFunc(function(x) {
return x;
});
}
expect: {}
}
issue_3065_3: {
options = {
pure_funcs: [ "debug" ],
reduce_vars: true,
side_effects: true,
toplevel: true,
unused: true,
}
input: {
function debug(msg) {
console.log(msg);
}
debug(function() {
console.log("PASS");
return "FAIL";
}());
}
expect: {
(function() {
console.log("PASS");
})();
}
}
issue_3065_4: {
options = {
pure_funcs: [ "debug" ],
reduce_vars: true,
side_effects: true,
toplevel: true,
unused: true,
}
input: {
var debug = function(msg) {
console.log(msg);
};
debug(function() {
console.log("PASS");
return "FAIL";
}());
}
expect: {
(function() {
console.log("PASS");
})();
}
}

View File

@@ -5582,3 +5582,131 @@ issue_2992: {
}
expect_stdout: "PASS"
}
issue_3042_1: {
options = {
reduce_funcs: true,
reduce_vars: true,
toplevel: true,
unused: true,
}
input: {
function f() {}
var a = [ 1, 2 ].map(function() {
return new f();
});
console.log(a[0].constructor === a[1].constructor);
}
expect: {
function f() {}
var a = [ 1, 2 ].map(function() {
return new f();
});
console.log(a[0].constructor === a[1].constructor);
}
expect_stdout: "true"
}
issue_3042_2: {
options = {
reduce_funcs: true,
reduce_vars: true,
toplevel: true,
unused: true,
}
input: {
function Foo() {
this.isFoo = function(o) {
return o instanceof Foo;
};
}
function FooCollection() {
this.foos = [1, 1].map(function() {
return new Foo();
});
}
var fooCollection = new FooCollection();
console.log(fooCollection.foos[0].isFoo(fooCollection.foos[0]));
console.log(fooCollection.foos[0].isFoo(fooCollection.foos[1]));
console.log(fooCollection.foos[1].isFoo(fooCollection.foos[0]));
console.log(fooCollection.foos[1].isFoo(fooCollection.foos[1]));
}
expect: {
function Foo() {
this.isFoo = function(o) {
return o instanceof Foo;
};
}
var fooCollection = new function() {
this.foos = [1, 1].map(function() {
return new Foo();
});
}();
console.log(fooCollection.foos[0].isFoo(fooCollection.foos[0]));
console.log(fooCollection.foos[0].isFoo(fooCollection.foos[1]));
console.log(fooCollection.foos[1].isFoo(fooCollection.foos[0]));
console.log(fooCollection.foos[1].isFoo(fooCollection.foos[1]));
}
expect_stdout: [
"true",
"true",
"true",
"true",
]
}
issue_3068_1: {
options = {
evaluate: true,
reduce_vars: true,
}
input: {
(function() {
do {
continue;
var b = "defined";
} while (b && b.c);
})();
}
expect: {
(function() {
do {
continue;
var b = "defined";
} while (b && b.c);
})();
}
expect_stdout: true
}
issue_3068_2: {
options = {
evaluate: true,
reduce_vars: true,
}
input: {
(function() {
do {
try {
while ("" == typeof a);
} finally {
continue;
}
var b = "defined";
} while (b && b.c);
})();
}
expect: {
(function() {
do {
try {
while ("" == typeof a);
} finally {
continue;
}
var b = "defined";
} while (b && b.c);
})();
}
expect_stdout: true
}

View File

@@ -0,0 +1,2 @@
function _toConsumableArray(arr){if(Array.isArray(arr)){for(var i=0,arr2=Array(arr.length);i<arr.length;i++){arr2[i]=arr[i]}return arr2}else{return Array.from(arr)}}var _require=require("bar"),foo=_require.foo;var _require2=require("world"),hello=_require2.hello;foo.x.apply(foo,_toConsumableArray(foo.y(hello.z)));
//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbImlucHV0Mi5qcyJdLCJuYW1lcyI6WyJyZXF1aXJlIiwiYXJyIl0sIm1hcHBpbmdzIjoiMEpBQWNBLEtBQVFDIiwic291cmNlc0NvbnRlbnQiOlsiY29uc3Qge2Zvb30gPSByZXF1aXJlKFwiYmFyXCIpO1xuY29uc3Qge2hlbGxvfSA9IHJlcXVpcmUoXCJ3b3JsZFwiKTtcblxuZm9vLngoLi4uZm9vLnkoaGVsbG8ueikpO1xuIl19

View File

@@ -0,0 +1,11 @@
function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }
var _require = require("bar"),
foo = _require.foo;
var _require2 = require("world"),
hello = _require2.hello;
foo.x.apply(foo, _toConsumableArray(foo.y(hello.z)));
//# sourceMappingURL=input.js.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["input2.js"],"names":["require","foo","hello","x","apply","_toConsumableArray","y","z"],"mappings":"kLAAcA,QAAQ,OAAfC,aAAAA,kBACSD,QAAQ,SAAjBE,gBAAAA,MAEPD,IAAIE,EAAJC,MAAAH,IAAAI,mBAASJ,IAAIK,EAAEJ,MAAMK","sourcesContent":["const {foo} = require(\"bar\");\nconst {hello} = require(\"world\");\n\nfoo.x(...foo.y(hello.z));\n"]}

View File

@@ -1,9 +1,9 @@
var assert = require("assert");
var exec = require("child_process").exec;
var readFileSync = require("fs").readFileSync;
var fs = require("fs");
function read(path) {
return readFileSync(path, "utf8");
return fs.readFileSync(path, "utf8");
}
describe("bin/uglifyjs", function () {
@@ -56,6 +56,18 @@ describe("bin/uglifyjs", function () {
done();
});
});
it("Should give sensible error against invalid input source map", function(done) {
var command = uglifyjscmd + " test/mocha.js --source-map content=blah,url=inline";
exec(command, function (err, stdout, stderr) {
assert.ok(err);
assert.deepEqual(stderr.split(/\n/).slice(0, 2), [
"INFO: Using input source map: blah",
"ERROR: invalid input source map: blah",
]);
done();
});
});
it("Should append source map to output when using --source-map url=inline", function (done) {
var command = uglifyjscmd + " test/input/issue-1323/sample.js --source-map url=inline";
@@ -94,6 +106,36 @@ describe("bin/uglifyjs", function () {
done();
});
});
it("Should not load source map before finish reading from STDIN", function(done) {
var mapFile = "tmp/input.js.map";
try {
fs.mkdirSync("./tmp");
} catch (e) {
if (e.code != "EEXIST") throw e;
}
try {
fs.unlinkSync(mapFile);
} catch (e) {
if (e.code != "ENOENT") throw e;
}
var command = [
uglifyjscmd,
"--source-map", "content=" + mapFile,
"--source-map", "includeSources=true",
"--source-map", "url=inline",
].join(" ");
var child = exec(command, function(err, stdout) {
if (err) throw err;
assert.strictEqual(stdout, read("test/input/pr-3040/expect.js"));
done();
});
setTimeout(function() {
fs.writeFileSync(mapFile, read("test/input/pr-3040/input.js.map"));
child.stdin.end(read("test/input/pr-3040/input.js"));
}, 1000);
});
it("Should work with --keep-fnames (mangle only)", function (done) {
var command = uglifyjscmd + ' test/input/issue-1431/sample.js --keep-fnames -m';
@@ -175,7 +217,14 @@ describe("bin/uglifyjs", function () {
});
});
it("Should process inline source map", function(done) {
var command = uglifyjscmd + " test/input/issue-520/input.js -mc toplevel --source-map content=inline,url=inline";
var command = [
uglifyjscmd,
"test/input/issue-520/input.js",
"-mc", "toplevel",
"--source-map", "content=inline",
"--source-map", "includeSources=true",
"--source-map", "url=inline",
].join(" ");
exec(command, function (err, stdout) {
if (err) throw err;
@@ -195,16 +244,27 @@ describe("bin/uglifyjs", function () {
"//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbInRlc3QvaW5wdXQvaXNzdWUtMTMyMy9zYW1wbGUuanMiXSwibmFtZXMiOlsiYmFyIiwiZm9vIl0sIm1hcHBpbmdzIjoiQUFBQSxJQUFJQSxJQUFNLFdBQ04sU0FBU0MsSUFBS0QsS0FDVixPQUFPQSxJQUdYLE9BQU9DLElBTEQifQ==",
"",
].join("\n"));
assert.strictEqual(stderr, "WARN: inline source map not found\n");
assert.strictEqual(stderr, "WARN: inline source map not found: test/input/issue-1323/sample.js\n");
done();
});
});
it("Should fail with multiple input and inline source map", function(done) {
var command = uglifyjscmd + " test/input/issue-520/input.js test/input/issue-520/output.js --source-map content=inline,url=inline";
it("Should handle multiple input and inline source map", function(done) {
var command = [
uglifyjscmd,
"test/input/issue-520/input.js",
"test/input/issue-1323/sample.js",
"--source-map", "content=inline,url=inline",
].join(" ");
exec(command, function (err, stdout, stderr) {
assert.ok(err);
assert.strictEqual(stderr.split(/\n/)[0], "ERROR: inline source map only works with singular input");
if (err) throw err;
assert.strictEqual(stdout, [
"var Foo=function Foo(){console.log(1+2)};new Foo;var bar=function(){function foo(bar){return bar}return foo}();",
"//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbInN0ZGluIiwidGVzdC9pbnB1dC9pc3N1ZS0xMzIzL3NhbXBsZS5qcyJdLCJuYW1lcyI6WyJGb28iLCJjb25zb2xlIiwibG9nIiwiYmFyIiwiZm9vIl0sIm1hcHBpbmdzIjoiQUFBQSxJQUFNQSxJQUFJLFNBQUFBLE1BQWdCQyxRQUFRQyxJQUFJLEVBQUUsSUFBTyxJQUFJRixJQ0FuRCxJQUFJRyxJQUFNLFdBQ04sU0FBU0MsSUFBS0QsS0FDVixPQUFPQSxJQUdYLE9BQU9DLElBTEQifQ==",
"",
].join("\n"));
assert.strictEqual(stderr, "WARN: inline source map not found: test/input/issue-1323/sample.js\n");
done();
});
});

View File

@@ -1,66 +1,68 @@
var Uglify = require('../../');
var assert = require("assert");
var Uglify = require("../../");
var SourceMapConsumer = require("source-map").SourceMapConsumer;
function getMap() {
return {
"version": 3,
"sources": ["index.js"],
"names": [],
"mappings": ";;AAAA,IAAI,MAAM,SAAN,GAAM;AAAA,SAAK,SAAS,CAAd;AAAA,CAAV;AACA,QAAQ,GAAR,CAAY,IAAI,KAAJ,CAAZ",
"file": "bundle.js",
"sourcesContent": ["let foo = x => \"foo \" + x;\nconsole.log(foo(\"bar\"));"]
};
}
function prepareMap(sourceMap) {
var code = [
'"use strict";',
"",
"var foo = function foo(x) {",
' return "foo " + x;',
"};",
'console.log(foo("bar"));',
"",
"//# sourceMappingURL=bundle.js.map",
].join("\n");
var result = Uglify.minify(code, {
sourceMap: {
content: sourceMap,
includeSources: true,
}
});
if (result.error) throw result.error;
return new SourceMapConsumer(result.map);
}
describe("input sourcemaps", function() {
var transpilemap, map;
function getMap() {
return {
"version": 3,
"sources": ["index.js"],
"names": [],
"mappings": ";;AAAA,IAAI,MAAM,SAAN,GAAM;AAAA,SAAK,SAAS,CAAd;AAAA,CAAV;AACA,QAAQ,GAAR,CAAY,IAAI,KAAJ,CAAZ",
"file": "bundle.js",
"sourcesContent": ["let foo = x => \"foo \" + x;\nconsole.log(foo(\"bar\"));"]
};
}
function prepareMap(sourceMap) {
var transpiled = '"use strict";\n\n' +
'var foo = function foo(x) {\n return "foo " + x;\n};\n' +
'console.log(foo("bar"));\n\n' +
'//# sourceMappingURL=bundle.js.map';
transpilemap = sourceMap || getMap();
var result = Uglify.minify(transpiled, {
sourceMap: {
content: transpilemap
}
});
map = new SourceMapConsumer(result.map);
}
beforeEach(function () {
prepareMap();
});
it("Should copy over original sourcesContent", function() {
assert.equal(map.sourceContentFor("index.js"), transpilemap.sourcesContent[0]);
var orig = getMap();
var map = prepareMap(orig);
assert.equal(map.sourceContentFor("index.js"), orig.sourcesContent[0]);
});
it("Should copy sourcesContent if sources are relative", function () {
it("Should copy sourcesContent if sources are relative", function() {
var relativeMap = getMap();
relativeMap.sources = ['./index.js'];
prepareMap(relativeMap);
var map = prepareMap(relativeMap);
assert.notEqual(map.sourcesContent, null);
assert.equal(map.sourcesContent.length, 1);
assert.equal(map.sourceContentFor("index.js"), transpilemap.sourcesContent[0]);
assert.equal(map.sourceContentFor("index.js"), relativeMap.sourcesContent[0]);
});
it("Final sourcemap should not have invalid mappings from inputSourceMap (issue #882)", function() {
it("Should not have invalid mappings from inputSourceMap (issue #882)", function() {
var map = prepareMap(getMap());
// The original source has only 2 lines, check that mappings don't have more lines
var msg = "Mapping should not have higher line number than the original file had";
map.eachMapping(function(mapping) {
assert.ok(mapping.originalLine <= 2, msg)
assert.ok(mapping.originalLine <= 2, msg);
});
map.allGeneratedPositionsFor({source: "index.js", line: 1, column: 1}).forEach(function(pos) {
map.allGeneratedPositionsFor({
source: "index.js",
line: 1,
column: 1
}).forEach(function(pos) {
assert.ok(pos.line <= 2, msg);
})
});
});
});

View File

@@ -1,7 +1,7 @@
var assert = require("assert");
var uglify = require("../../");
describe("New", function() {
describe("parentheses", function() {
it("Should add trailing parentheses for new expressions with zero arguments in beautify mode", function() {
var tests = [
"new x(1);",
@@ -83,4 +83,23 @@ describe("New", function() {
);
}
});
});
it("Should compress leading parenthesis with reasonable performance", function() {
this.timeout(30000);
var code = [
"({}?0:1)&&x();",
"(function(){}).name;",
];
for (var i = 16; --i >= 0;) {
[].push.apply(code, code);
}
code = code.join("");
var result = uglify.minify(code, {
compress: false,
mangle: false,
});
if (result.error) throw result.error;
// Dismal performance for `assert.strictEqual()` in Node.js 6
assert.ok(result.code === code);
});
});

View File

@@ -70,6 +70,7 @@ describe("sourcemaps", function() {
compress: { toplevel: true },
sourceMap: {
content: "inline",
includeSources: true,
url: "inline"
}
}).code + "\n";
@@ -90,24 +91,60 @@ describe("sourcemaps", function() {
});
assert.strictEqual(result.code, "var bar=function(bar){return bar};");
assert.strictEqual(warnings.length, 1);
assert.strictEqual(warnings[0], "inline source map not found");
assert.strictEqual(warnings[0], "inline source map not found: 0");
} finally {
Uglify.AST_Node.warn_function = warn_function;
}
});
it("Should fail with multiple input and inline source map", function() {
var result = Uglify.minify([
read("./test/input/issue-520/input.js"),
read("./test/input/issue-520/output.js")
], {
it("Should handle multiple input and inline source map", function() {
var warn_function = Uglify.AST_Node.warn_function;
var warnings = [];
Uglify.AST_Node.warn_function = function(txt) {
warnings.push(txt);
};
try {
var result = Uglify.minify([
read("./test/input/issue-520/input.js"),
read("./test/input/issue-1323/sample.js"),
], {
sourceMap: {
content: "inline",
url: "inline",
}
});
if (result.error) throw result.error;
assert.strictEqual(result.code, [
"var Foo=function(){console.log(3)};new Foo;var bar=function(o){return o};",
"//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbInN0ZGluIiwiMSJdLCJuYW1lcyI6WyJGb28iLCJjb25zb2xlIiwibG9nIiwiYmFyIl0sIm1hcHBpbmdzIjoiQUFBQSxJQUFNQSxJQUFJLFdBQWdCQyxRQUFRQyxJQUFJLElBQVMsSUFBSUYsSUNBbkQsSUFBSUcsSUFDQSxTQUFjQSxHQUNWLE9BQU9BIn0=",
].join("\n"));
assert.strictEqual(warnings.length, 1);
assert.strictEqual(warnings[0], "inline source map not found: 1");
} finally {
Uglify.AST_Node.warn_function = warn_function;
}
});
it("Should drop source contents for includeSources=false", function() {
var result = Uglify.minify(read("./test/input/issue-520/input.js"), {
compress: false,
mangle: false,
sourceMap: {
content: "inline",
url: "inline"
}
includeSources: true,
},
});
var err = result.error;
assert.ok(err instanceof Error);
assert.strictEqual(err.stack.split(/\n/)[0], "Error: inline source map only works with singular input");
if (result.error) throw result.error;
var map = JSON.parse(result.map);
assert.strictEqual(map.sourcesContent.length, 1);
result = Uglify.minify(result.code, {
compress: false,
mangle: false,
sourceMap: {
content: result.map,
},
});
if (result.error) throw result.error;
map = JSON.parse(result.map);
assert.ok(!("sourcesContent" in map));
});
});

View File

@@ -40,7 +40,7 @@ function safe_log(arg, level) {
}
function strip_func_ids(text) {
return text.toString().replace(/F[0-9]{6}N/g, "<F<>N>");
return ("" + text).replace(/F[0-9]{6}N/g, "<F<>N>");
}
var context;