Compare commits

...

7 Commits

Author SHA1 Message Date
Alex Lam S.L
014f428153 v3.0.1 2017-05-08 07:05:57 +08:00
Alex Lam S.L
a3b2eb75bd return Error from minify() (#1880)
Have `minify()` return `Error` in `result.error` rather than throwing it.
2017-05-08 07:05:19 +08:00
Alex Lam S.L
da295de82b support dumping AST (#1879)
Re-order `AST_Binary` properties to make dump more readable.

closes #769
2017-05-08 06:23:01 +08:00
Alex Lam S.L
4f8ca4626e deprecate low level API (#1877)
fixes #1872
2017-05-08 03:24:42 +08:00
Alex Lam S.L
e54748365c support minify() output as AST (#1878)
- `options.output.ast` (default `false`)
- `options.output.code` (default `true`)
2017-05-08 02:11:45 +08:00
Alex Lam S.L
2d99d06601 update documentation
Remove deprecated CLI option
2017-05-07 03:02:46 +08:00
Alex Lam S.L
98cf95e5b5 fix test for #1865 (#1873) 2017-05-07 02:56:02 +08:00
28 changed files with 178 additions and 124 deletions

View File

@@ -42,8 +42,7 @@ in sequence and apply any compression options. The files are parsed in the
same global scope, that is, a reference from a file to some
variable/function declared in another file will be matched properly.
If you want to read from STDIN instead, pass a single dash instead of input
files.
If no input file is specified, UglifyJS will read from STDIN.
If you wish to pass your options before the input files, separate the two with
a double dash to prevent input files being used as option arguments:
@@ -96,8 +95,9 @@ The available options are:
`wrap_iife` Wrap IIFEs in parentheses. Note: you may
want to disable `negate_iife` under
compressor options.
-o, --output <file> Output file (default STDOUT). Specify "spidermonkey"
to dump SpiderMonkey AST format (as JSON) to STDOUT.
-o, --output <file> Output file path (default STDOUT). Specify `ast` or
`spidermonkey` to write UglifyJS or SpiderMonkey AST
as JSON to STDOUT respectively.
--comments [filter] Preserve copyright comments in the output. By
default this works like Google Closure, keeping
JSDoc-style comments that contain "@license" or

View File

@@ -15,6 +15,7 @@ var path = require("path");
var program = require("commander");
var UglifyJS = require("../tools/node");
var skip_keys = [ "cname", "enclosed", "parent_scope", "scope", "thedef", "uses_eval", "uses_with" ];
var files = {};
var options = {
compress: false,
@@ -89,7 +90,7 @@ if (program.mangleProps) {
if (typeof program.mangleProps != "object") program.mangleProps = {};
if (!Array.isArray(program.mangleProps.reserved)) program.mangleProps.reserved = [];
require("../tools/domprops").forEach(function(name) {
UglifyJS.push_uniq(program.mangleProps.reserved, name);
UglifyJS._push_uniq(program.mangleProps.reserved, name);
});
}
if (typeof options.mangle != "object") options.mangle = {};
@@ -107,6 +108,12 @@ if (program.nameCache) {
}
}
}
if (program.output == "ast") {
options.output = {
ast: true,
code: false
};
}
if (program.parse) {
if (program.parse.acorn || program.parse.spidermonkey) {
if (program.sourceMap) fatal("ERROR: inline source map only works with built-in parser");
@@ -165,7 +172,7 @@ function run() {
UglifyJS.AST_Node.warn_function = function(msg) {
console.error("WARN:", msg);
};
if (program.stats) program.stats = Date.now();
if (program.stats) program.stats = Date.now();
try {
if (program.parse) {
if (program.parse.acorn) {
@@ -185,9 +192,13 @@ function run() {
});
}
}
var result = UglifyJS.minify(files, options);
} catch (ex) {
if (ex instanceof UglifyJS.JS_Parse_Error) {
fatal("ERROR: " + ex.message);
}
var result = UglifyJS.minify(files, options);
if (result.error) {
var ex = result.error;
if (ex.name == "SyntaxError") {
console.error("Parse error at " + ex.filename + ":" + ex.line + "," + ex.col);
var col = ex.col;
var lines = files[ex.filename].split(/\r?\n/);
@@ -210,9 +221,31 @@ function run() {
console.error(ex.defs);
}
fatal("ERROR: " + ex.message);
}
if (program.output == "spidermonkey") {
console.log(JSON.stringify(UglifyJS.parse(result.code).to_mozilla_ast(), null, 2));
} else if (program.output == "ast") {
console.log(JSON.stringify(result.ast, function(key, value) {
if (skip_key(key)) return;
if (value instanceof UglifyJS.AST_Token) return;
if (value instanceof UglifyJS.Dictionary) return;
if (value instanceof UglifyJS.AST_Node) {
var result = {
_class: "AST_" + value.TYPE
};
value.CTOR.PROPS.forEach(function(prop) {
result[prop] = value[prop];
});
return result;
}
return value;
}, 2));
} else if (program.output == "spidermonkey") {
console.log(JSON.stringify(UglifyJS.minify(result.code, {
compress: false,
mangle: false,
output: {
ast: true,
code: false
}
}).ast.to_mozilla_ast(), null, 2));
} else if (program.output) {
fs.writeFileSync(program.output, result.code);
if (result.map) {
@@ -230,8 +263,8 @@ function run() {
}
function fatal(message) {
console.error(message);
process.exit(1);
console.error(message);
process.exit(1);
}
// A file glob function that only supports "*" and "?" wildcards in the basename.
@@ -278,9 +311,17 @@ function parse_js(flag, constants) {
return function(value, options) {
options = options || {};
try {
UglifyJS.parse(value, {
expression: true
}).walk(new UglifyJS.TreeWalker(function(node) {
UglifyJS.minify(value, {
parse: {
expression: true
},
compress: false,
mangle: false,
output: {
ast: true,
code: false
}
}).ast.walk(new UglifyJS.TreeWalker(function(node) {
if (node instanceof UglifyJS.AST_Assign) {
var name = node.left.print_to_string();
var value = node.right;
@@ -337,3 +378,7 @@ function to_cache(key) {
}
return cache[key];
}
function skip_key(key) {
return skip_keys.indexOf(key) >= 0;
}

View File

@@ -619,7 +619,7 @@ var AST_UnaryPostfix = DEFNODE("UnaryPostfix", null, {
$documentation: "Unary postfix expression, i.e. `i++`"
}, AST_Unary);
var AST_Binary = DEFNODE("Binary", "left operator right", {
var AST_Binary = DEFNODE("Binary", "operator left right", {
$documentation: "Binary expression, i.e. `a + b`",
$propdoc: {
left: "[AST_Node] left-hand side expression",

View File

@@ -108,38 +108,46 @@ function minify(files, options) {
toplevel = mangle_properties(toplevel, options.mangle.properties);
}
}
if (options.sourceMap) {
if (typeof options.sourceMap.content == "string") {
options.sourceMap.content = JSON.parse(options.sourceMap.content);
}
options.output.source_map = SourceMap({
file: options.sourceMap.filename,
orig: options.sourceMap.content,
root: options.sourceMap.root
});
if (options.sourceMap.includeSources) {
for (var name in files) {
options.output.source_map.get().setSourceContent(name, files[name]);
var result = {};
if (options.output.ast) {
result.ast = toplevel;
}
if (!HOP(options.output, "code") || options.output.code) {
if (options.sourceMap) {
if (typeof options.sourceMap.content == "string") {
options.sourceMap.content = JSON.parse(options.sourceMap.content);
}
options.output.source_map = SourceMap({
file: options.sourceMap.filename,
orig: options.sourceMap.content,
root: options.sourceMap.root
});
if (options.sourceMap.includeSources) {
for (var name in files) {
options.output.source_map.get().setSourceContent(name, files[name]);
}
}
}
}
var stream = OutputStream(options.output);
toplevel.print(stream);
var result = {
code: stream.get()
};
if (options.sourceMap) {
result.map = options.output.source_map.toString();
if (options.sourceMap.url == "inline") {
result.code += "\n//# sourceMappingURL=data:application/json;charset=utf-8;base64," + to_base64(result.map);
} else if (options.sourceMap.url) {
result.code += "\n//# sourceMappingURL=" + options.sourceMap.url;
delete options.output.ast;
delete options.output.code;
var stream = OutputStream(options.output);
toplevel.print(stream);
result.code = stream.get();
if (options.sourceMap) {
result.map = options.output.source_map.toString();
if (options.sourceMap.url == "inline") {
result.code += "\n//# sourceMappingURL=data:application/json;charset=utf-8;base64," + to_base64(result.map);
} else if (options.sourceMap.url) {
result.code += "\n//# sourceMappingURL=" + options.sourceMap.url;
}
}
}
if (warnings.length) {
result.warnings = warnings;
}
return result;
} catch (ex) {
return { error: ex };
} finally {
AST_Node.warn_function = warn_function;
}
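
The branch on `options.output.ast` and `options.output.code` above is what backs the new output switches from e54748365c. A minimal sketch of the high-level usage (the input string and surrounding option values are illustrative; only the option names come from this changeset):

    var UglifyJS = require("uglify-js");
    var result = UglifyJS.minify("a + b;", {
        compress: false,
        mangle: false,
        output: {
            ast: true,   // attach the resulting AST_Toplevel to result.ast (default false)
            code: false  // skip code generation, so no result.code (default true)
        }
    });
    // result.ast can then be walked (e.g. with a TreeWalker) or printed later.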

View File

@@ -4,7 +4,7 @@
"homepage": "http://lisperator.net/uglifyjs",
"author": "Mihai Bazon <mihai.bazon@gmail.com> (http://lisperator.net/)",
"license": "BSD-2-Clause",
"version": "3.0.0",
"version": "3.0.1",
"engines": {
"node": ">=0.8.0"
},

View File

@@ -2440,8 +2440,8 @@ issue_1865: {
unsafe: true,
}
input: {
function f(a) {
a.b = false;
function f(some) {
some.thing = false;
}
console.log(function() {
var some = { thing: true };
@@ -2450,8 +2450,8 @@ issue_1865: {
}());
}
expect: {
function f(a) {
a.b = false;
function f(some) {
some.thing = false;
}
console.log(function() {
var some = { thing: true };

test/exports.js Normal file (13 additions)
View File

@@ -0,0 +1,13 @@
exports["Compressor"] = Compressor;
exports["JS_Parse_Error"] = JS_Parse_Error;
exports["OutputStream"] = OutputStream;
exports["SourceMap"] = SourceMap;
exports["TreeWalker"] = TreeWalker;
exports["base54"] = base54;
exports["defaults"] = defaults;
exports["mangle_properties"] = mangle_properties;
exports["minify"] = minify;
exports["parse"] = parse;
exports["string_template"] = string_template;
exports["tokenizer"] = tokenizer;
exports["is_identifier"] = is_identifier;

View File

@@ -1,4 +1,4 @@
var UglifyJS = require('../../');
var UglifyJS = require("../node");
var assert = require("assert");
describe("Accessor tokens", function() {

View File

@@ -1,4 +1,4 @@
var UglifyJS = require('../../');
var UglifyJS = require("../node");
var assert = require("assert");
describe("arguments", function() {

View File

@@ -19,7 +19,7 @@ describe("bin/uglifyjs", function () {
eval(stdout);
assert.strictEqual(typeof WrappedUglifyJS, 'object');
assert.strictEqual(true, WrappedUglifyJS.parse('foo;') instanceof WrappedUglifyJS.AST_Node);
assert.strictEqual(WrappedUglifyJS.minify("foo([true,,2+3]);").code, "foo([!0,,5]);");
done();
});
@@ -509,4 +509,15 @@ describe("bin/uglifyjs", function () {
return JSON.stringify(map).replace(/"/g, '\\"');
}
});
it("Should dump AST as JSON", function(done) {
var command = uglifyjscmd + " test/input/global_defs/simple.js -mco ast";
exec(command, function (err, stdout) {
if (err) throw err;
var ast = JSON.parse(stdout);
assert.strictEqual(ast._class, "AST_Toplevel");
assert.ok(Array.isArray(ast.body));
done();
});
});
});

View File

@@ -1,4 +1,4 @@
var UglifyJS = require('../../');
var UglifyJS = require("../node");
var assert = require("assert");
describe("comment filters", function() {

View File

@@ -1,5 +1,5 @@
var assert = require("assert");
var uglify = require("../../");
var uglify = require("../node");
describe("Comment", function() {
it("Should recognize eol of single line comments", function() {

View File

@@ -1,5 +1,5 @@
var assert = require("assert");
var uglify = require("../../");
var uglify = require("../node");
describe("Directives", function() {
it ("Should allow tokenizer to store directives state", function() {

View File

@@ -1,4 +1,4 @@
var UglifyJS = require('../../');
var UglifyJS = require("../node");
var assert = require("assert");
describe("Getters and setters", function() {

View File

@@ -1,4 +1,4 @@
var Uglify = require('../../');
var Uglify = require("../node");
var assert = require("assert");
describe("line-endings", function() {

View File

@@ -114,17 +114,18 @@ describe("minify", function() {
}
});
it("Should fail with multiple input and inline source map", function() {
assert.throws(function() {
Uglify.minify([
read("./test/input/issue-520/input.js"),
read("./test/input/issue-520/output.js")
], {
sourceMap: {
content: "inline",
url: "inline"
}
});
var result = Uglify.minify([
read("./test/input/issue-520/input.js"),
read("./test/input/issue-520/output.js")
], {
sourceMap: {
content: "inline",
url: "inline"
}
});
var err = result.error;
assert.ok(err instanceof Error);
assert.strictEqual(err.stack.split(/\n/)[0], "Error: inline source map only works with singular input");
});
});
@@ -170,26 +171,14 @@ describe("minify", function() {
});
describe("JS_Parse_Error", function() {
it("should throw syntax error", function() {
assert.throws(function() {
Uglify.minify("function f(a{}");
}, function(err) {
assert.ok(err instanceof Error);
assert.strictEqual(err.stack.split(/\n/)[0], "SyntaxError: Unexpected token punc «{», expected punc «,»");
assert.strictEqual(err.filename, "0");
assert.strictEqual(err.line, 1);
assert.strictEqual(err.col, 12);
return true;
});
it("should return syntax error", function() {
var result = Uglify.minify("function f(a{}");
var err = result.error;
assert.ok(err instanceof Error);
assert.strictEqual(err.stack.split(/\n/)[0], "SyntaxError: Unexpected token punc «{», expected punc «,»");
assert.strictEqual(err.filename, "0");
assert.strictEqual(err.line, 1);
assert.strictEqual(err.col, 12);
});
});
describe("Compressor", function() {
it("should be backward compatible with ast.transform(compressor)", function() {
var ast = Uglify.parse("function f(a){for(var i=0;i<a;i++)console.log(i)}");
ast.figure_out_scope();
ast = ast.transform(Uglify.Compressor());
assert.strictEqual(ast.print_to_string(), "function f(a){for(var i=0;i<a;i++)console.log(i)}");
});
})
});
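
These test changes reflect the new contract from a3b2eb75bd: `minify()` reports failures through `result.error` instead of throwing. A minimal sketch of the calling pattern (the malformed input mirrors the test above; everything else is illustrative):

    var UglifyJS = require("uglify-js");
    var result = UglifyJS.minify("function f(a{}");
    if (result.error) {
        // result.error is an Error instance; for parse failures it carries
        // filename, line and col, as asserted in the test above.
        console.error(result.error.message);
    } else {
        console.log(result.code);
    }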

View File

@@ -1,5 +1,5 @@
var assert = require("assert");
var uglify = require("../../");
var uglify = require("../node");
describe("Number literals", function () {
it("Should not allow legacy octal literals in strict mode", function() {

View File

@@ -1,4 +1,4 @@
var UglifyJS = require("../../");
var UglifyJS = require("../node");
var assert = require("assert");
describe("operator", function() {

View File

@@ -1,6 +1,6 @@
var assert = require("assert");
var exec = require("child_process").exec;
var uglify = require("../../");
var uglify = require("../node");
describe("spidermonkey export/import sanity test", function() {
it("should produce a functional build when using --self with spidermonkey", function(done) {
@@ -15,18 +15,7 @@ describe("spidermonkey export/import sanity test", function() {
eval(stdout);
assert.strictEqual(typeof SpiderUglify, "object");
var ast = SpiderUglify.parse("foo([true,,2+3]);");
assert.strictEqual(true, ast instanceof SpiderUglify.AST_Node);
ast.figure_out_scope();
ast = SpiderUglify.Compressor({}).compress(ast);
assert.strictEqual(true, ast instanceof SpiderUglify.AST_Node);
var stream = SpiderUglify.OutputStream({});
ast.print(stream);
var code = stream.toString();
assert.strictEqual(code, "foo([!0,,5]);");
assert.strictEqual(SpiderUglify.minify("foo([true,,2+3]);").code, "foo([!0,,5]);");
done();
});
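
The rewritten test above illustrates the deprecation in 4f8ca4626e: the low-level `parse()` / `figure_out_scope()` / `Compressor` / `OutputStream` sequence gives way to a single `minify()` call. A minimal sketch of the supported path (assuming the standard package entry point):

    var UglifyJS = require("uglify-js");
    // One call now covers parsing, scope analysis, compression and code output.
    var result = UglifyJS.minify("foo([true,,2+3]);");
    console.log(result.code); // "foo([!0,,5]);" per the test above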

View File

@@ -1,4 +1,4 @@
var UglifyJS = require('../../');
var UglifyJS = require("../node");
var assert = require("assert");
describe("String literals", function() {

View File

@@ -1,5 +1,5 @@
var assert = require("assert");
var uglify = require("../../");
var uglify = require("../node");
describe("With", function() {
it("Should throw syntaxError when using with statement in strict mode", function() {

View File

@@ -1,7 +1,7 @@
// Testing UglifyJS <-> SpiderMonkey AST conversion
// through generative testing.
var UglifyJS = require(".."),
var UglifyJS = require("./node"),
escodegen = require("escodegen"),
esfuzz = require("esfuzz"),
estraverse = require("estraverse"),

test/node.js Normal file (6 additions)
View File

@@ -0,0 +1,6 @@
var fs = require("fs");
new Function("MOZ_SourceMap", "exports", require("../tools/node").FILES.map(function(file) {
if (/exports\.js$/.test(file)) file = require.resolve("./exports");
return fs.readFileSync(file, "utf8");
}).join("\n\n"))(require("source-map"), exports);

View File

@@ -1,6 +1,6 @@
#! /usr/bin/env node
var U = require("../tools/node");
var U = require("./node");
var path = require("path");
var fs = require("fs");
var assert = require("assert");

View File

@@ -1,4 +1,4 @@
var UglifyJS = require("..");
var UglifyJS = require("./node");
var ok = require("assert");
module.exports = function () {

View File

@@ -12,7 +12,7 @@
stream._handle.setBlocking(true);
});
var UglifyJS = require("..");
var UglifyJS = require("./node");
var randomBytes = require("crypto").randomBytes;
var sandbox = require("./sandbox");

View File

@@ -1,15 +1,4 @@
exports["Compressor"] = Compressor;
exports["Dictionary"] = Dictionary;
exports["JS_Parse_Error"] = JS_Parse_Error;
exports["OutputStream"] = OutputStream;
exports["SourceMap"] = SourceMap;
exports["TreeWalker"] = TreeWalker;
exports["base54"] = base54;
exports["defaults"] = defaults;
exports["mangle_properties"] = mangle_properties;
exports["minify"] = minify;
exports["parse"] = parse;
exports["push_uniq"] = push_uniq;
exports["string_template"] = string_template;
exports["tokenizer"] = tokenizer;
exports["is_identifier"] = is_identifier;
exports["_push_uniq"] = push_uniq;

View File

@@ -18,15 +18,19 @@ var FILES = UglifyJS.FILES = [
return require.resolve(file);
});
new Function("MOZ_SourceMap", "exports", FILES.map(function(file){
return fs.readFileSync(file, "utf8");
}).join("\n\n"))(
new Function("MOZ_SourceMap", "exports", function() {
var code = FILES.map(function(file) {
return fs.readFileSync(file, "utf8");
});
code.push("exports.describe_ast = " + describe_ast.toString());
return code.join("\n\n");
}())(
require("source-map"),
UglifyJS
);
UglifyJS.describe_ast = function() {
var out = UglifyJS.OutputStream({ beautify: true });
function describe_ast() {
var out = OutputStream({ beautify: true });
function doitem(ctor) {
out.print("AST_" + ctor.TYPE);
var props = ctor.SELF_PROPS.filter(function(prop){
@@ -56,6 +60,6 @@ UglifyJS.describe_ast = function() {
});
}
};
doitem(UglifyJS.AST_Node);
doitem(AST_Node);
return out + "";
};
}