Compare commits
221 Commits
.travis.yml (new file, 6 lines)

@@ -0,0 +1,6 @@
language: node_js
before_install: "npm install -g npm"
node_js:
- "0.8"
- "0.10"
- "0.11"

README.md (169 lines changed)
@@ -1,5 +1,6 @@
UglifyJS 2
==========
[](https://travis-ci.org/mishoo/UglifyJS2)

UglifyJS is a JavaScript parser, minifier, compressor or beautifier toolkit.

@@ -45,21 +46,31 @@ files.

The available options are:

```
--source-map Specify an output file where to generate source map.
[string]
--source-map-root The path to the original source to be included in the
source map. [string]
--source-map-url The path to the source map to be added in //@
--source-map-url The path to the source map to be added in //#
sourceMappingURL. Defaults to the value passed with
--source-map. [string]
--source-map-include-sources
Pass this flag if you want to include the content of
source files in the source map as sourcesContent
property. [boolean]
--in-source-map Input source map, useful if you're compressing JS that was
generated from some other original code.
--screw-ie8 Pass this flag if you don't care about full compliance with
Internet Explorer 6-8 quirks (by default UglifyJS will try
to be IE-proof).
--screw-ie8 Pass this flag if you don't care about full compliance
with Internet Explorer 6-8 quirks (by default UglifyJS
will try to be IE-proof). [boolean]
--expr Parse a single expression, rather than a program (for
parsing JSON) [boolean]
-p, --prefix Skip prefix for original filenames that appear in source
maps. For example -p 3 will drop 3 directories from file
names and ensure they are relative paths.
names and ensure they are relative paths. You can also
specify -p relative, which will make UglifyJS figure out
itself the relative paths between original sources, the
source map and the output file. [string]
-o, --output Output file (default STDOUT).
-b, --beautify Beautify output/specify output options. [string]
-m, --mangle Mangle names/pass mangler options. [string]

@@ -68,6 +79,8 @@ The available options are:
like -c hoist_vars=false,if_return=false. Use -c with no
argument to use the default compression options. [string]
-d, --define Global definitions [string]
-e, --enclose Embed everything in a big function, with a configurable
parameter/argument list. [string]
--comments Preserve copyright comments in the output. By default this
works like Google Closure, keeping JSDoc-style comments
that contain "@license" or "@preserve". You can optionally

@@ -78,9 +91,13 @@ The available options are:
Note that currently not *all* comments can be kept when
compression is on, because of dead code removal or
cascading statements into sequences. [string]
--preamble Preamble to prepend to the output. You can use this to
insert a comment, for example for licensing information.
This will not be parsed, but the source map will adjust
for its presence.
--stats Display operations run time on STDERR. [boolean]
--acorn Use Acorn for parsing. [boolean]
--spidermonkey Assume input fles are SpiderMonkey AST format (as JSON).
--spidermonkey Assume input files are SpiderMonkey AST format (as JSON).
[boolean]
--self Build itself (UglifyJS2) as a library (implies
--wrap=UglifyJS --export-all) [boolean]

@@ -94,6 +111,7 @@ The available options are:
--lint Display some scope warnings [boolean]
-v, --verbose Verbose [boolean]
-V, --version Print version number and exit. [boolean]
```

Specify `--output` (`-o`) to declare the output file. Otherwise the output
goes to STDOUT.

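As a quick orientation, here is a minimal sketch of driving the CLI from a Node build script; it only uses flags from the list above, the file names are placeholders, and error handling is kept to the bare minimum.

```javascript
// Minimal sketch: run the uglifyjs CLI from a build script.
// File names are placeholders; the flags are the ones documented above.
var execFile = require("child_process").execFile;

execFile("uglifyjs", [
    "src/app.js",
    "-c",                                   // compress with default options
    "-m",                                   // mangle names
    "--screw-ie8",
    "--source-map", "dist/app.min.js.map",
    "--source-map-url", "app.min.js.map",
    "-o", "dist/app.min.js"
], function (err, stdout, stderr) {
    if (err) throw err;
    if (stderr) console.error(stderr);      // warnings and stats go to STDERR
});
```
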
@@ -155,7 +173,7 @@ To enable the mangler you need to pass `--mangle` (`-m`). The following
- `toplevel` — mangle names declared in the toplevel scope (disabled by
default).

- `eval` — mangle names visible in scopes where `eval` or `when` are used
- `eval` — mangle names visible in scopes where `eval` or `with` are used
(disabled by default).

When mangling is enabled but you want to prevent certain names from being

@@ -174,32 +192,74 @@ you can pass a comma-separated list of options. Options are in the form
to set `true`; it's effectively a shortcut for `foo=true`).

- `sequences` -- join consecutive simple statements using the comma operator

- `properties` -- rewrite property access using the dot notation, for
example `foo["bar"] → foo.bar`

- `dead_code` -- remove unreachable code

- `drop_debugger` -- remove `debugger;` statements

- `unsafe` (default: false) -- apply "unsafe" transformations (discussion below)

- `conditionals` -- apply optimizations for `if`-s and conditional
expressions

- `comparisons` -- apply certain optimizations to binary nodes, for example:
`!(a <= b) → a > b` (only when `unsafe`), attempts to negate binary nodes,
e.g. `a = !b && !c && !d && !e → a=!(b||c||d||e)` etc.

- `evaluate` -- attempt to evaluate constant expressions

- `booleans` -- various optimizations for boolean context, for example `!!a
? b : c → a ? b : c`

- `loops` -- optimizations for `do`, `while` and `for` loops when we can
statically determine the condition

- `unused` -- drop unreferenced functions and variables

- `hoist_funs` -- hoist function declarations

- `hoist_vars` (default: false) -- hoist `var` declarations (this is `false`
by default because it seems to increase the size of the output in general)

- `if_return` -- optimizations for if/return and if/continue

- `join_vars` -- join consecutive `var` statements

- `cascade` -- small optimization for sequences, transform `x, x` into `x`
and `x = something(), x` into `x = something()`

- `warnings` -- display warnings when dropping unreachable code or unused
declarations etc.

- `negate_iife` -- negate "Immediately-Called Function Expressions"
where the return value is discarded, to avoid the parens that the
code generator would insert.

- `pure_getters` -- the default is `false`. If you pass `true` for
this, UglifyJS will assume that object property access
(e.g. `foo.bar` or `foo["bar"]`) doesn't have any side effects.

- `pure_funcs` -- default `null`. You can pass an array of names and
UglifyJS will assume that those functions do not produce side
effects. DANGER: will not check if the name is redefined in scope.
An example case here, for instance `var q = Math.floor(a/b)`. If
variable `q` is not used elsewhere, UglifyJS will drop it, but will
still keep the `Math.floor(a/b)`, not knowing what it does. You can
pass `pure_funcs: [ 'Math.floor' ]` to let it know that this
function won't produce any side effect, in which case the whole
statement would get discarded. The current implementation adds some
overhead (compression will be slower).

- `drop_console` -- default `false`. Pass `true` to discard calls to
`console.*` functions.

- `keep_fargs` -- default `false`. Pass `true` to prevent the
compressor from discarding unused function arguments. You need this
for code which relies on `Function.length`.

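As a rough sketch of how a few of these options are passed programmatically, using the lower-level `parse`/`Compressor` API documented in the API Reference below; the input string and option values are only illustrative.

```javascript
var UglifyJS = require("uglify-js");

var code = 'var q = Math.floor(a / b); console.log("done");';
var toplevel = UglifyJS.parse(code);
toplevel.figure_out_scope();

var compressor = UglifyJS.Compressor({
    pure_funcs   : [ "Math.floor" ], // treat Math.floor() as side-effect free
    drop_console : true,             // discard console.* calls
    keep_fargs   : true,             // keep unused function arguments
    warnings     : false
});
var compressed_ast = toplevel.transform(compressor);
console.log(compressed_ast.print_to_string());
```
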
### The `unsafe` option

It enables some transformations that *might* break code logic in certain

@@ -212,7 +272,7 @@ when this flag is on:
- `String(exp)` or `exp.toString()` → `"" + exp`
- `new Object/RegExp/Function/Error/Array (...)` → we discard the `new`
- `typeof foo == "undefined"` → `foo === void 0`
- `void 0` → `"undefined"` (if there is a variable named "undefined" in
- `void 0` → `undefined` (if there is a variable named "undefined" in
scope; we do it because the variable name will be mangled, typically
reduced to a single character).

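To make this concrete, here is a small illustration of the kind of rewrite the rules above describe; the exact output depends on the other options and should not be taken as the literal result.

```javascript
// Input given to the compressor with `unsafe` enabled:
var s = String(x);
var a = new Array(1, 2, 3);
var u = typeof foo == "undefined";

// Illustrative shape of the result (not guaranteed verbatim output):
//   var s = "" + x;
//   var a = Array(1, 2, 3);
//   var u = foo === void 0;
```
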
@@ -222,10 +282,11 @@ You can use the `--define` (`-d`) switch in order to declare global
variables that UglifyJS will assume to be constants (unless defined in
scope). For example if you pass `--define DEBUG=false` then, coupled with
dead code removal UglifyJS will discard the following from the output:

```javascript
if (DEBUG) {
    console.log("debug stuff");
}
```

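For reference, the `--define` switch maps onto the compressor's `global_defs` setting (the option is visible in the `lib/compress.js` and `bin/uglifyjs` changes further down this page); a sketch of the programmatic equivalent, with an illustrative input string:

```javascript
var UglifyJS = require("uglify-js");

var code = 'if (DEBUG) { console.log("debug stuff"); } run();';
var toplevel = UglifyJS.parse(code);
toplevel.figure_out_scope();

// --define DEBUG=false on the command line becomes global_defs here:
var compressed_ast = toplevel.transform(UglifyJS.Compressor({
    global_defs : { DEBUG: false },
    warnings    : false
}));
console.log(compressed_ast.print_to_string()); // the DEBUG branch should be gone
```
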
UglifyJS will warn about the condition being always false and about dropping
unreachable code; for now there is no option to turn off only this specific

@@ -234,10 +295,11 @@ warning, you can pass `warnings=false` to turn off *all* warnings.
Another way of doing that is to declare your globals as constants in a
separate file and include it into the build. For example you can have a
`build/defines.js` file with the following:

```javascript
const DEBUG = false;
const PRODUCTION = true;
// etc.
```

and build your code like this:

@@ -274,9 +336,6 @@ can pass additional arguments that control the code output:
It doesn't work very well currently, but it does make the code generated
by UglifyJS more readable.
- `max-line-len` (default 32000) -- maximum line length (for uglified code)
- `ie-proof` (default `true`) -- generate “IE-proof” code (for now this
means add brackets around the do/while in code like this: `if (foo) do
something(); while (bar); else ...`.
- `bracketize` (default `false`) -- always insert brackets in `if`, `for`,
`do`, `while` or `with` statements, even if their body is a single
statement.

@@ -284,6 +343,10 @@ can pass additional arguments that control the code output:
you pass `false` then whenever possible we will use a newline instead of a
semicolon, leading to more readable output of uglified code (size before
gzip could be smaller; size after gzip insignificantly larger).
- `preamble` (default `null`) -- when passed it must be a string and
it will be prepended to the output literally. The source map will
adjust for this text. Can be used to insert a comment containing
licensing information, for example.

### Keeping copyright notices or other comments

@@ -296,7 +359,7 @@ keep only comments that match this regexp. For example `--comments

Note, however, that there might be situations where comments are lost. For
example:

```javascript
function f() {
    /** @preserve Foo Bar */
    function g() {

@@ -304,6 +367,7 @@ example:
    }
    return something();
}
```

Even though it has "@preserve", the comment will be lost because the inner
function `g` (which is the AST node to which the comment is attached to) is

@@ -340,13 +404,46 @@ Acorn is really fast (e.g. 250ms instead of 380ms on some 650K code), but
converting the SpiderMonkey tree that Acorn produces takes another 150ms so
in total it's a bit more than just using UglifyJS's own parser.

### Using UglifyJS to transform SpiderMonkey AST

Now you can use UglifyJS as any other intermediate tool for transforming
JavaScript ASTs in SpiderMonkey format.

Example:

```javascript
function uglify(ast, options, mangle) {
    // Conversion from SpiderMonkey AST to internal format
    var uAST = UglifyJS.AST_Node.from_mozilla_ast(ast);

    // Compression
    uAST.figure_out_scope();
    uAST = uAST.transform(UglifyJS.Compressor(options));

    // Mangling (optional)
    if (mangle) {
        uAST.figure_out_scope();
        uAST.compute_char_frequency();
        uAST.mangle_names();
    }

    // Back-conversion to SpiderMonkey AST
    return uAST.to_mozilla_ast();
}
```

Check out
[original blog post](http://rreverser.com/using-mozilla-ast-with-uglifyjs/)
for details.

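The same entry point also accepts an AST produced by Acorn (the `--acorn` path mentioned above). A sketch, assuming the `acorn` package is installed; the parser options mirror the ones used by `bin/uglifyjs` later on this page.

```javascript
var acorn = require("acorn");
var UglifyJS = require("uglify-js");

var code = "var answer = 40 + 2;";

// Acorn produces a SpiderMonkey-format AST...
var moz_ast = acorn.parse(code, { locations: true, sourceFile: "answer.js" });

// ...which goes through the same conversion and pipeline as above.
var ast = UglifyJS.AST_Node.from_mozilla_ast(moz_ast);
ast.figure_out_scope();
ast = ast.transform(UglifyJS.Compressor());
console.log(ast.print_to_string());
```
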
API Reference
-------------

Assuming installation via NPM, you can load UglifyJS in your application
like this:

```javascript
var UglifyJS = require("uglify-js");
```

It exports a lot of names, but I'll discuss here the basics that are needed
for parsing, mangling and compressing a piece of code. The sequence is (1)

@@ -357,45 +454,49 @@ parse, (2) compress, (3) mangle, (4) generate output code.
There's a single toplevel function which combines all the steps. If you
don't need additional customization, you might want to go with `minify`.
Example:

```javascript
var result = UglifyJS.minify("/path/to/file.js");
console.log(result.code); // minified output
// if you need to pass code instead of file name
var result = UglifyJS.minify("var b = function () {};", {fromString: true});
```

You can also compress multiple files:

```javascript
var result = UglifyJS.minify([ "file1.js", "file2.js", "file3.js" ]);
console.log(result.code);
```

To generate a source map:

```javascript
var result = UglifyJS.minify([ "file1.js", "file2.js", "file3.js" ], {
    outSourceMap: "out.js.map"
});
console.log(result.code); // minified output
console.log(result.map);
```

Note that the source map is not saved in a file, it's just returned in
`result.map`. The value passed for `outSourceMap` is only used to set the
`file` attribute in the source map (see [the spec][sm-spec]).

You can also specify sourceRoot property to be included in source map:

```javascript
var result = UglifyJS.minify([ "file1.js", "file2.js", "file3.js" ], {
    outSourceMap: "out.js.map",
    sourceRoot: "http://example.com/src"
});

```

If you're compressing compiled JavaScript and have a source map for it, you
can use the `inSourceMap` argument:

```javascript
var result = UglifyJS.minify("compiled.js", {
    inSourceMap: "compiled.js.map",
    outSourceMap: "minified.js.map"
});
// same as before, it returns `code` and `map`
```

The `inSourceMap` is only used if you also request `outSourceMap` (it makes
no sense otherwise).

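The CLI writes the map file itself and appends a `sourceMappingURL` comment (see the `bin/uglifyjs` changes later on this page); when calling `minify` directly that step is yours. A sketch, with illustrative file names:

```javascript
var fs = require("fs");
var UglifyJS = require("uglify-js");

var result = UglifyJS.minify([ "file1.js", "file2.js" ], {
    outSourceMap: "out.js.map"
});

// minify only returns the map; persist it and point the output at it.
fs.writeFileSync("out.js.map", result.map, "utf8");
fs.writeFileSync("out.js", result.code + "\n//# sourceMappingURL=out.js.map", "utf8");
```
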
@@ -425,8 +526,9 @@ Following there's more detailed API info, in case the `minify` function is
too simple for your needs.

#### The parser

```javascript
var toplevel_ast = UglifyJS.parse(code, options);
```

`options` is optional and if present it must be an object. The following
properties are available:

@@ -440,15 +542,16 @@ properties are available:
The last two options are useful when you'd like to minify multiple files and
get a single file as the output and a proper source map. Our CLI tool does
something like this:

```javascript
var toplevel = null;
files.forEach(function(file){
    var code = fs.readFileSync(file);
    var code = fs.readFileSync(file, "utf8");
    toplevel = UglifyJS.parse(code, {
        filename: file,
        toplevel: toplevel
    });
});
```

After this, we have in `toplevel` a big AST containing all our files, with
each token having proper information about where it came from.

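The parser can also be asked for a single expression instead of a whole program; the `expression` option name below is taken from the `bin/uglifyjs` changes on this page (it backs the new `--expr` flag), so treat this as a sketch.

```javascript
var UglifyJS = require("uglify-js");

// Parse one expression (handy for JSON-like input) rather than a program.
var node = UglifyJS.parse('{"list": [1, 2, 3]}', { expression: true });
console.log(node.print_to_string());
```
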
@@ -462,15 +565,17 @@ referenced, if it is a global or not, if a function is using `eval` or the
`with` statement etc. I will discuss this some place else, for now what's
important to know is that you need to call the following before doing
anything with the tree:

```javascript
toplevel.figure_out_scope()
```

#### Compression

Like this:

```javascript
var compressor = UglifyJS.Compressor(options);
var compressed_ast = toplevel.transform(compressor);
```

The `options` can be missing. Available options are discussed above in
“Compressor options”. Defaults should lead to best compression in most

@@ -486,23 +591,26 @@ the compressor might drop unused variables / unreachable code and this might
change the number of identifiers or their position). Optionally, you can
call a trick that helps after Gzip (counting character frequency in
non-mangleable words). Example:

```javascript
compressed_ast.figure_out_scope();
compressed_ast.compute_char_frequency();
compressed_ast.mangle_names();
```

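If certain names must survive mangling, they can be excluded; the `except` option below mirrors what `bin/uglifyjs` builds from the `-r` flag (see its changes later on this page), and the reserved names here are only an example.

```javascript
compressed_ast.figure_out_scope();
compressed_ast.compute_char_frequency();
// `except` lists names to leave untouched (illustrative values).
compressed_ast.mangle_names({ except: [ "$", "require", "exports" ] });
```
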
#### Generating output

AST nodes have a `print` method that takes an output stream. Essentially,
to generate code you do this:

```javascript
var stream = UglifyJS.OutputStream(options);
compressed_ast.print(stream);
var code = stream.toString(); // this is your minified code
```

or, for a shortcut you can do:

```javascript
var code = compressed_ast.print_to_string(options);
```

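Output options are passed the same way; for instance `beautify` and the `preamble` option added in this changeset can be given to `print_to_string` (the preamble text here is just an example).

```javascript
var code = compressed_ast.print_to_string({
    beautify : true,
    preamble : "/* Acme Widgets 1.2 | MIT license */" // prepended literally
});
```
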
As usual, `options` is optional. The output stream accepts a lot of options,
most of them documented above in section “Beautifier options”. The two

@@ -540,7 +648,7 @@ to be a `SourceMap` object (which is a thin wrapper on top of the
[source-map][source-map] library).

Example:

```javascript
var source_map = UglifyJS.SourceMap(source_map_options);
var stream = UglifyJS.OutputStream({
    ...

@@ -550,6 +658,7 @@ Example:

var code = stream.toString();
var map = source_map.toString(); // json output for your source map
```

The `source_map_options` (optional) can contain the following properties:

bin/uglifyjs (105 lines changed)
@@ -7,6 +7,7 @@ var UglifyJS = require("../tools/node");
|
||||
var sys = require("util");
|
||||
var optimist = require("optimist");
|
||||
var fs = require("fs");
|
||||
var path = require("path");
|
||||
var async = require("async");
|
||||
var acorn;
|
||||
var ARGS = optimist
|
||||
@@ -20,11 +21,15 @@ mangling you need to use `-c` and `-m`.\
|
||||
")
|
||||
.describe("source-map", "Specify an output file where to generate source map.")
|
||||
.describe("source-map-root", "The path to the original source to be included in the source map.")
|
||||
.describe("source-map-url", "The path to the source map to be added in //@ sourceMappingURL. Defaults to the value passed with --source-map.")
|
||||
.describe("source-map-url", "The path to the source map to be added in //# sourceMappingURL. Defaults to the value passed with --source-map.")
|
||||
.describe("source-map-include-sources", "Pass this flag if you want to include the content of source files in the source map as sourcesContent property.")
|
||||
.describe("in-source-map", "Input source map, useful if you're compressing JS that was generated from some other original code.")
|
||||
.describe("screw-ie8", "Pass this flag if you don't care about full compliance with Internet Explorer 6-8 quirks (by default UglifyJS will try to be IE-proof).")
|
||||
.describe("expr", "Parse a single expression, rather than a program (for parsing JSON)")
|
||||
.describe("p", "Skip prefix for original filenames that appear in source maps. \
|
||||
For example -p 3 will drop 3 directories from file names and ensure they are relative paths.")
|
||||
For example -p 3 will drop 3 directories from file names and ensure they are relative paths. \
|
||||
You can also specify -p relative, which will make UglifyJS figure out itself the relative paths between original sources, \
|
||||
the source map and the output file.")
|
||||
.describe("o", "Output file (default STDOUT).")
|
||||
.describe("b", "Beautify output/specify output options.")
|
||||
.describe("m", "Mangle names/pass mangler options.")
|
||||
@@ -44,9 +49,13 @@ You can optionally pass one of the following arguments to this flag:\n\
|
||||
Note that currently not *all* comments can be kept when compression is on, \
|
||||
because of dead code removal or cascading statements into sequences.")
|
||||
|
||||
.describe("preamble", "Preamble to prepend to the output. You can use this to insert a \
|
||||
comment, for example for licensing information. This will not be \
|
||||
parsed, but the source map will adjust for its presence.")
|
||||
|
||||
.describe("stats", "Display operations run time on STDERR.")
|
||||
.describe("acorn", "Use Acorn for parsing.")
|
||||
.describe("spidermonkey", "Assume input fles are SpiderMonkey AST format (as JSON).")
|
||||
.describe("spidermonkey", "Assume input files are SpiderMonkey AST format (as JSON).")
|
||||
.describe("self", "Build itself (UglifyJS2) as a library (implies --wrap=UglifyJS --export-all)")
|
||||
.describe("wrap", "Embed everything in a big function, making the “exports” and “global” variables available. \
|
||||
You need to pass an argument to this option to specify the name that your module will take when included in, say, a browser.")
|
||||
@@ -54,6 +63,8 @@ You need to pass an argument to this option to specify the name that your module
|
||||
.describe("lint", "Display some scope warnings")
|
||||
.describe("v", "Verbose")
|
||||
.describe("V", "Print version number and exit.")
|
||||
.describe("noerr", "Don't throw an error for unknown options in -c, -b or -m.")
|
||||
.describe("bare-returns", "Allow return outside of functions. Useful when minifying CommonJS modules.")
|
||||
|
||||
.alias("p", "prefix")
|
||||
.alias("o", "output")
|
||||
@@ -76,6 +87,10 @@ You need to pass an argument to this option to specify the name that your module
|
||||
.string("e")
|
||||
.string("comments")
|
||||
.string("wrap")
|
||||
.string("p")
|
||||
|
||||
.boolean("expr")
|
||||
.boolean("source-map-include-sources")
|
||||
.boolean("screw-ie8")
|
||||
.boolean("export-all")
|
||||
.boolean("self")
|
||||
@@ -85,6 +100,8 @@ You need to pass an argument to this option to specify the name that your module
|
||||
.boolean("spidermonkey")
|
||||
.boolean("lint")
|
||||
.boolean("V")
|
||||
.boolean("noerr")
|
||||
.boolean("bare-returns")
|
||||
|
||||
.wrap(80)
|
||||
|
||||
@@ -93,6 +110,12 @@ You need to pass an argument to this option to specify the name that your module
|
||||
|
||||
normalize(ARGS);
|
||||
|
||||
if (ARGS.noerr) {
|
||||
UglifyJS.DefaultsError.croak = function(msg, defs) {
|
||||
sys.error("WARN: " + msg);
|
||||
};
|
||||
}
|
||||
|
||||
if (ARGS.version || ARGS.V) {
|
||||
var json = require("../package.json");
|
||||
sys.puts(json.name + ' ' + json.version);
|
||||
@@ -122,19 +145,21 @@ if (ARGS.d) {
|
||||
if (COMPRESS) COMPRESS.global_defs = getOptions("d");
|
||||
}
|
||||
|
||||
if (ARGS.screw_ie8) {
|
||||
if (COMPRESS) COMPRESS.screw_ie8 = true;
|
||||
if (MANGLE) MANGLE.screw_ie8 = true;
|
||||
}
|
||||
|
||||
if (ARGS.r) {
|
||||
if (MANGLE) MANGLE.except = ARGS.r.replace(/^\s+|\s+$/g).split(/\s*,+\s*/);
|
||||
}
|
||||
|
||||
var OUTPUT_OPTIONS = {
|
||||
beautify: BEAUTIFY ? true : false
|
||||
beautify: BEAUTIFY ? true : false,
|
||||
preamble: ARGS.preamble || null,
|
||||
};
|
||||
|
||||
if (ARGS.screw_ie8) {
|
||||
if (COMPRESS) COMPRESS.screw_ie8 = true;
|
||||
if (MANGLE) MANGLE.screw_ie8 = true;
|
||||
OUTPUT_OPTIONS.screw_ie8 = true;
|
||||
}
|
||||
|
||||
if (BEAUTIFY)
|
||||
UglifyJS.merge(OUTPUT_OPTIONS, BEAUTIFY);
|
||||
|
||||
@@ -196,9 +221,11 @@ if (files.filter(function(el){ return el == "-" }).length > 1) {
|
||||
var STATS = {};
|
||||
var OUTPUT_FILE = ARGS.o;
|
||||
var TOPLEVEL = null;
|
||||
var P_RELATIVE = ARGS.p && ARGS.p == "relative";
|
||||
var SOURCES_CONTENT = {};
|
||||
|
||||
var SOURCE_MAP = ARGS.source_map ? UglifyJS.SourceMap({
|
||||
file: OUTPUT_FILE,
|
||||
file: P_RELATIVE ? path.relative(path.dirname(ARGS.source_map), OUTPUT_FILE) : OUTPUT_FILE,
|
||||
root: ARGS.source_map_root,
|
||||
orig: ORIG_MAP,
|
||||
}) : null;
|
||||
@@ -220,12 +247,20 @@ try {
|
||||
async.eachLimit(files, 1, function (file, cb) {
|
||||
read_whole_file(file, function (err, code) {
|
||||
if (err) {
|
||||
sys.error("ERROR: can't read file: " + filename);
|
||||
sys.error("ERROR: can't read file: " + file);
|
||||
process.exit(1);
|
||||
}
|
||||
if (ARGS.p != null) {
|
||||
if (P_RELATIVE) {
|
||||
file = path.relative(path.dirname(ARGS.source_map), file);
|
||||
} else {
|
||||
var p = parseInt(ARGS.p, 10);
|
||||
if (!isNaN(p)) {
|
||||
file = file.replace(/^\/+/, "").split(/\/+/).slice(ARGS.p).join("/");
|
||||
}
|
||||
}
|
||||
}
|
||||
SOURCES_CONTENT[file] = code;
|
||||
time_it("parse", function(){
|
||||
if (ARGS.spidermonkey) {
|
||||
var program = JSON.parse(code);
|
||||
@@ -235,16 +270,27 @@ async.eachLimit(files, 1, function (file, cb) {
|
||||
else if (ARGS.acorn) {
|
||||
TOPLEVEL = acorn.parse(code, {
|
||||
locations : true,
|
||||
trackComments : true,
|
||||
sourceFile : file,
|
||||
program : TOPLEVEL
|
||||
});
|
||||
}
|
||||
else {
|
||||
try {
|
||||
TOPLEVEL = UglifyJS.parse(code, {
|
||||
filename : file,
|
||||
toplevel: TOPLEVEL
|
||||
toplevel : TOPLEVEL,
|
||||
expression : ARGS.expr,
|
||||
bare_returns : ARGS.bare_returns,
|
||||
});
|
||||
} catch(ex) {
|
||||
if (ex instanceof UglifyJS.JS_Parse_Error) {
|
||||
sys.error("Parse error at " + file + ":" + ex.line + "," + ex.col);
|
||||
sys.error(ex.message);
|
||||
sys.error(ex.stack);
|
||||
process.exit(1);
|
||||
}
|
||||
throw ex;
|
||||
}
|
||||
};
|
||||
});
|
||||
cb();
|
||||
@@ -260,11 +306,12 @@ async.eachLimit(files, 1, function (file, cb) {
|
||||
|
||||
if (ARGS.enclose) {
|
||||
var arg_parameter_list = ARGS.enclose;
|
||||
|
||||
if (!(arg_parameter_list instanceof Array)) {
|
||||
if (arg_parameter_list === true) {
|
||||
arg_parameter_list = [];
|
||||
}
|
||||
else if (!(arg_parameter_list instanceof Array)) {
|
||||
arg_parameter_list = [arg_parameter_list];
|
||||
}
|
||||
|
||||
TOPLEVEL = TOPLEVEL.wrap_enclose(arg_parameter_list);
|
||||
}
|
||||
|
||||
@@ -297,6 +344,15 @@ async.eachLimit(files, 1, function (file, cb) {
|
||||
if (MANGLE) time_it("mangle", function(){
|
||||
TOPLEVEL.mangle_names(MANGLE);
|
||||
});
|
||||
|
||||
if (ARGS.source_map_include_sources) {
|
||||
for (var file in SOURCES_CONTENT) {
|
||||
if (SOURCES_CONTENT.hasOwnProperty(file)) {
|
||||
SOURCE_MAP.get().setSourceContent(file, SOURCES_CONTENT[file]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
time_it("generate", function(){
|
||||
TOPLEVEL.print(output);
|
||||
});
|
||||
@@ -305,14 +361,18 @@ async.eachLimit(files, 1, function (file, cb) {
|
||||
|
||||
if (SOURCE_MAP) {
|
||||
fs.writeFileSync(ARGS.source_map, SOURCE_MAP, "utf8");
|
||||
output += "\n/*\n//@ sourceMappingURL=" + (ARGS.source_map_url || ARGS.source_map) + "\n*/";
|
||||
var source_map_url = ARGS.source_map_url || (
|
||||
P_RELATIVE
|
||||
? path.relative(path.dirname(OUTPUT_FILE), ARGS.source_map)
|
||||
: ARGS.source_map
|
||||
);
|
||||
output += "\n//# sourceMappingURL=" + source_map_url;
|
||||
}
|
||||
|
||||
if (OUTPUT_FILE) {
|
||||
fs.writeFileSync(OUTPUT_FILE, output, "utf8");
|
||||
} else {
|
||||
sys.print(output);
|
||||
sys.error("\n");
|
||||
}
|
||||
|
||||
if (ARGS.stats) {
|
||||
@@ -344,7 +404,7 @@ function getOptions(x, constants) {
|
||||
if (x !== true) {
|
||||
var ast;
|
||||
try {
|
||||
ast = UglifyJS.parse(x);
|
||||
ast = UglifyJS.parse(x, { expression: true });
|
||||
} catch(ex) {
|
||||
if (ex instanceof UglifyJS.JS_Parse_Error) {
|
||||
sys.error("Error parsing arguments in: " + x);
|
||||
@@ -352,8 +412,6 @@ function getOptions(x, constants) {
|
||||
}
|
||||
}
|
||||
ast.walk(new UglifyJS.TreeWalker(function(node){
|
||||
if (node instanceof UglifyJS.AST_Toplevel) return; // descend
|
||||
if (node instanceof UglifyJS.AST_SimpleStatement) return; // descend
|
||||
if (node instanceof UglifyJS.AST_Seq) return; // descend
|
||||
if (node instanceof UglifyJS.AST_Assign) {
|
||||
var name = node.left.print_to_string({ beautify: false }).replace(/-/g, "_");
|
||||
@@ -363,6 +421,11 @@ function getOptions(x, constants) {
|
||||
ret[name] = value;
|
||||
return true; // no descend
|
||||
}
|
||||
if (node instanceof UglifyJS.AST_Symbol || node instanceof UglifyJS.AST_Binary) {
|
||||
var name = node.print_to_string({ beautify: false }).replace(/-/g, "_");
|
||||
ret[name] = true;
|
||||
return true; // no descend
|
||||
}
|
||||
sys.error(node.TYPE)
|
||||
sys.error("Error parsing arguments in: " + x);
|
||||
process.exit(1);
|
||||
|
||||
lib/ast.js (49 lines changed)
@@ -197,6 +197,10 @@ var AST_LabeledStatement = DEFNODE("LabeledStatement", "label", {
|
||||
}
|
||||
}, AST_StatementWithBody);
|
||||
|
||||
var AST_IterationStatement = DEFNODE("IterationStatement", null, {
|
||||
$documentation: "Internal class. All loops inherit from it."
|
||||
}, AST_StatementWithBody);
|
||||
|
||||
var AST_DWLoop = DEFNODE("DWLoop", "condition", {
|
||||
$documentation: "Base class for do/while statements",
|
||||
$propdoc: {
|
||||
@@ -208,7 +212,7 @@ var AST_DWLoop = DEFNODE("DWLoop", "condition", {
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_StatementWithBody);
|
||||
}, AST_IterationStatement);
|
||||
|
||||
var AST_Do = DEFNODE("Do", null, {
|
||||
$documentation: "A `do` statement",
|
||||
@@ -233,7 +237,7 @@ var AST_For = DEFNODE("For", "init condition step", {
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_StatementWithBody);
|
||||
}, AST_IterationStatement);
|
||||
|
||||
var AST_ForIn = DEFNODE("ForIn", "init name object", {
|
||||
$documentation: "A `for ... in` statement",
|
||||
@@ -249,7 +253,7 @@ var AST_ForIn = DEFNODE("ForIn", "init name object", {
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_StatementWithBody);
|
||||
}, AST_IterationStatement);
|
||||
|
||||
var AST_With = DEFNODE("With", "expression", {
|
||||
$documentation: "A `with` statement",
|
||||
@@ -291,10 +295,10 @@ var AST_Toplevel = DEFNODE("Toplevel", "globals", {
|
||||
var parameters = [];
|
||||
|
||||
arg_parameter_pairs.forEach(function(pair) {
|
||||
var split = pair.split(":");
|
||||
var splitAt = pair.lastIndexOf(":");
|
||||
|
||||
args.push(split[0]);
|
||||
parameters.push(split[1]);
|
||||
args.push(pair.substr(0, splitAt));
|
||||
parameters.push(pair.substr(splitAt + 1));
|
||||
});
|
||||
|
||||
var wrapped_tl = "(function(" + parameters.join(",") + "){ '$ORIG'; })(" + args.join(",") + ")";
|
||||
@@ -367,7 +371,7 @@ var AST_Lambda = DEFNODE("Lambda", "name argnames uses_arguments", {
|
||||
}, AST_Scope);
|
||||
|
||||
var AST_Accessor = DEFNODE("Accessor", null, {
|
||||
$documentation: "A setter/getter function"
|
||||
$documentation: "A setter/getter function. The `name` property is always null."
|
||||
}, AST_Lambda);
|
||||
|
||||
var AST_Function = DEFNODE("Function", null, {
|
||||
@@ -494,12 +498,6 @@ var AST_Try = DEFNODE("Try", "bcatch bfinally", {
|
||||
}
|
||||
}, AST_Block);
|
||||
|
||||
// XXX: this is wrong according to ECMA-262 (12.4). the catch block
|
||||
// should introduce another scope, as the argname should be visible
|
||||
// only inside the catch block. However, doing it this way because of
|
||||
// IE which simply introduces the name in the surrounding scope. If
|
||||
// we ever want to fix this then AST_Catch should inherit from
|
||||
// AST_Scope.
|
||||
var AST_Catch = DEFNODE("Catch", "argname", {
|
||||
$documentation: "A `catch` node; only makes sense as part of a `try` statement",
|
||||
$propdoc: {
|
||||
@@ -752,7 +750,7 @@ var AST_Object = DEFNODE("Object", "properties", {
|
||||
var AST_ObjectProperty = DEFNODE("ObjectProperty", "key value", {
|
||||
$documentation: "Base class for literal object properties",
|
||||
$propdoc: {
|
||||
key: "[string] the property name; it's always a plain string in our AST, no matter if it was a string, number or identifier in original code",
|
||||
key: "[string] the property name converted to a string for ObjectKeyVal. For setters and getters this is an arbitrary AST_Node.",
|
||||
value: "[AST_Node] property value. For setters and getters this is an AST_Function."
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
@@ -821,7 +819,11 @@ var AST_SymbolCatch = DEFNODE("SymbolCatch", null, {
|
||||
var AST_Label = DEFNODE("Label", "references", {
|
||||
$documentation: "Symbol naming a label (declaration)",
|
||||
$propdoc: {
|
||||
references: "[AST_LabelRef*] a list of nodes referring to this label"
|
||||
references: "[AST_LoopControl*] a list of nodes referring to this label"
|
||||
},
|
||||
initialize: function() {
|
||||
this.references = [];
|
||||
this.thedef = this;
|
||||
}
|
||||
}, AST_Symbol);
|
||||
|
||||
@@ -945,6 +947,9 @@ TreeWalker.prototype = {
|
||||
if (x instanceof type) return x;
|
||||
}
|
||||
},
|
||||
has_directive: function(type) {
|
||||
return this.find_parent(AST_Scope).has_directive(type);
|
||||
},
|
||||
in_boolean_context: function() {
|
||||
var stack = this.stack;
|
||||
var i = stack.length, self = stack[--i];
|
||||
@@ -965,21 +970,15 @@ TreeWalker.prototype = {
|
||||
},
|
||||
loopcontrol_target: function(label) {
|
||||
var stack = this.stack;
|
||||
if (label) {
|
||||
for (var i = stack.length; --i >= 0;) {
|
||||
if (label) for (var i = stack.length; --i >= 0;) {
|
||||
var x = stack[i];
|
||||
if (x instanceof AST_LabeledStatement && x.label.name == label.name) {
|
||||
return x.body;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for (var i = stack.length; --i >= 0;) {
|
||||
} else for (var i = stack.length; --i >= 0;) {
|
||||
var x = stack[i];
|
||||
if (x instanceof AST_Switch
|
||||
|| x instanceof AST_For
|
||||
|| x instanceof AST_ForIn
|
||||
|| x instanceof AST_DWLoop) return x;
|
||||
}
|
||||
if (x instanceof AST_Switch || x instanceof AST_IterationStatement)
|
||||
return x;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
lib/compress.js (676 lines changed)
@@ -61,12 +61,18 @@ function Compressor(options, false_by_default) {
|
||||
loops : !false_by_default,
|
||||
unused : !false_by_default,
|
||||
hoist_funs : !false_by_default,
|
||||
keep_fargs : false,
|
||||
hoist_vars : false,
|
||||
if_return : !false_by_default,
|
||||
join_vars : !false_by_default,
|
||||
cascade : !false_by_default,
|
||||
side_effects : !false_by_default,
|
||||
pure_getters : false,
|
||||
pure_funcs : null,
|
||||
negate_iife : !false_by_default,
|
||||
screw_ie8 : false,
|
||||
drop_console : false,
|
||||
angular : false,
|
||||
|
||||
warnings : true,
|
||||
global_defs : {}
|
||||
@@ -82,22 +88,16 @@ merge(Compressor.prototype, {
|
||||
},
|
||||
before: function(node, descend, in_list) {
|
||||
if (node._squeezed) return node;
|
||||
var was_scope = false;
|
||||
if (node instanceof AST_Scope) {
|
||||
node.drop_unused(this);
|
||||
node = node.hoist_declarations(this);
|
||||
was_scope = true;
|
||||
}
|
||||
descend(node, this);
|
||||
node = node.optimize(this);
|
||||
if (node instanceof AST_Scope) {
|
||||
// dead code removal might leave further unused declarations.
|
||||
// this'll usually save very few bytes, but the performance
|
||||
// hit seems negligible so I'll just drop it here.
|
||||
|
||||
// no point to repeat warnings.
|
||||
var save_warnings = this.options.warnings;
|
||||
this.options.warnings = false;
|
||||
if (was_scope && node instanceof AST_Scope) {
|
||||
node.drop_unused(this);
|
||||
this.options.warnings = save_warnings;
|
||||
descend(node, this);
|
||||
}
|
||||
node._squeezed = true;
|
||||
return node;
|
||||
@@ -200,6 +200,9 @@ merge(Compressor.prototype, {
|
||||
var CHANGED;
|
||||
do {
|
||||
CHANGED = false;
|
||||
if (compressor.option("angular")) {
|
||||
statements = process_for_angular(statements);
|
||||
}
|
||||
statements = eliminate_spurious_blocks(statements);
|
||||
if (compressor.option("dead_code")) {
|
||||
statements = eliminate_dead_code(statements, compressor);
|
||||
@@ -214,8 +217,57 @@ merge(Compressor.prototype, {
|
||||
statements = join_consecutive_vars(statements, compressor);
|
||||
}
|
||||
} while (CHANGED);
|
||||
|
||||
if (compressor.option("negate_iife")) {
|
||||
negate_iifes(statements, compressor);
|
||||
}
|
||||
|
||||
return statements;
|
||||
|
||||
function process_for_angular(statements) {
|
||||
function make_injector(func, name) {
|
||||
return make_node(AST_SimpleStatement, func, {
|
||||
body: make_node(AST_Assign, func, {
|
||||
operator: "=",
|
||||
left: make_node(AST_Dot, name, {
|
||||
expression: make_node(AST_SymbolRef, name, name),
|
||||
property: "$inject"
|
||||
}),
|
||||
right: make_node(AST_Array, func, {
|
||||
elements: func.argnames.map(function(sym){
|
||||
return make_node(AST_String, sym, { value: sym.name });
|
||||
})
|
||||
})
|
||||
})
|
||||
});
|
||||
}
|
||||
return statements.reduce(function(a, stat){
|
||||
a.push(stat);
|
||||
var token = stat.start;
|
||||
var comments = token.comments_before;
|
||||
if (comments && comments.length > 0) {
|
||||
var last = comments.pop();
|
||||
if (/@ngInject/.test(last.value)) {
|
||||
// case 1: defun
|
||||
if (stat instanceof AST_Defun) {
|
||||
a.push(make_injector(stat, stat.name));
|
||||
}
|
||||
else if (stat instanceof AST_Definitions) {
|
||||
stat.definitions.forEach(function(def){
|
||||
if (def.value && def.value instanceof AST_Lambda) {
|
||||
a.push(make_injector(def.value, def.name));
|
||||
}
|
||||
});
|
||||
}
|
||||
else {
|
||||
compressor.warn("Unknown statement marked with @ngInject [{file}:{line},{col}]", token);
|
||||
}
|
||||
}
|
||||
}
|
||||
return a;
|
||||
}, []);
|
||||
}
|
||||
|
||||
function eliminate_spurious_blocks(statements) {
|
||||
var seen_dirs = [];
|
||||
return statements.reduce(function(a, stat){
|
||||
@@ -318,14 +370,14 @@ merge(Compressor.prototype, {
|
||||
|| (ab instanceof AST_Continue && self === loop_body(lct))
|
||||
|| (ab instanceof AST_Break && lct instanceof AST_BlockStatement && self === lct))) {
|
||||
if (ab.label) {
|
||||
remove(ab.label.thedef.references, ab.label);
|
||||
remove(ab.label.thedef.references, ab);
|
||||
}
|
||||
CHANGED = true;
|
||||
var body = as_statement_array(stat.body).slice(0, -1);
|
||||
stat = stat.clone();
|
||||
stat.condition = stat.condition.negate(compressor);
|
||||
stat.body = make_node(AST_BlockStatement, stat, {
|
||||
body: ret
|
||||
body: as_statement_array(stat.alternative).concat(ret)
|
||||
});
|
||||
stat.alternative = make_node(AST_BlockStatement, stat, {
|
||||
body: body
|
||||
@@ -340,7 +392,7 @@ merge(Compressor.prototype, {
|
||||
|| (ab instanceof AST_Continue && self === loop_body(lct))
|
||||
|| (ab instanceof AST_Break && lct instanceof AST_BlockStatement && self === lct))) {
|
||||
if (ab.label) {
|
||||
remove(ab.label.thedef.references, ab.label);
|
||||
remove(ab.label.thedef.references, ab);
|
||||
}
|
||||
CHANGED = true;
|
||||
stat = stat.clone();
|
||||
@@ -379,7 +431,7 @@ merge(Compressor.prototype, {
|
||||
&& loop_body(lct) === self) || (stat instanceof AST_Continue
|
||||
&& loop_body(lct) === self)) {
|
||||
if (stat.label) {
|
||||
remove(stat.label.thedef.references, stat.label);
|
||||
remove(stat.label.thedef.references, stat);
|
||||
}
|
||||
} else {
|
||||
a.push(stat);
|
||||
@@ -497,6 +549,40 @@ merge(Compressor.prototype, {
|
||||
}, []);
|
||||
};
|
||||
|
||||
function negate_iifes(statements, compressor) {
|
||||
statements.forEach(function(stat){
|
||||
if (stat instanceof AST_SimpleStatement) {
|
||||
stat.body = (function transform(thing) {
|
||||
return thing.transform(new TreeTransformer(function(node){
|
||||
if (node instanceof AST_Call && node.expression instanceof AST_Function) {
|
||||
return make_node(AST_UnaryPrefix, node, {
|
||||
operator: "!",
|
||||
expression: node
|
||||
});
|
||||
}
|
||||
else if (node instanceof AST_Call) {
|
||||
node.expression = transform(node.expression);
|
||||
}
|
||||
else if (node instanceof AST_Seq) {
|
||||
node.car = transform(node.car);
|
||||
}
|
||||
else if (node instanceof AST_Conditional) {
|
||||
var expr = transform(node.condition);
|
||||
if (expr !== node.condition) {
|
||||
// it has been negated, reverse
|
||||
node.condition = expr;
|
||||
var tmp = node.consequent;
|
||||
node.consequent = node.alternative;
|
||||
node.alternative = tmp;
|
||||
}
|
||||
}
|
||||
return node;
|
||||
}));
|
||||
})(stat.body);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
};
|
||||
|
||||
function extract_declarations_from_unreachable_code(compressor, stat, target) {
|
||||
@@ -590,14 +676,14 @@ merge(Compressor.prototype, {
|
||||
// elements. If the node has been successfully reduced to a
|
||||
// constant, then the second element tells us the value;
|
||||
// otherwise the second element is missing. The first element
|
||||
// of the array is always an AST_Node descendant; when
|
||||
// of the array is always an AST_Node descendant; if
|
||||
// evaluation was successful it's a node that represents the
|
||||
// constant; otherwise it's the original node.
|
||||
// constant; otherwise it's the original or a replacement node.
|
||||
AST_Node.DEFMETHOD("evaluate", function(compressor){
|
||||
if (!compressor.option("evaluate")) return [ this ];
|
||||
try {
|
||||
var val = this._eval(), ast = make_node_from_constant(compressor, val, this);
|
||||
return [ best_of(ast, this), val ];
|
||||
var val = this._eval(compressor);
|
||||
return [ best_of(make_node_from_constant(compressor, val, this), this), val ];
|
||||
} catch(ex) {
|
||||
if (ex !== def) throw ex;
|
||||
return [ this ];
|
||||
@@ -611,10 +697,12 @@ merge(Compressor.prototype, {
|
||||
// inherits from AST_Statement; however, an AST_Function
|
||||
// isn't really a statement. This could byte in other
|
||||
// places too. :-( Wish JS had multiple inheritance.
|
||||
return [ this ];
|
||||
throw def;
|
||||
});
|
||||
function ev(node) {
|
||||
return node._eval();
|
||||
function ev(node, compressor) {
|
||||
if (!compressor) throw new Error("Compressor must be passed");
|
||||
|
||||
return node._eval(compressor);
|
||||
};
|
||||
def(AST_Node, function(){
|
||||
throw def; // not constant
|
||||
@@ -622,69 +710,77 @@ merge(Compressor.prototype, {
|
||||
def(AST_Constant, function(){
|
||||
return this.getValue();
|
||||
});
|
||||
def(AST_UnaryPrefix, function(){
|
||||
def(AST_UnaryPrefix, function(compressor){
|
||||
var e = this.expression;
|
||||
switch (this.operator) {
|
||||
case "!": return !ev(e);
|
||||
case "!": return !ev(e, compressor);
|
||||
case "typeof":
|
||||
// Function would be evaluated to an array and so typeof would
|
||||
// incorrectly return 'object'. Hence making is a special case.
|
||||
if (e instanceof AST_Function) return typeof function(){};
|
||||
|
||||
e = ev(e);
|
||||
e = ev(e, compressor);
|
||||
|
||||
// typeof <RegExp> returns "object" or "function" on different platforms
|
||||
// so cannot evaluate reliably
|
||||
if (e instanceof RegExp) throw def;
|
||||
|
||||
return typeof e;
|
||||
case "void": return void ev(e);
|
||||
case "~": return ~ev(e);
|
||||
case "void": return void ev(e, compressor);
|
||||
case "~": return ~ev(e, compressor);
|
||||
case "-":
|
||||
e = ev(e);
|
||||
e = ev(e, compressor);
|
||||
if (e === 0) throw def;
|
||||
return -e;
|
||||
case "+": return +ev(e);
|
||||
case "+": return +ev(e, compressor);
|
||||
}
|
||||
throw def;
|
||||
});
|
||||
def(AST_Binary, function(){
|
||||
def(AST_Binary, function(c){
|
||||
var left = this.left, right = this.right;
|
||||
switch (this.operator) {
|
||||
case "&&" : return ev(left) && ev(right);
|
||||
case "||" : return ev(left) || ev(right);
|
||||
case "|" : return ev(left) | ev(right);
|
||||
case "&" : return ev(left) & ev(right);
|
||||
case "^" : return ev(left) ^ ev(right);
|
||||
case "+" : return ev(left) + ev(right);
|
||||
case "*" : return ev(left) * ev(right);
|
||||
case "/" : return ev(left) / ev(right);
|
||||
case "%" : return ev(left) % ev(right);
|
||||
case "-" : return ev(left) - ev(right);
|
||||
case "<<" : return ev(left) << ev(right);
|
||||
case ">>" : return ev(left) >> ev(right);
|
||||
case ">>>" : return ev(left) >>> ev(right);
|
||||
case "==" : return ev(left) == ev(right);
|
||||
case "===" : return ev(left) === ev(right);
|
||||
case "!=" : return ev(left) != ev(right);
|
||||
case "!==" : return ev(left) !== ev(right);
|
||||
case "<" : return ev(left) < ev(right);
|
||||
case "<=" : return ev(left) <= ev(right);
|
||||
case ">" : return ev(left) > ev(right);
|
||||
case ">=" : return ev(left) >= ev(right);
|
||||
case "in" : return ev(left) in ev(right);
|
||||
case "instanceof" : return ev(left) instanceof ev(right);
|
||||
case "&&" : return ev(left, c) && ev(right, c);
|
||||
case "||" : return ev(left, c) || ev(right, c);
|
||||
case "|" : return ev(left, c) | ev(right, c);
|
||||
case "&" : return ev(left, c) & ev(right, c);
|
||||
case "^" : return ev(left, c) ^ ev(right, c);
|
||||
case "+" : return ev(left, c) + ev(right, c);
|
||||
case "*" : return ev(left, c) * ev(right, c);
|
||||
case "/" : return ev(left, c) / ev(right, c);
|
||||
case "%" : return ev(left, c) % ev(right, c);
|
||||
case "-" : return ev(left, c) - ev(right, c);
|
||||
case "<<" : return ev(left, c) << ev(right, c);
|
||||
case ">>" : return ev(left, c) >> ev(right, c);
|
||||
case ">>>" : return ev(left, c) >>> ev(right, c);
|
||||
case "==" : return ev(left, c) == ev(right, c);
|
||||
case "===" : return ev(left, c) === ev(right, c);
|
||||
case "!=" : return ev(left, c) != ev(right, c);
|
||||
case "!==" : return ev(left, c) !== ev(right, c);
|
||||
case "<" : return ev(left, c) < ev(right, c);
|
||||
case "<=" : return ev(left, c) <= ev(right, c);
|
||||
case ">" : return ev(left, c) > ev(right, c);
|
||||
case ">=" : return ev(left, c) >= ev(right, c);
|
||||
case "in" : return ev(left, c) in ev(right, c);
|
||||
case "instanceof" : return ev(left, c) instanceof ev(right, c);
|
||||
}
|
||||
throw def;
|
||||
});
|
||||
def(AST_Conditional, function(){
|
||||
return ev(this.condition)
|
||||
? ev(this.consequent)
|
||||
: ev(this.alternative);
|
||||
def(AST_Conditional, function(compressor){
|
||||
return ev(this.condition, compressor)
|
||||
? ev(this.consequent, compressor)
|
||||
: ev(this.alternative, compressor);
|
||||
});
|
||||
def(AST_SymbolRef, function(){
|
||||
def(AST_SymbolRef, function(compressor){
|
||||
var d = this.definition();
|
||||
if (d && d.constant && d.init) return ev(d.init);
|
||||
if (d && d.constant && d.init) return ev(d.init, compressor);
|
||||
throw def;
|
||||
});
|
||||
def(AST_Dot, function(compressor){
|
||||
if (compressor.option("unsafe") && this.property == "length") {
|
||||
var str = ev(this.expression, compressor);
|
||||
if (typeof str == "string")
|
||||
return str.length;
|
||||
}
|
||||
throw def;
|
||||
});
|
||||
})(function(node, func){
|
||||
@@ -760,70 +856,80 @@ merge(Compressor.prototype, {
|
||||
|
||||
// determine if expression has side effects
|
||||
(function(def){
|
||||
def(AST_Node, function(){ return true });
|
||||
def(AST_Node, function(compressor){ return true });
|
||||
|
||||
def(AST_EmptyStatement, function(){ return false });
|
||||
def(AST_Constant, function(){ return false });
|
||||
def(AST_This, function(){ return false });
|
||||
def(AST_EmptyStatement, function(compressor){ return false });
|
||||
def(AST_Constant, function(compressor){ return false });
|
||||
def(AST_This, function(compressor){ return false });
|
||||
|
||||
def(AST_Block, function(){
|
||||
def(AST_Call, function(compressor){
|
||||
var pure = compressor.option("pure_funcs");
|
||||
if (!pure) return true;
|
||||
return pure.indexOf(this.expression.print_to_string()) < 0;
|
||||
});
|
||||
|
||||
def(AST_Block, function(compressor){
|
||||
for (var i = this.body.length; --i >= 0;) {
|
||||
if (this.body[i].has_side_effects())
|
||||
if (this.body[i].has_side_effects(compressor))
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
def(AST_SimpleStatement, function(){
|
||||
return this.body.has_side_effects();
|
||||
def(AST_SimpleStatement, function(compressor){
|
||||
return this.body.has_side_effects(compressor);
|
||||
});
|
||||
def(AST_Defun, function(){ return true });
|
||||
def(AST_Function, function(){ return false });
|
||||
def(AST_Binary, function(){
|
||||
return this.left.has_side_effects()
|
||||
|| this.right.has_side_effects();
|
||||
def(AST_Defun, function(compressor){ return true });
|
||||
def(AST_Function, function(compressor){ return false });
|
||||
def(AST_Binary, function(compressor){
|
||||
return this.left.has_side_effects(compressor)
|
||||
|| this.right.has_side_effects(compressor);
|
||||
});
|
||||
def(AST_Assign, function(){ return true });
|
||||
def(AST_Conditional, function(){
|
||||
return this.condition.has_side_effects()
|
||||
|| this.consequent.has_side_effects()
|
||||
|| this.alternative.has_side_effects();
|
||||
def(AST_Assign, function(compressor){ return true });
|
||||
def(AST_Conditional, function(compressor){
|
||||
return this.condition.has_side_effects(compressor)
|
||||
|| this.consequent.has_side_effects(compressor)
|
||||
|| this.alternative.has_side_effects(compressor);
|
||||
});
|
||||
def(AST_Unary, function(){
|
||||
def(AST_Unary, function(compressor){
|
||||
return this.operator == "delete"
|
||||
|| this.operator == "++"
|
||||
|| this.operator == "--"
|
||||
|| this.expression.has_side_effects();
|
||||
|| this.expression.has_side_effects(compressor);
|
||||
});
|
||||
def(AST_SymbolRef, function(){ return false });
|
||||
def(AST_Object, function(){
|
||||
def(AST_SymbolRef, function(compressor){
|
||||
return this.global() && this.undeclared();
|
||||
});
|
||||
def(AST_Object, function(compressor){
|
||||
for (var i = this.properties.length; --i >= 0;)
|
||||
if (this.properties[i].has_side_effects())
|
||||
if (this.properties[i].has_side_effects(compressor))
|
||||
return true;
|
||||
return false;
|
||||
});
|
||||
def(AST_ObjectProperty, function(){
|
||||
return this.value.has_side_effects();
|
||||
def(AST_ObjectProperty, function(compressor){
|
||||
return this.value.has_side_effects(compressor);
|
||||
});
|
||||
def(AST_Array, function(){
|
||||
def(AST_Array, function(compressor){
|
||||
for (var i = this.elements.length; --i >= 0;)
|
||||
if (this.elements[i].has_side_effects())
|
||||
if (this.elements[i].has_side_effects(compressor))
|
||||
return true;
|
||||
return false;
|
||||
});
|
||||
// def(AST_Dot, function(){
|
||||
// return this.expression.has_side_effects();
|
||||
// });
|
||||
// def(AST_Sub, function(){
|
||||
// return this.expression.has_side_effects()
|
||||
// || this.property.has_side_effects();
|
||||
// });
|
||||
def(AST_PropAccess, function(){
|
||||
return true;
|
||||
def(AST_Dot, function(compressor){
|
||||
if (!compressor.option("pure_getters")) return true;
|
||||
return this.expression.has_side_effects(compressor);
|
||||
});
|
||||
def(AST_Seq, function(){
|
||||
return this.car.has_side_effects()
|
||||
|| this.cdr.has_side_effects();
|
||||
def(AST_Sub, function(compressor){
|
||||
if (!compressor.option("pure_getters")) return true;
|
||||
return this.expression.has_side_effects(compressor)
|
||||
|| this.property.has_side_effects(compressor);
|
||||
});
|
||||
def(AST_PropAccess, function(compressor){
|
||||
return !compressor.option("pure_getters");
|
||||
});
|
||||
def(AST_Seq, function(compressor){
|
||||
return this.car.has_side_effects(compressor)
|
||||
|| this.cdr.has_side_effects(compressor);
|
||||
});
|
||||
})(function(node, func){
|
||||
node.DEFMETHOD("has_side_effects", func);
|
||||
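The compressor argument threaded through has_side_effects above exists mainly so that property accesses can consult the pure_getters option. A minimal sketch of what that option changes (obj and its getter are made-up examples, not part of the patch):

    // By default a bare property read must be kept, since a getter could run code:
    var obj = { get x() { console.log("side effect"); return 1; } };
    obj.x;   // kept

    // With --compress pure_getters=true the read is assumed harmless,
    // so the statement above can be dropped as side-effect free.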
@@ -907,7 +1013,7 @@ merge(Compressor.prototype, {
|
||||
node.definitions.forEach(function(def){
|
||||
if (def.value) {
|
||||
initializations.add(def.name.name, def.value);
|
||||
if (def.value.has_side_effects()) {
|
||||
if (def.value.has_side_effects(compressor)) {
|
||||
def.value.walk(tw);
|
||||
}
|
||||
}
|
||||
@@ -948,7 +1054,8 @@ merge(Compressor.prototype, {
|
||||
// pass 3: we should drop declarations not in_use
|
||||
var tt = new TreeTransformer(
|
||||
function before(node, descend, in_list) {
|
||||
if (node instanceof AST_Lambda) {
|
||||
if (node instanceof AST_Lambda && !(node instanceof AST_Accessor)) {
|
||||
if (!compressor.option("keep_fargs")) {
|
||||
for (var a = node.argnames, i = a.length; --i >= 0;) {
|
||||
var sym = a[i];
|
||||
if (sym.unreferenced()) {
|
||||
@@ -963,6 +1070,7 @@ merge(Compressor.prototype, {
|
||||
else break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (node instanceof AST_Defun && node !== self) {
|
||||
if (!member(node.name.definition(), in_use)) {
|
||||
compressor.warn("Dropping unused function {name} [{file}:{line},{col}]", {
|
||||
@@ -984,7 +1092,7 @@ merge(Compressor.prototype, {
|
||||
line : def.name.start.line,
|
||||
col : def.name.start.col
|
||||
};
|
||||
if (def.value && def.value.has_side_effects()) {
|
||||
if (def.value && def.value.has_side_effects(compressor)) {
|
||||
def._unused_side_effects = true;
|
||||
compressor.warn("Side effects in initialization of unused variable {name} [{file}:{line},{col}]", w);
|
||||
return true;
|
||||
@@ -1038,19 +1146,24 @@ merge(Compressor.prototype, {
|
||||
}
|
||||
return node;
|
||||
}
|
||||
if (node instanceof AST_For && node.init instanceof AST_BlockStatement) {
|
||||
if (node instanceof AST_For) {
|
||||
descend(node, this);
|
||||
|
||||
if (node.init instanceof AST_BlockStatement) {
|
||||
// certain combination of unused name + side effect leads to:
|
||||
// https://github.com/mishoo/UglifyJS2/issues/44
|
||||
// that's an invalid AST.
|
||||
// We fix it at this stage by moving the `var` outside the `for`.
|
||||
|
||||
var body = node.init.body.slice(0, -1);
|
||||
node.init = node.init.body.slice(-1)[0].body;
|
||||
body.push(node);
|
||||
|
||||
return in_list ? MAP.splice(body) : make_node(AST_BlockStatement, node, {
|
||||
body: body
|
||||
});
|
||||
}
|
||||
}
|
||||
if (node instanceof AST_Scope && node !== self)
|
||||
return node;
|
||||
}
|
||||
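The comment above refers to issue #44: dropping an unused var whose initializer has side effects used to leave a block statement inside the `for` head, which is not a valid AST. A rough before/after sketch (f, g, i, j are placeholders):

    // before: `i` is unused, but its initializer must be kept
    for (var i = f(), j = 0; j < 3; j++) g(j);

    // after the fix the surviving statement is moved out of the `for` head
    f();
    for (var j = 0; j < 3; j++) g(j);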
@@ -1186,7 +1299,7 @@ merge(Compressor.prototype, {
|
||||
|
||||
OPT(AST_SimpleStatement, function(self, compressor){
|
||||
if (compressor.option("side_effects")) {
|
||||
if (!self.body.has_side_effects()) {
|
||||
if (!self.body.has_side_effects(compressor)) {
|
||||
compressor.warn("Dropping side-effect-free statement [{file}:{line},{col}]", self.start);
|
||||
return make_node(AST_EmptyStatement, self);
|
||||
}
|
||||
@@ -1570,7 +1683,7 @@ merge(Compressor.prototype, {
|
||||
if (self.args.length != 1) {
|
||||
return make_node(AST_Array, self, {
|
||||
elements: self.args
|
||||
});
|
||||
}).transform(compressor);
|
||||
}
|
||||
break;
|
||||
case "Object":
|
||||
@@ -1584,11 +1697,81 @@ merge(Compressor.prototype, {
|
||||
if (self.args.length == 0) return make_node(AST_String, self, {
|
||||
value: ""
|
||||
});
|
||||
return make_node(AST_Binary, self, {
|
||||
if (self.args.length <= 1) return make_node(AST_Binary, self, {
|
||||
left: self.args[0],
|
||||
operator: "+",
|
||||
right: make_node(AST_String, self, { value: "" })
|
||||
}).transform(compressor);
|
||||
break;
|
||||
case "Number":
|
||||
if (self.args.length == 0) return make_node(AST_Number, self, {
|
||||
value: 0
|
||||
});
|
||||
if (self.args.length == 1) return make_node(AST_UnaryPrefix, self, {
|
||||
expression: self.args[0],
|
||||
operator: "+"
|
||||
}).transform(compressor);
|
||||
case "Boolean":
|
||||
if (self.args.length == 0) return make_node(AST_False, self);
|
||||
if (self.args.length == 1) return make_node(AST_UnaryPrefix, self, {
|
||||
expression: make_node(AST_UnaryPrefix, null, {
|
||||
expression: self.args[0],
|
||||
operator: "!"
|
||||
}),
|
||||
operator: "!"
|
||||
}).transform(compressor);
|
||||
break;
|
||||
case "Function":
|
||||
if (all(self.args, function(x){ return x instanceof AST_String })) {
|
||||
// quite a corner-case, but we can handle it:
|
||||
// https://github.com/mishoo/UglifyJS2/issues/203
|
||||
// if the code argument is a constant, then we can minify it.
|
||||
try {
|
||||
var code = "(function(" + self.args.slice(0, -1).map(function(arg){
|
||||
return arg.value;
|
||||
}).join(",") + "){" + self.args[self.args.length - 1].value + "})()";
|
||||
var ast = parse(code);
|
||||
ast.figure_out_scope({ screw_ie8: compressor.option("screw_ie8") });
|
||||
var comp = new Compressor(compressor.options);
|
||||
ast = ast.transform(comp);
|
||||
ast.figure_out_scope({ screw_ie8: compressor.option("screw_ie8") });
|
||||
ast.mangle_names();
|
||||
var fun;
|
||||
try {
|
||||
ast.walk(new TreeWalker(function(node){
|
||||
if (node instanceof AST_Lambda) {
|
||||
fun = node;
|
||||
throw ast;
|
||||
}
|
||||
}));
|
||||
} catch(ex) {
|
||||
if (ex !== ast) throw ex;
|
||||
};
|
||||
if (!fun) return self;
|
||||
var args = fun.argnames.map(function(arg, i){
|
||||
return make_node(AST_String, self.args[i], {
|
||||
value: arg.print_to_string()
|
||||
});
|
||||
});
|
||||
var code = OutputStream();
|
||||
AST_BlockStatement.prototype._codegen.call(fun, fun, code);
|
||||
code = code.toString().replace(/^\{|\}$/g, "");
|
||||
args.push(make_node(AST_String, self.args[self.args.length - 1], {
|
||||
value: code
|
||||
}));
|
||||
self.args = args;
|
||||
return self;
|
||||
} catch(ex) {
|
||||
if (ex instanceof JS_Parse_Error) {
|
||||
compressor.warn("Error parsing code passed to new Function [{file}:{line},{col}]", self.args[self.args.length - 1].start);
|
||||
compressor.warn(ex.toString());
|
||||
} else {
|
||||
console.log(ex);
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
else if (exp instanceof AST_Dot && exp.property == "toString" && self.args.length == 0) {
|
||||
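To summarize the unsafe global-constructor cases in the hunk above, the intended rewrites look roughly like this (a sketch, assuming the global names are untouched; a, b, x are placeholders):

    String(x)       // => x + ""     (String() => "")
    Number(x)       // => +x         (Number() => 0)
    Boolean(x)      // => !!x        (Boolean() => false)
    Array(a, b)     // => [a, b]     (single-argument calls are left alone: that argument is a length)
    new Function("a", "b", "return a + b")
                    // the code strings themselves are compressed and mangled,
                    // e.g. something like new Function("n","r","return n+r")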
@@ -1598,15 +1781,70 @@ merge(Compressor.prototype, {
|
||||
right: exp.expression
|
||||
}).transform(compressor);
|
||||
}
|
||||
else if (exp instanceof AST_Dot && exp.expression instanceof AST_Array && exp.property == "join") EXIT: {
|
||||
var separator = self.args.length == 0 ? "," : self.args[0].evaluate(compressor)[1];
|
||||
if (separator == null) break EXIT; // not a constant
|
||||
var elements = exp.expression.elements.reduce(function(a, el){
|
||||
el = el.evaluate(compressor);
|
||||
if (a.length == 0 || el.length == 1) {
|
||||
a.push(el);
|
||||
} else {
|
||||
var last = a[a.length - 1];
|
||||
if (last.length == 2) {
|
||||
// it's a constant
|
||||
var val = "" + last[1] + separator + el[1];
|
||||
a[a.length - 1] = [ make_node_from_constant(compressor, val, last[0]), val ];
|
||||
} else {
|
||||
a.push(el);
|
||||
}
|
||||
}
|
||||
return a;
|
||||
}, []);
|
||||
if (elements.length == 0) return make_node(AST_String, self, { value: "" });
|
||||
if (elements.length == 1) return elements[0][0];
|
||||
if (separator == "") {
|
||||
var first;
|
||||
if (elements[0][0] instanceof AST_String
|
||||
|| elements[1][0] instanceof AST_String) {
|
||||
first = elements.shift()[0];
|
||||
} else {
|
||||
first = make_node(AST_String, self, { value: "" });
|
||||
}
|
||||
return elements.reduce(function(prev, el){
|
||||
return make_node(AST_Binary, el[0], {
|
||||
operator : "+",
|
||||
left : prev,
|
||||
right : el[0],
|
||||
});
|
||||
}, first).transform(compressor);
|
||||
}
|
||||
// need this awkward cloning to not affect original element
|
||||
// best_of will decide which one to get through.
|
||||
var node = self.clone();
|
||||
node.expression = node.expression.clone();
|
||||
node.expression.expression = node.expression.expression.clone();
|
||||
node.expression.expression.elements = elements.map(function(el){
|
||||
return el[0];
|
||||
});
|
||||
return best_of(self, node);
|
||||
}
|
||||
}
|
||||
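The join branch above constant-folds Array.prototype.join on array literals when the separator evaluates to a constant. Roughly (baz and x are placeholders):

    [ "foo", "bar", baz, 1, 2 ].join("-")
    // consecutive constant elements are pre-joined:
    [ "foo-bar", baz, "1-2" ].join("-")
    // and with an empty separator the call can become plain concatenation:
    [ "a", "b", x ].join("")   // => "ab" + x

best_of() then keeps whichever of the original and folded forms prints shorter.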
        if (compressor.option("side_effects")) {
            if (self.expression instanceof AST_Function
                && self.args.length == 0
                && !AST_Block.prototype.has_side_effects.call(self.expression)) {
                && !AST_Block.prototype.has_side_effects.call(self.expression, compressor)) {
                return make_node(AST_Undefined, self).transform(compressor);
            }
        }
        return self;
        if (compressor.option("drop_console")) {
            if (self.expression instanceof AST_PropAccess &&
                self.expression.expression instanceof AST_SymbolRef &&
                self.expression.expression.name == "console" &&
                self.expression.expression.undeclared()) {
                return make_node(AST_Undefined, self).transform(compressor);
            }
        }
        return self.evaluate(compressor)[0];
    });

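drop_console, added above, turns any call on the undeclared global console into void 0, which the side_effects pass can then remove entirely (x and err are placeholders):

    console.log("debug", x);   // => void 0;
    console.error(err);        // => void 0;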
    OPT(AST_New, function(self, compressor){
@@ -1629,7 +1867,7 @@ merge(Compressor.prototype, {
    OPT(AST_Seq, function(self, compressor){
        if (!compressor.option("side_effects"))
            return self;
        if (!self.car.has_side_effects()) {
        if (!self.car.has_side_effects(compressor)) {
            // we shouldn't compress (1,eval)(something) to
            // eval(something) because that changes the meaning of
            // eval (becomes lexical instead of global).
@@ -1644,16 +1882,34 @@ merge(Compressor.prototype, {
        }
        if (compressor.option("cascade")) {
            if (self.car instanceof AST_Assign
                && !self.car.left.has_side_effects()
                && self.car.left.equivalent_to(self.cdr)) {
                && !self.car.left.has_side_effects(compressor)) {
                if (self.car.left.equivalent_to(self.cdr)) {
                    return self.car;
                }
                if (!self.car.has_side_effects()
                    && !self.cdr.has_side_effects()
                if (self.cdr instanceof AST_Call
                    && self.cdr.expression.equivalent_to(self.car.left)) {
                    self.cdr.expression = self.car;
                    return self.cdr;
                }
            }
            if (!self.car.has_side_effects(compressor)
                && !self.cdr.has_side_effects(compressor)
                && self.car.equivalent_to(self.cdr)) {
                return self.car;
            }
        }
        if (self.cdr instanceof AST_UnaryPrefix
            && self.cdr.operator == "void"
            && !self.cdr.expression.has_side_effects(compressor)) {
            self.cdr.expression = self.car;
            return self.cdr;
        }
        if (self.cdr instanceof AST_Undefined) {
            return make_node(AST_UnaryPrefix, self, {
                operator : "void",
                expression : self.car
            });
        }
        return self;
    });

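A few examples of what the cascade and void rules in this hunk aim for (a sketch; f, g, x, a, b are placeholders):

    x = f(), x       // => x = f()
    x = g, x()       // => (x = g)()
    a, void b        // => void a      (only when b has no side effects)
    a, undefined     // => void a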
@@ -1699,6 +1955,14 @@ merge(Compressor.prototype, {
|
||||
return self.evaluate(compressor)[0];
|
||||
});
|
||||
|
||||
function has_side_effects_or_prop_access(node, compressor) {
|
||||
var save_pure_getters = compressor.option("pure_getters");
|
||||
compressor.options.pure_getters = false;
|
||||
var ret = node.has_side_effects(compressor);
|
||||
compressor.options.pure_getters = save_pure_getters;
|
||||
return ret;
|
||||
}
|
||||
|
||||
AST_Binary.DEFMETHOD("lift_sequences", function(compressor){
|
||||
if (compressor.option("sequences")) {
|
||||
if (this.left instanceof AST_Seq) {
|
||||
@@ -1710,8 +1974,8 @@ merge(Compressor.prototype, {
|
||||
return seq;
|
||||
}
|
||||
if (this.right instanceof AST_Seq
|
||||
&& !(this.operator == "||" || this.operator == "&&")
|
||||
&& !this.left.has_side_effects()) {
|
||||
&& this instanceof AST_Assign
|
||||
&& !has_side_effects_or_prop_access(this.left, compressor)) {
|
||||
var seq = this.right;
|
||||
var x = seq.to_array();
|
||||
this.right = x.pop();
|
||||
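lift_sequences, restricted above to assignments, pulls a sequence out of the right-hand side when evaluating the left-hand side early cannot be observed. For example (f, g, x, a are placeholders):

    x = (f(), g(), 3)    // => f(), g(), x = 3
    a.b = (f(), 3)       // left side is a property access, so with the new
                         // has_side_effects_or_prop_access() guard it stays put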
@@ -1726,8 +1990,9 @@ merge(Compressor.prototype, {
|
||||
var commutativeOperators = makePredicate("== === != !== * & | ^");
|
||||
|
||||
OPT(AST_Binary, function(self, compressor){
|
||||
function reverse(op) {
|
||||
if (!(self.left.has_side_effects() || self.right.has_side_effects())) {
|
||||
var reverse = compressor.has_directive("use asm") ? noop
|
||||
: function(op, force) {
|
||||
if (force || !(self.left.has_side_effects(compressor) || self.right.has_side_effects(compressor))) {
|
||||
if (op) self.operator = op;
|
||||
var tmp = self.left;
|
||||
self.left = self.right;
|
||||
@@ -1737,7 +2002,40 @@ merge(Compressor.prototype, {
|
||||
if (commutativeOperators(self.operator)) {
|
||||
if (self.right instanceof AST_Constant
|
||||
&& !(self.left instanceof AST_Constant)) {
|
||||
reverse();
|
||||
// if right is a constant, whatever side effects the
|
||||
// left side might have could not influence the
|
||||
// result. hence, force switch.
|
||||
|
||||
if (!(self.left instanceof AST_Binary
|
||||
&& PRECEDENCE[self.left.operator] >= PRECEDENCE[self.operator])) {
|
||||
reverse(null, true);
|
||||
}
|
||||
}
|
||||
if (/^[!=]==?$/.test(self.operator)) {
|
||||
if (self.left instanceof AST_SymbolRef && self.right instanceof AST_Conditional) {
|
||||
if (self.right.consequent instanceof AST_SymbolRef
|
||||
&& self.right.consequent.definition() === self.left.definition()) {
|
||||
if (/^==/.test(self.operator)) return self.right.condition;
|
||||
if (/^!=/.test(self.operator)) return self.right.condition.negate(compressor);
|
||||
}
|
||||
if (self.right.alternative instanceof AST_SymbolRef
|
||||
&& self.right.alternative.definition() === self.left.definition()) {
|
||||
if (/^==/.test(self.operator)) return self.right.condition.negate(compressor);
|
||||
if (/^!=/.test(self.operator)) return self.right.condition;
|
||||
}
|
||||
}
|
||||
if (self.right instanceof AST_SymbolRef && self.left instanceof AST_Conditional) {
|
||||
if (self.left.consequent instanceof AST_SymbolRef
|
||||
&& self.left.consequent.definition() === self.right.definition()) {
|
||||
if (/^==/.test(self.operator)) return self.left.condition;
|
||||
if (/^!=/.test(self.operator)) return self.left.condition.negate(compressor);
|
||||
}
|
||||
if (self.left.alternative instanceof AST_SymbolRef
|
||||
&& self.left.alternative.definition() === self.right.definition()) {
|
||||
if (/^==/.test(self.operator)) return self.left.condition.negate(compressor);
|
||||
if (/^!=/.test(self.operator)) return self.left.condition;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
self = self.lift_sequences(compressor);
|
||||
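The new equality rules above collapse comparisons of a symbol against a conditional that yields that same symbol. As implemented, the rewrites are (c, x, y are placeholders; negate() produces roughly !c):

    x === (c ? x : y)    // => c
    x !== (c ? x : y)    // => !c
    (c ? y : x) === x    // => !c     (symbol matched in the alternative branch)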
@@ -1758,8 +2056,8 @@ merge(Compressor.prototype, {
|
||||
&& compressor.option("unsafe")) {
|
||||
if (!(self.right.expression instanceof AST_SymbolRef)
|
||||
|| !self.right.expression.undeclared()) {
|
||||
self.left = self.right.expression;
|
||||
self.right = make_node(AST_Undefined, self.left).optimize(compressor);
|
||||
self.right = self.right.expression;
|
||||
self.left = make_node(AST_Undefined, self.left).optimize(compressor);
|
||||
if (self.operator.length == 2) self.operator += "=";
|
||||
}
|
||||
}
|
||||
@@ -1804,11 +2102,6 @@ merge(Compressor.prototype, {
|
||||
}
|
||||
break;
|
||||
}
|
||||
var exp = self.evaluate(compressor);
|
||||
if (exp.length > 1) {
|
||||
if (best_of(exp[0], self) !== self)
|
||||
return exp[0];
|
||||
}
|
||||
if (compressor.option("comparisons")) {
|
||||
if (!(compressor.parent() instanceof AST_Binary)
|
||||
|| compressor.parent() instanceof AST_Assign) {
|
||||
@@ -1828,7 +2121,76 @@ merge(Compressor.prototype, {
|
||||
&& self.left.operator == "+" && self.left.is_string(compressor)) {
|
||||
return self.left;
|
||||
}
|
||||
return self;
|
||||
if (compressor.option("evaluate")) {
|
||||
if (self.operator == "+") {
|
||||
if (self.left instanceof AST_Constant
|
||||
&& self.right instanceof AST_Binary
|
||||
&& self.right.operator == "+"
|
||||
&& self.right.left instanceof AST_Constant
|
||||
&& self.right.is_string(compressor)) {
|
||||
self = make_node(AST_Binary, self, {
|
||||
operator: "+",
|
||||
left: make_node(AST_String, null, {
|
||||
value: "" + self.left.getValue() + self.right.left.getValue(),
|
||||
start: self.left.start,
|
||||
end: self.right.left.end
|
||||
}),
|
||||
right: self.right.right
|
||||
});
|
||||
}
|
||||
if (self.right instanceof AST_Constant
|
||||
&& self.left instanceof AST_Binary
|
||||
&& self.left.operator == "+"
|
||||
&& self.left.right instanceof AST_Constant
|
||||
&& self.left.is_string(compressor)) {
|
||||
self = make_node(AST_Binary, self, {
|
||||
operator: "+",
|
||||
left: self.left.left,
|
||||
right: make_node(AST_String, null, {
|
||||
value: "" + self.left.right.getValue() + self.right.getValue(),
|
||||
start: self.left.right.start,
|
||||
end: self.right.end
|
||||
})
|
||||
});
|
||||
}
|
||||
if (self.left instanceof AST_Binary
|
||||
&& self.left.operator == "+"
|
||||
&& self.left.is_string(compressor)
|
||||
&& self.left.right instanceof AST_Constant
|
||||
&& self.right instanceof AST_Binary
|
||||
&& self.right.operator == "+"
|
||||
&& self.right.left instanceof AST_Constant
|
||||
&& self.right.is_string(compressor)) {
|
||||
self = make_node(AST_Binary, self, {
|
||||
operator: "+",
|
||||
left: make_node(AST_Binary, self.left, {
|
||||
operator: "+",
|
||||
left: self.left.left,
|
||||
right: make_node(AST_String, null, {
|
||||
value: "" + self.left.right.getValue() + self.right.left.getValue(),
|
||||
start: self.left.right.start,
|
||||
end: self.right.left.end
|
||||
})
|
||||
}),
|
||||
right: self.right.right
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
// x * (y * z) ==> x * y * z
|
||||
if (self.right instanceof AST_Binary
|
||||
&& self.right.operator == self.operator
|
||||
&& (self.operator == "*" || self.operator == "&&" || self.operator == "||"))
|
||||
{
|
||||
self.left = make_node(AST_Binary, self.left, {
|
||||
operator : self.operator,
|
||||
left : self.left,
|
||||
right : self.right.left
|
||||
});
|
||||
self.right = self.right.right;
|
||||
return self.transform(compressor);
|
||||
}
|
||||
return self.evaluate(compressor)[0];
|
||||
});
|
||||
|
||||
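The evaluate branch above merges constant fragments of string concatenations across the expression tree, and the last block re-associates right-nested *, && and || chains. Roughly (x, y, z are placeholders):

    "foo" + ("bar" + x)      // => "foobar" + x
    (x + "foo") + "bar"      // => x + "foobar"
    (x + "a") + ("b" + y)    // => x + "ab" + y
    x * (y * z)              // => x * y * z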
OPT(AST_SymbolRef, function(self, compressor){
|
||||
@@ -1919,7 +2281,7 @@ merge(Compressor.prototype, {
|
||||
* ==>
|
||||
* exp = foo ? something : something_else;
|
||||
*/
|
||||
self = make_node(AST_Assign, self, {
|
||||
return make_node(AST_Assign, self, {
|
||||
operator: consequent.operator,
|
||||
left: consequent.left,
|
||||
right: make_node(AST_Conditional, self, {
|
||||
@@ -1929,6 +2291,49 @@ merge(Compressor.prototype, {
|
||||
})
|
||||
});
|
||||
}
|
||||
if (consequent instanceof AST_Call
|
||||
&& alternative.TYPE === consequent.TYPE
|
||||
&& consequent.args.length == alternative.args.length
|
||||
&& consequent.expression.equivalent_to(alternative.expression)) {
|
||||
if (consequent.args.length == 0) {
|
||||
return make_node(AST_Seq, self, {
|
||||
car: self.condition,
|
||||
cdr: consequent
|
||||
});
|
||||
}
|
||||
if (consequent.args.length == 1) {
|
||||
consequent.args[0] = make_node(AST_Conditional, self, {
|
||||
condition: self.condition,
|
||||
consequent: consequent.args[0],
|
||||
alternative: alternative.args[0]
|
||||
});
|
||||
return consequent;
|
||||
}
|
||||
}
|
||||
// x?y?z:a:a --> x&&y?z:a
|
||||
if (consequent instanceof AST_Conditional
|
||||
&& consequent.alternative.equivalent_to(alternative)) {
|
||||
return make_node(AST_Conditional, self, {
|
||||
condition: make_node(AST_Binary, self, {
|
||||
left: self.condition,
|
||||
operator: "&&",
|
||||
right: consequent.condition
|
||||
}),
|
||||
consequent: consequent.consequent,
|
||||
alternative: alternative
|
||||
});
|
||||
}
|
||||
// x=y?1:1 --> x=1
|
||||
if (consequent instanceof AST_Constant
|
||||
&& alternative instanceof AST_Constant
|
||||
&& consequent.equivalent_to(alternative)) {
|
||||
if (self.condition.has_side_effects(compressor)) {
|
||||
return AST_Seq.from_array([self.condition, make_node_from_constant(compressor, consequent.value, self)]);
|
||||
} else {
|
||||
return make_node_from_constant(compressor, consequent.value, self);
|
||||
|
||||
}
|
||||
}
|
||||
return self;
|
||||
});
|
||||
|
||||
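The conditional rewrites above cover calls with an identical callee, nested conditionals with a shared alternative, and conditionals whose branches are the same constant. Sketch (c, f, x, y, z, a are placeholders):

    c ? f(x) : f(y)         // => f(c ? x : y)
    c ? f() : f()           // => c, f()
    x ? (y ? z : a) : a     // => x && y ? z : a
    c ? 1 : 1               // => 1   (c is kept in a sequence when it has side effects)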
@@ -1962,16 +2367,35 @@ merge(Compressor.prototype, {
|
||||
var prop = self.property;
|
||||
if (prop instanceof AST_String && compressor.option("properties")) {
|
||||
prop = prop.getValue();
|
||||
if (is_identifier(prop) || compressor.option("screw_ie8")) {
|
||||
if (RESERVED_WORDS(prop) ? compressor.option("screw_ie8") : is_identifier_string(prop)) {
|
||||
return make_node(AST_Dot, self, {
|
||||
expression : self.expression,
|
||||
property : prop
|
||||
}).optimize(compressor);
|
||||
}
|
||||
var v = parseFloat(prop);
|
||||
if (!isNaN(v) && v.toString() == prop) {
|
||||
self.property = make_node(AST_Number, self.property, {
|
||||
value: v
|
||||
});
|
||||
}
|
||||
}
|
||||
return self;
|
||||
});
|
||||
|
||||
OPT(AST_Dot, function(self, compressor){
|
||||
var prop = self.property;
|
||||
if (RESERVED_WORDS(prop) && !compressor.option("screw_ie8")) {
|
||||
return make_node(AST_Sub, self, {
|
||||
expression : self.expression,
|
||||
property : make_node(AST_String, self, {
|
||||
value: prop
|
||||
})
|
||||
}).optimize(compressor);
|
||||
}
|
||||
return self.evaluate(compressor)[0];
|
||||
});
|
||||
|
||||
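With the properties option, the changes above move between bracket and dot notation depending on what the property name allows and whether IE8 must keep working. For instance:

    a["foo"]      // => a.foo
    a["1.5"]      // => a[1.5]        (the string round-trips through parseFloat)
    a["class"]    // => a.class only with screw_ie8; otherwise left as is
    a.class       // conversely rewritten to a["class"] when IE8 support is kept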
function literals_in_boolean_context(self, compressor) {
|
||||
if (compressor.option("booleans") && compressor.in_boolean_context()) {
|
||||
return make_node(AST_True, self);
|
||||
|
||||
@@ -46,37 +46,44 @@
|
||||
(function(){
|
||||
|
||||
var MOZ_TO_ME = {
|
||||
ExpressionStatement: function(M) {
|
||||
var expr = M.expression;
|
||||
if (expr.type === "Literal" && typeof expr.value === "string") {
|
||||
return new AST_Directive({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
value: expr.value
|
||||
});
|
||||
}
|
||||
return new AST_SimpleStatement({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
body: from_moz(expr)
|
||||
});
|
||||
},
|
||||
TryStatement: function(M) {
|
||||
var handlers = M.handlers || [M.handler];
|
||||
if (handlers.length > 1 || M.guardedHandlers && M.guardedHandlers.length) {
|
||||
throw new Error("Multiple catch clauses are not supported.");
|
||||
}
|
||||
return new AST_Try({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
body : from_moz(M.block).body,
|
||||
bcatch : from_moz(M.handlers[0]),
|
||||
bcatch : from_moz(handlers[0]),
|
||||
bfinally : M.finalizer ? new AST_Finally(from_moz(M.finalizer)) : null
|
||||
});
|
||||
},
|
||||
CatchClause : function(M) {
|
||||
return new AST_Catch({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
argname : from_moz(M.param),
|
||||
body : from_moz(M.body).body
|
||||
});
|
||||
},
|
||||
ObjectExpression : function(M) {
|
||||
return new AST_Object({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
properties : M.properties.map(function(prop){
|
||||
var key = prop.key;
|
||||
Property: function(M) {
|
||||
var key = M.key;
|
||||
var name = key.type == "Identifier" ? key.name : key.value;
|
||||
var args = {
|
||||
start : my_start_token(key),
|
||||
end : my_end_token(prop.value),
|
||||
end : my_end_token(M.value),
|
||||
key : name,
|
||||
value : from_moz(prop.value)
|
||||
value : from_moz(M.value)
|
||||
};
|
||||
switch (prop.kind) {
|
||||
switch (M.kind) {
|
||||
case "init":
|
||||
return new AST_ObjectKeyVal(args);
|
||||
case "set":
|
||||
@@ -86,6 +93,14 @@
|
||||
args.value.name = from_moz(key);
|
||||
return new AST_ObjectGetter(args);
|
||||
}
|
||||
},
|
||||
ObjectExpression: function(M) {
|
||||
return new AST_Object({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
properties : M.properties.map(function(prop){
|
||||
prop.type = "Property";
|
||||
return from_moz(prop)
|
||||
})
|
||||
});
|
||||
},
|
||||
@@ -108,6 +123,13 @@
|
||||
body : M.consequent.map(from_moz)
|
||||
});
|
||||
},
|
||||
VariableDeclaration: function(M) {
|
||||
return new (M.kind === "const" ? AST_Const : AST_Var)({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
definitions : M.declarations.map(from_moz)
|
||||
});
|
||||
},
|
||||
Literal: function(M) {
|
||||
var val = M.value, args = {
|
||||
start : my_start_token(M),
|
||||
@@ -128,12 +150,9 @@
|
||||
return new AST_RegExp(args);
|
||||
}
|
||||
},
|
||||
UnaryExpression: From_Moz_Unary,
|
||||
UpdateExpression: From_Moz_Unary,
|
||||
Identifier: function(M) {
|
||||
var p = FROM_MOZ_STACK[FROM_MOZ_STACK.length - 2];
|
||||
return new (M.name == "this" ? AST_This
|
||||
: p.type == "LabeledStatement" ? AST_Label
|
||||
return new ( p.type == "LabeledStatement" ? AST_Label
|
||||
: p.type == "VariableDeclarator" && p.id === M ? (p.kind == "const" ? AST_SymbolConst : AST_SymbolVar)
|
||||
: p.type == "FunctionExpression" ? (p.id === M ? AST_SymbolLambda : AST_SymbolFunarg)
|
||||
: p.type == "FunctionDeclaration" ? (p.id === M ? AST_SymbolDefun : AST_SymbolFunarg)
|
||||
@@ -147,7 +166,8 @@
|
||||
}
|
||||
};
|
||||
|
||||
function From_Moz_Unary(M) {
|
||||
MOZ_TO_ME.UpdateExpression =
|
||||
MOZ_TO_ME.UnaryExpression = function To_Moz_Unary(M) {
|
||||
var prefix = "prefix" in M ? M.prefix
|
||||
: M.type == "UnaryExpression" ? true : false;
|
||||
return new (prefix ? AST_UnaryPrefix : AST_UnaryPostfix)({
|
||||
@@ -158,14 +178,9 @@
|
||||
});
|
||||
};
|
||||
|
||||
var ME_TO_MOZ = {};
|
||||
|
||||
map("Node", AST_Node);
|
||||
map("Program", AST_Toplevel, "body@body");
|
||||
map("Function", AST_Function, "id>name, params@argnames, body%body");
|
||||
map("EmptyStatement", AST_EmptyStatement);
|
||||
map("BlockStatement", AST_BlockStatement, "body@body");
|
||||
map("ExpressionStatement", AST_SimpleStatement, "expression>body");
|
||||
map("IfStatement", AST_If, "test>condition, consequent>body, alternate>alternative");
|
||||
map("LabeledStatement", AST_LabeledStatement, "label>label, body>body");
|
||||
map("BreakStatement", AST_Break, "label>label");
|
||||
@@ -180,71 +195,257 @@
|
||||
map("ForInStatement", AST_ForIn, "left>init, right>object, body>body");
|
||||
map("DebuggerStatement", AST_Debugger);
|
||||
map("FunctionDeclaration", AST_Defun, "id>name, params@argnames, body%body");
|
||||
map("VariableDeclaration", AST_Var, "declarations@definitions");
|
||||
map("VariableDeclarator", AST_VarDef, "id>name, init>value");
|
||||
map("CatchClause", AST_Catch, "param>argname, body%body");
|
||||
|
||||
map("ThisExpression", AST_This);
|
||||
map("ArrayExpression", AST_Array, "elements@elements");
|
||||
map("FunctionExpression", AST_Function, "id>name, params@argnames, body%body");
|
||||
map("BinaryExpression", AST_Binary, "operator=operator, left>left, right>right");
|
||||
map("AssignmentExpression", AST_Assign, "operator=operator, left>left, right>right");
|
||||
map("LogicalExpression", AST_Binary, "operator=operator, left>left, right>right");
|
||||
map("AssignmentExpression", AST_Assign, "operator=operator, left>left, right>right");
|
||||
map("ConditionalExpression", AST_Conditional, "test>condition, consequent>consequent, alternate>alternative");
|
||||
map("NewExpression", AST_New, "callee>expression, arguments@args");
|
||||
map("CallExpression", AST_Call, "callee>expression, arguments@args");
|
||||
|
||||
def_to_moz(AST_Directive, function To_Moz_Directive(M) {
|
||||
return {
|
||||
type: "ExpressionStatement",
|
||||
expression: {
|
||||
type: "Literal",
|
||||
value: M.value
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_SimpleStatement, function To_Moz_ExpressionStatement(M) {
|
||||
return {
|
||||
type: "ExpressionStatement",
|
||||
expression: to_moz(M.body)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_SwitchBranch, function To_Moz_SwitchCase(M) {
|
||||
return {
|
||||
type: "SwitchCase",
|
||||
test: to_moz(M.expression),
|
||||
consequent: M.body.map(to_moz)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Try, function To_Moz_TryStatement(M) {
|
||||
return {
|
||||
type: "TryStatement",
|
||||
block: to_moz_block(M),
|
||||
handler: to_moz(M.bcatch),
|
||||
guardedHandlers: [],
|
||||
finalizer: to_moz(M.bfinally)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Catch, function To_Moz_CatchClause(M) {
|
||||
return {
|
||||
type: "CatchClause",
|
||||
param: to_moz(M.argname),
|
||||
guard: null,
|
||||
body: to_moz_block(M)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Definitions, function To_Moz_VariableDeclaration(M) {
|
||||
return {
|
||||
type: "VariableDeclaration",
|
||||
kind: M instanceof AST_Const ? "const" : "var",
|
||||
declarations: M.definitions.map(to_moz)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Seq, function To_Moz_SequenceExpression(M) {
|
||||
return {
|
||||
type: "SequenceExpression",
|
||||
expressions: M.to_array().map(to_moz)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_PropAccess, function To_Moz_MemberExpression(M) {
|
||||
var isComputed = M instanceof AST_Sub;
|
||||
return {
|
||||
type: "MemberExpression",
|
||||
object: to_moz(M.expression),
|
||||
computed: isComputed,
|
||||
property: isComputed ? to_moz(M.property) : {type: "Identifier", name: M.property}
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Unary, function To_Moz_Unary(M) {
|
||||
return {
|
||||
type: M.operator == "++" || M.operator == "--" ? "UpdateExpression" : "UnaryExpression",
|
||||
operator: M.operator,
|
||||
prefix: M instanceof AST_UnaryPrefix,
|
||||
argument: to_moz(M.expression)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Binary, function To_Moz_BinaryExpression(M) {
|
||||
return {
|
||||
type: M.operator == "&&" || M.operator == "||" ? "LogicalExpression" : "BinaryExpression",
|
||||
left: to_moz(M.left),
|
||||
operator: M.operator,
|
||||
right: to_moz(M.right)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Object, function To_Moz_ObjectExpression(M) {
|
||||
return {
|
||||
type: "ObjectExpression",
|
||||
properties: M.properties.map(to_moz)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_ObjectProperty, function To_Moz_Property(M) {
|
||||
var key = (
|
||||
is_identifier(M.key)
|
||||
? {type: "Identifier", name: M.key}
|
||||
: {type: "Literal", value: M.key}
|
||||
);
|
||||
var kind;
|
||||
if (M instanceof AST_ObjectKeyVal) {
|
||||
kind = "init";
|
||||
} else
|
||||
if (M instanceof AST_ObjectGetter) {
|
||||
kind = "get";
|
||||
} else
|
||||
if (M instanceof AST_ObjectSetter) {
|
||||
kind = "set";
|
||||
}
|
||||
return {
|
||||
type: "Property",
|
||||
kind: kind,
|
||||
key: key,
|
||||
value: to_moz(M.value)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Symbol, function To_Moz_Identifier(M) {
|
||||
var def = M.definition();
|
||||
return {
|
||||
type: "Identifier",
|
||||
name: def ? def.mangled_name || def.name : M.name
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Constant, function To_Moz_Literal(M) {
|
||||
var value = M.value;
|
||||
if (typeof value === 'number' && (value < 0 || (value === 0 && 1 / value < 0))) {
|
||||
return {
|
||||
type: "UnaryExpression",
|
||||
operator: "-",
|
||||
prefix: true,
|
||||
argument: {
|
||||
type: "Literal",
|
||||
value: -value
|
||||
}
|
||||
};
|
||||
}
|
||||
return {
|
||||
type: "Literal",
|
||||
value: value
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Atom, function To_Moz_Atom(M) {
|
||||
return {
|
||||
type: "Identifier",
|
||||
name: String(M.value)
|
||||
};
|
||||
});
|
||||
|
||||
AST_Boolean.DEFMETHOD("to_mozilla_ast", AST_Constant.prototype.to_mozilla_ast);
|
||||
AST_Null.DEFMETHOD("to_mozilla_ast", AST_Constant.prototype.to_mozilla_ast);
|
||||
AST_Hole.DEFMETHOD("to_mozilla_ast", function To_Moz_ArrayHole() { return null });
|
||||
|
||||
AST_Block.DEFMETHOD("to_mozilla_ast", AST_BlockStatement.prototype.to_mozilla_ast);
|
||||
AST_Lambda.DEFMETHOD("to_mozilla_ast", AST_Function.prototype.to_mozilla_ast);
|
||||
|
||||
/* -----[ tools ]----- */
|
||||
|
||||
function my_start_token(moznode) {
|
||||
var loc = moznode.loc;
|
||||
var range = moznode.range;
|
||||
return new AST_Token({
|
||||
file : moznode.loc && moznode.loc.source,
|
||||
line : moznode.loc && moznode.loc.start.line,
|
||||
col : moznode.loc && moznode.loc.start.column,
|
||||
pos : moznode.start,
|
||||
endpos : moznode.start
|
||||
file : loc && loc.source,
|
||||
line : loc && loc.start.line,
|
||||
col : loc && loc.start.column,
|
||||
pos : range ? range[0] : moznode.start,
|
||||
endpos : range ? range[0] : moznode.start
|
||||
});
|
||||
};
|
||||
|
||||
function my_end_token(moznode) {
|
||||
var loc = moznode.loc;
|
||||
var range = moznode.range;
|
||||
return new AST_Token({
|
||||
file : moznode.loc && moznode.loc.source,
|
||||
line : moznode.loc && moznode.loc.end.line,
|
||||
col : moznode.loc && moznode.loc.end.column,
|
||||
pos : moznode.end,
|
||||
endpos : moznode.end
|
||||
file : loc && loc.source,
|
||||
line : loc && loc.end.line,
|
||||
col : loc && loc.end.column,
|
||||
pos : range ? range[1] : moznode.end,
|
||||
endpos : range ? range[1] : moznode.end
|
||||
});
|
||||
};
|
||||
|
||||
function map(moztype, mytype, propmap) {
|
||||
var moz_to_me = "function From_Moz_" + moztype + "(M){\n";
|
||||
moz_to_me += "return new mytype({\n" +
|
||||
moz_to_me += "return new " + mytype.name + "({\n" +
|
||||
"start: my_start_token(M),\n" +
|
||||
"end: my_end_token(M)";
|
||||
|
||||
var me_to_moz = "function To_Moz_" + moztype + "(M){\n";
|
||||
me_to_moz += "return {\n" +
|
||||
"type: " + JSON.stringify(moztype);
|
||||
|
||||
if (propmap) propmap.split(/\s*,\s*/).forEach(function(prop){
|
||||
var m = /([a-z0-9$_]+)(=|@|>|%)([a-z0-9$_]+)/i.exec(prop);
|
||||
if (!m) throw new Error("Can't understand property map: " + prop);
|
||||
var moz = "M." + m[1], how = m[2], my = m[3];
|
||||
var moz = m[1], how = m[2], my = m[3];
|
||||
moz_to_me += ",\n" + my + ": ";
|
||||
if (how == "@") {
|
||||
moz_to_me += moz + ".map(from_moz)";
|
||||
} else if (how == ">") {
|
||||
moz_to_me += "from_moz(" + moz + ")";
|
||||
} else if (how == "=") {
|
||||
moz_to_me += moz;
|
||||
} else if (how == "%") {
|
||||
moz_to_me += "from_moz(" + moz + ").body";
|
||||
} else throw new Error("Can't understand operator in propmap: " + prop);
|
||||
me_to_moz += ",\n" + moz + ": ";
|
||||
switch (how) {
|
||||
case "@":
|
||||
moz_to_me += "M." + moz + ".map(from_moz)";
|
||||
me_to_moz += "M." + my + ".map(to_moz)";
|
||||
break;
|
||||
case ">":
|
||||
moz_to_me += "from_moz(M." + moz + ")";
|
||||
me_to_moz += "to_moz(M." + my + ")";
|
||||
break;
|
||||
case "=":
|
||||
moz_to_me += "M." + moz;
|
||||
me_to_moz += "M." + my;
|
||||
break;
|
||||
case "%":
|
||||
moz_to_me += "from_moz(M." + moz + ").body";
|
||||
me_to_moz += "to_moz_block(M)";
|
||||
break;
|
||||
default:
|
||||
throw new Error("Can't understand operator in propmap: " + prop);
|
||||
}
|
||||
});
|
||||
moz_to_me += "\n})}";
|
||||
|
||||
moz_to_me += "\n})\n}";
|
||||
me_to_moz += "\n}\n}";
|
||||
|
||||
//moz_to_me = parse(moz_to_me).print_to_string({ beautify: true });
|
||||
//me_to_moz = parse(me_to_moz).print_to_string({ beautify: true });
|
||||
//console.log(moz_to_me);
|
||||
|
||||
moz_to_me = new Function("mytype", "my_start_token", "my_end_token", "from_moz", "return(" + moz_to_me + ")")(
|
||||
mytype, my_start_token, my_end_token, from_moz
|
||||
moz_to_me = new Function("my_start_token", "my_end_token", "from_moz", "return(" + moz_to_me + ")")(
|
||||
my_start_token, my_end_token, from_moz
|
||||
);
|
||||
return MOZ_TO_ME[moztype] = moz_to_me;
|
||||
me_to_moz = new Function("to_moz", "to_moz_block", "return(" + me_to_moz + ")")(
|
||||
to_moz, to_moz_block
|
||||
);
|
||||
MOZ_TO_ME[moztype] = moz_to_me;
|
||||
def_to_moz(mytype, me_to_moz);
|
||||
};
|
||||
|
||||
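For reference, the propmap mini-language consumed by map() above uses one operator per property: ">" converts a single child node, "@" maps an array, "=" copies a value verbatim, and "%" converts a block body. Hand-expanding map("VariableDeclarator", AST_VarDef, "id>name, init>value") gives roughly:

    // Mozilla AST -> UglifyJS AST
    function From_Moz_VariableDeclarator(M) {
        return new AST_VarDef({
            start : my_start_token(M),
            end   : my_end_token(M),
            name  : from_moz(M.id),
            value : from_moz(M.init)
        });
    }
    // UglifyJS AST -> Mozilla AST
    function To_Moz_VariableDeclarator(M) {
        return {
            type : "VariableDeclarator",
            id   : to_moz(M.name),
            init : to_moz(M.value)
        };
    }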
var FROM_MOZ_STACK = null;
|
||||
@@ -264,4 +465,46 @@
|
||||
return ast;
|
||||
};
|
||||
|
||||
function moz_sub_loc(token) {
|
||||
return token.line ? {
|
||||
line: token.line,
|
||||
column: token.col
|
||||
} : null;
|
||||
};
|
||||
|
||||
function set_moz_loc(mynode, moznode) {
|
||||
var start = mynode.start;
|
||||
var end = mynode.end;
|
||||
if (start.pos != null && end.pos != null) {
|
||||
moznode.range = [start.pos, end.pos];
|
||||
}
|
||||
if (start.line) {
|
||||
moznode.loc = {
|
||||
start: moz_sub_loc(start),
|
||||
end: moz_sub_loc(end)
|
||||
};
|
||||
if (start.file) {
|
||||
moznode.loc.source = start.file;
|
||||
}
|
||||
}
|
||||
return moznode;
|
||||
};
|
||||
|
||||
function def_to_moz(mytype, handler) {
|
||||
mytype.DEFMETHOD("to_mozilla_ast", function() {
|
||||
return set_moz_loc(this, handler(this));
|
||||
});
|
||||
};
|
||||
|
||||
function to_moz(node) {
|
||||
return node != null ? node.to_mozilla_ast() : null;
|
||||
};
|
||||
|
||||
function to_moz_block(node) {
|
||||
return {
|
||||
type: "BlockStatement",
|
||||
body: node.body.map(to_moz)
|
||||
};
|
||||
};
|
||||
|
||||
})();
|
||||
|
||||
164  lib/output.js
@@ -51,16 +51,18 @@ function OutputStream(options) {
|
||||
quote_keys : false,
|
||||
space_colon : true,
|
||||
ascii_only : false,
|
||||
unescape_regexps : false,
|
||||
inline_script : false,
|
||||
width : 80,
|
||||
max_line_len : 32000,
|
||||
ie_proof : true,
|
||||
beautify : false,
|
||||
source_map : null,
|
||||
bracketize : false,
|
||||
semicolons : true,
|
||||
comments : false,
|
||||
preserve_line : false
|
||||
preserve_line : false,
|
||||
screw_ie8 : false,
|
||||
preamble : null,
|
||||
}, true);
|
||||
|
||||
var indentation = 0;
|
||||
@@ -95,7 +97,7 @@ function OutputStream(options) {
|
||||
case "\u2029": return "\\u2029";
|
||||
case '"': ++dq; return '"';
|
||||
case "'": ++sq; return "'";
|
||||
case "\0": return "\\0";
|
||||
case "\0": return "\\x00";
|
||||
}
|
||||
return s;
|
||||
});
|
||||
@@ -299,6 +301,10 @@ function OutputStream(options) {
|
||||
return OUTPUT;
|
||||
};
|
||||
|
||||
if (options.preamble) {
|
||||
print(options.preamble.replace(/\r\n?|[\n\u2028\u2029]|\s*$/g, "\n"));
|
||||
}
|
||||
|
||||
var stack = [];
|
||||
return {
|
||||
get : get,
|
||||
@@ -350,17 +356,16 @@ function OutputStream(options) {
|
||||
|
||||
AST_Node.DEFMETHOD("print", function(stream, force_parens){
|
||||
var self = this, generator = self._codegen;
|
||||
function doit() {
|
||||
self.add_comments(stream);
|
||||
self.add_source_map(stream);
|
||||
generator(self, stream);
|
||||
}
|
||||
stream.push_node(self);
|
||||
if (force_parens || self.needs_parens(stream)) {
|
||||
stream.with_parens(function(){
|
||||
self.add_comments(stream);
|
||||
self.add_source_map(stream);
|
||||
generator(self, stream);
|
||||
});
|
||||
stream.with_parens(doit);
|
||||
} else {
|
||||
self.add_comments(stream);
|
||||
self.add_source_map(stream);
|
||||
generator(self, stream);
|
||||
doit();
|
||||
}
|
||||
stream.pop_node();
|
||||
});
|
||||
@@ -379,15 +384,23 @@ function OutputStream(options) {
|
||||
var start = self.start;
|
||||
if (start && !start._comments_dumped) {
|
||||
start._comments_dumped = true;
|
||||
var comments = start.comments_before;
|
||||
var comments = start.comments_before || [];
|
||||
|
||||
// XXX: ugly fix for https://github.com/mishoo/UglifyJS2/issues/112
|
||||
// if this node is `return` or `throw`, we cannot allow comments before
|
||||
// the returned or thrown value.
|
||||
if (self instanceof AST_Exit &&
|
||||
self.value && self.value.start.comments_before.length > 0) {
|
||||
comments = (comments || []).concat(self.value.start.comments_before);
|
||||
self.value.start.comments_before = [];
|
||||
// and https://github.com/mishoo/UglifyJS2/issues/372
|
||||
if (self instanceof AST_Exit && self.value) {
|
||||
self.value.walk(new TreeWalker(function(node){
|
||||
if (node.start && node.start.comments_before) {
|
||||
comments = comments.concat(node.start.comments_before);
|
||||
node.start.comments_before = [];
|
||||
}
|
||||
if (node instanceof AST_Function ||
|
||||
node instanceof AST_Array ||
|
||||
node instanceof AST_Object)
|
||||
{
|
||||
return true; // don't go inside.
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
if (c.test) {
|
||||
@@ -400,7 +413,7 @@ function OutputStream(options) {
|
||||
});
|
||||
}
|
||||
comments.forEach(function(c){
|
||||
if (c.type == "comment1") {
|
||||
if (/comment[134]/.test(c.type)) {
|
||||
output.print("//" + c.value + "\n");
|
||||
output.indent();
|
||||
}
|
||||
@@ -421,7 +434,13 @@ function OutputStream(options) {
|
||||
/* -----[ PARENTHESES ]----- */
|
||||
|
||||
function PARENS(nodetype, func) {
|
||||
if (Array.isArray(nodetype)) {
|
||||
nodetype.forEach(function(nodetype){
|
||||
PARENS(nodetype, func);
|
||||
});
|
||||
} else {
|
||||
nodetype.DEFMETHOD("needs_parens", func);
|
||||
}
|
||||
};
|
||||
|
||||
PARENS(AST_Node, function(){
|
||||
@@ -440,7 +459,7 @@ function OutputStream(options) {
|
||||
return first_in_statement(output);
|
||||
});
|
||||
|
||||
PARENS(AST_Unary, function(output){
|
||||
PARENS([ AST_Unary, AST_Undefined ], function(output){
|
||||
var p = output.parent();
|
||||
return p instanceof AST_PropAccess && p.expression === this;
|
||||
});
|
||||
@@ -451,7 +470,7 @@ function OutputStream(options) {
|
||||
|| p instanceof AST_Unary // !(foo, bar, baz)
|
||||
|| p instanceof AST_Binary // 1 + (2, 3) + 4 ==> 8
|
||||
|| p instanceof AST_VarDef // var a = (1, 2), b = a + a; ==> b == 4
|
||||
|| p instanceof AST_Dot // (1, {foo:2}).foo ==> 2
|
||||
|| p instanceof AST_PropAccess // (1, {foo:2}).foo or (1, {foo:2})["foo"] ==> 2
|
||||
|| p instanceof AST_Array // [ 1, (2, 3), 4 ] ==> [ 1, 3, 4 ]
|
||||
|| p instanceof AST_ObjectProperty // { foo: (1, 2) }.foo ==> 2
|
||||
|| p instanceof AST_Conditional /* (false, true) ? (a = 10, b = 20) : (c = 30)
|
||||
@@ -476,11 +495,7 @@ function OutputStream(options) {
|
||||
var so = this.operator, sp = PRECEDENCE[so];
|
||||
if (pp > sp
|
||||
|| (pp == sp
|
||||
&& this === p.right
|
||||
&& !(so == po &&
|
||||
(so == "*" ||
|
||||
so == "&&" ||
|
||||
so == "||")))) {
|
||||
&& this === p.right)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -507,8 +522,17 @@ function OutputStream(options) {
|
||||
});
|
||||
|
||||
PARENS(AST_Call, function(output){
|
||||
var p = output.parent();
|
||||
return p instanceof AST_New && p.expression === this;
|
||||
var p = output.parent(), p1;
|
||||
if (p instanceof AST_New && p.expression === this)
|
||||
return true;
|
||||
|
||||
// workaround for Safari bug.
|
||||
// https://bugs.webkit.org/show_bug.cgi?id=123506
|
||||
return this.expression instanceof AST_Function
|
||||
&& p instanceof AST_PropAccess
|
||||
&& p.expression === this
|
||||
&& (p1 = output.parent(1)) instanceof AST_Assign
|
||||
&& p1.left === p;
|
||||
});
|
||||
|
||||
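The extra parentheses above work around the WebKit bug linked in the comment, which mis-parses an immediately invoked function whose result is the target of a property assignment. The protected shape is roughly:

    (function(){ return {}; })().foo = 1;   // the call stays parenthesized for Safari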
PARENS(AST_New, function(output){
|
||||
@@ -531,7 +555,7 @@ function OutputStream(options) {
|
||||
return true;
|
||||
});
|
||||
|
||||
function assign_and_conditional_paren_rules(output) {
|
||||
PARENS([ AST_Assign, AST_Conditional ], function (output){
|
||||
var p = output.parent();
|
||||
// !(a = false) → true
|
||||
if (p instanceof AST_Unary)
|
||||
@@ -548,10 +572,7 @@ function OutputStream(options) {
|
||||
// (a = foo)["prop"] —or— (a = foo).prop
|
||||
if (p instanceof AST_PropAccess && p.expression === this)
|
||||
return true;
|
||||
};
|
||||
|
||||
PARENS(AST_Assign, assign_and_conditional_paren_rules);
|
||||
PARENS(AST_Conditional, assign_and_conditional_paren_rules);
|
||||
});
|
||||
|
||||
/* -----[ PRINTERS ]----- */
|
||||
|
||||
@@ -638,7 +659,7 @@ function OutputStream(options) {
|
||||
output.print("for");
|
||||
output.space();
|
||||
output.with_parens(function(){
|
||||
if (self.init) {
|
||||
if (self.init && !(self.init instanceof AST_EmptyStatement)) {
|
||||
if (self.init instanceof AST_Definitions) {
|
||||
self.init.print(output);
|
||||
} else {
|
||||
@@ -757,7 +778,7 @@ function OutputStream(options) {
|
||||
if (!self.body)
|
||||
return output.force_semicolon();
|
||||
if (self.body instanceof AST_Do
|
||||
&& output.option("ie_proof")) {
|
||||
&& !output.option("screw_ie8")) {
|
||||
// https://github.com/mishoo/UglifyJS/issues/#issue/57 IE
|
||||
// croaks with "syntax error" on code like this: if (foo)
|
||||
// do ... while(cond); else ... we need block brackets
|
||||
@@ -978,8 +999,12 @@ function OutputStream(options) {
|
||||
DEFPRINT(AST_UnaryPrefix, function(self, output){
|
||||
var op = self.operator;
|
||||
output.print(op);
|
||||
if (/^[a-z]/i.test(op))
|
||||
if (/^[a-z]/i.test(op)
|
||||
|| (/[+-]$/.test(op)
|
||||
&& self.expression instanceof AST_UnaryPrefix
|
||||
&& /^[+-]/.test(self.expression.operator))) {
|
||||
output.space();
|
||||
}
|
||||
self.expression.print(output);
|
||||
});
|
||||
DEFPRINT(AST_UnaryPostfix, function(self, output){
|
||||
@@ -990,7 +1015,18 @@ function OutputStream(options) {
|
||||
self.left.print(output);
|
||||
output.space();
|
||||
output.print(self.operator);
|
||||
if (self.operator == "<"
|
||||
&& self.right instanceof AST_UnaryPrefix
|
||||
&& self.right.operator == "!"
|
||||
&& self.right.expression instanceof AST_UnaryPrefix
|
||||
&& self.right.expression.operator == "--") {
|
||||
// space is mandatory to avoid outputting <!--
|
||||
// http://javascript.spec.whatwg.org/#comment-syntax
|
||||
output.print(" ");
|
||||
} else {
|
||||
// the space is optional depending on "beautify"
|
||||
output.space();
|
||||
}
|
||||
self.right.print(output);
|
||||
});
|
||||
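Two spacing corner cases handled above: nested sign operators must not merge into ++ or --, and the sequence <!-- must never appear, since it would start an HTML comment. The output therefore keeps a space even when minifying:

    -(-x)        // printed "- -x", never "--x"
    a < !--b     // printed "a< !--b", never "a<!--b"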
DEFPRINT(AST_Conditional, function(self, output){
|
||||
@@ -1012,6 +1048,11 @@ function OutputStream(options) {
|
||||
a.forEach(function(exp, i){
|
||||
if (i) output.comma();
|
||||
exp.print(output);
|
||||
// If the final element is a hole, we need to make sure it
|
||||
// doesn't look like a trailing comma, by inserting an actual
|
||||
// trailing comma.
|
||||
if (i === len - 1 && exp instanceof AST_Hole)
|
||||
output.comma();
|
||||
});
|
||||
if (len > 0) output.space();
|
||||
});
|
||||
@@ -1039,20 +1080,24 @@ function OutputStream(options) {
|
||||
&& +key + "" == key)
|
||||
&& parseFloat(key) >= 0) {
|
||||
output.print(make_num(key));
|
||||
} else if (!is_identifier(key)) {
|
||||
output.print_string(key);
|
||||
} else {
|
||||
} else if (RESERVED_WORDS(key) ? output.option("screw_ie8") : is_identifier_string(key)) {
|
||||
output.print_name(key);
|
||||
} else {
|
||||
output.print_string(key);
|
||||
}
|
||||
output.colon();
|
||||
self.value.print(output);
|
||||
});
|
||||
DEFPRINT(AST_ObjectSetter, function(self, output){
|
||||
output.print("set");
|
||||
output.space();
|
||||
self.key.print(output);
|
||||
self.value._do_print(output, true);
|
||||
});
|
||||
DEFPRINT(AST_ObjectGetter, function(self, output){
|
||||
output.print("get");
|
||||
output.space();
|
||||
self.key.print(output);
|
||||
self.value._do_print(output, true);
|
||||
});
|
||||
DEFPRINT(AST_Symbol, function(self, output){
|
||||
@@ -1081,10 +1126,47 @@ function OutputStream(options) {
|
||||
DEFPRINT(AST_Number, function(self, output){
|
||||
output.print(make_num(self.getValue()));
|
||||
});
|
||||
|
||||
function regexp_safe_literal(code) {
|
||||
return [
|
||||
0x5c , // \
|
||||
0x2f , // /
|
||||
0x2e , // .
|
||||
0x2b , // +
|
||||
0x2a , // *
|
||||
0x3f , // ?
|
||||
0x28 , // (
|
||||
0x29 , // )
|
||||
0x5b , // [
|
||||
0x5d , // ]
|
||||
0x7b , // {
|
||||
0x7d , // }
|
||||
0x24 , // $
|
||||
0x5e , // ^
|
||||
0x3a , // :
|
||||
0x7c , // |
|
||||
0x21 , // !
|
||||
0x0a , // \n
|
||||
0x0d , // \r
|
||||
0x00 , // \0
|
||||
0xfeff , // Unicode BOM
|
||||
0x2028 , // unicode "line separator"
|
||||
0x2029 , // unicode "paragraph separator"
|
||||
].indexOf(code) < 0;
|
||||
};
|
||||
|
||||
DEFPRINT(AST_RegExp, function(self, output){
|
||||
var str = self.getValue().toString();
|
||||
if (output.option("ascii_only"))
|
||||
if (output.option("ascii_only")) {
|
||||
str = output.to_ascii(str);
|
||||
} else if (output.option("unescape_regexps")) {
|
||||
str = str.split("\\\\").map(function(str){
|
||||
return str.replace(/\\u[0-9a-fA-F]{4}|\\x[0-9a-fA-F]{2}/g, function(s){
|
||||
var code = parseInt(s.substr(2), 16);
|
||||
return regexp_safe_literal(code) ? String.fromCharCode(code) : s;
|
||||
});
|
||||
}).join("\\\\");
|
||||
}
|
||||
output.print(str);
|
||||
var p = output.parent();
|
||||
if (p instanceof AST_Binary && /^in/.test(p.operator) && p.left === self)
|
||||
@@ -1119,7 +1201,7 @@ function OutputStream(options) {
|
||||
if (p instanceof AST_Statement && p.body === node)
|
||||
return true;
|
||||
if ((p instanceof AST_Seq && p.car === node ) ||
|
||||
(p instanceof AST_Call && p.expression === node ) ||
|
||||
(p instanceof AST_Call && p.expression === node && !(p instanceof AST_New) ) ||
|
||||
(p instanceof AST_Dot && p.expression === node ) ||
|
||||
(p instanceof AST_Sub && p.expression === node ) ||
|
||||
(p instanceof AST_Conditional && p.condition === node ) ||
|
||||
|
||||
170  lib/parse.js
@@ -46,7 +46,7 @@
|
||||
|
||||
var KEYWORDS = 'break case catch const continue debugger default delete do else finally for function if in instanceof new return switch throw try typeof var void while with';
|
||||
var KEYWORDS_ATOM = 'false null true';
|
||||
var RESERVED_WORDS = 'abstract boolean byte char class double enum export extends final float goto implements import int interface long native package private protected public short static super synchronized this throws transient volatile'
|
||||
var RESERVED_WORDS = 'abstract boolean byte char class double enum export extends final float goto implements import int interface long native package private protected public short static super synchronized this throws transient volatile yield'
|
||||
+ " " + KEYWORDS_ATOM + " " + KEYWORDS;
|
||||
var KEYWORDS_BEFORE_EXPRESSION = 'return new delete throw else case';
|
||||
|
||||
@@ -167,6 +167,10 @@ function is_identifier_char(ch) {
|
||||
;
|
||||
};
|
||||
|
||||
function is_identifier_string(str){
|
||||
return /^[a-z_$][a-z0-9_$]*$/i.test(str);
|
||||
};
|
||||
|
||||
function parse_js_number(num) {
|
||||
if (RE_HEX_NUMBER.test(num)) {
|
||||
return parseInt(num.substr(2), 16);
|
||||
@@ -199,7 +203,7 @@ function is_token(token, type, val) {
|
||||
|
||||
var EX_EOF = {};
|
||||
|
||||
function tokenizer($TEXT, filename) {
|
||||
function tokenizer($TEXT, filename, html5_comments) {
|
||||
|
||||
var S = {
|
||||
text : $TEXT.replace(/\r\n?|[\n\u2028\u2029]/g, "\n").replace(/\uFEFF/g, ''),
|
||||
@@ -231,6 +235,14 @@ function tokenizer($TEXT, filename) {
|
||||
return ch;
|
||||
};
|
||||
|
||||
function forward(i) {
|
||||
while (i-- > 0) next();
|
||||
};
|
||||
|
||||
function looking_at(str) {
|
||||
return S.text.substr(S.pos, str.length) == str;
|
||||
};
|
||||
|
||||
function find(what, signal_eof) {
|
||||
var pos = S.text.indexOf(what, S.pos);
|
||||
if (signal_eof && pos == -1) throw EX_EOF;
|
||||
@@ -243,10 +255,12 @@ function tokenizer($TEXT, filename) {
|
||||
S.tokpos = S.pos;
|
||||
};
|
||||
|
||||
var prev_was_dot = false;
|
||||
function token(type, value, is_comment) {
|
||||
S.regex_allowed = ((type == "operator" && !UNARY_POSTFIX[value]) ||
|
||||
S.regex_allowed = ((type == "operator" && !UNARY_POSTFIX(value)) ||
|
||||
(type == "keyword" && KEYWORDS_BEFORE_EXPRESSION(value)) ||
|
||||
(type == "punc" && PUNC_BEFORE_EXPRESSION(value)));
|
||||
prev_was_dot = (type == "punc" && value == ".");
|
||||
var ret = {
|
||||
type : type,
|
||||
value : value,
|
||||
@@ -368,8 +382,8 @@ function tokenizer($TEXT, filename) {
|
||||
return token("string", ret);
|
||||
});
|
||||
|
||||
function read_line_comment() {
|
||||
next();
|
||||
function skip_line_comment(type) {
|
||||
var regex_allowed = S.regex_allowed;
|
||||
var i = find("\n"), ret;
|
||||
if (i == -1) {
|
||||
ret = S.text.substr(S.pos);
|
||||
@@ -378,11 +392,13 @@ function tokenizer($TEXT, filename) {
|
||||
ret = S.text.substring(S.pos, i);
|
||||
S.pos = i;
|
||||
}
|
||||
return token("comment1", ret, true);
|
||||
S.comments_before.push(token(type, ret, true));
|
||||
S.regex_allowed = regex_allowed;
|
||||
return next_token();
|
||||
};
|
||||
|
||||
var read_multiline_comment = with_eof_error("Unterminated multiline comment", function(){
|
||||
next();
|
||||
var skip_multiline_comment = with_eof_error("Unterminated multiline comment", function(){
|
||||
var regex_allowed = S.regex_allowed;
|
||||
var i = find("*/", true);
|
||||
var text = S.text.substring(S.pos, i);
|
||||
var a = text.split("\n"), n = a.length;
|
||||
@@ -392,8 +408,11 @@ function tokenizer($TEXT, filename) {
|
||||
if (n > 1) S.col = a[n - 1].length;
|
||||
else S.col += a[n - 1].length;
|
||||
S.col += 2;
|
||||
S.newline_before = S.newline_before || text.indexOf("\n") >= 0;
|
||||
return token("comment2", text, true);
|
||||
var nlb = S.newline_before = S.newline_before || text.indexOf("\n") >= 0;
|
||||
S.comments_before.push(token("comment2", text, true));
|
||||
S.regex_allowed = regex_allowed;
|
||||
S.newline_before = nlb;
|
||||
return next_token();
|
||||
});
|
||||
|
||||
function read_name() {
|
||||
@@ -457,16 +476,13 @@ function tokenizer($TEXT, filename) {
|
||||
|
||||
function handle_slash() {
|
||||
next();
|
||||
var regex_allowed = S.regex_allowed;
|
||||
switch (peek()) {
|
||||
case "/":
|
||||
S.comments_before.push(read_line_comment());
|
||||
S.regex_allowed = regex_allowed;
|
||||
return next_token();
|
||||
next();
|
||||
return skip_line_comment("comment1");
|
||||
case "*":
|
||||
S.comments_before.push(read_multiline_comment());
|
||||
S.regex_allowed = regex_allowed;
|
||||
return next_token();
|
||||
next();
|
||||
return skip_multiline_comment();
|
||||
}
|
||||
return S.regex_allowed ? read_regexp("") : read_operator("/");
|
||||
};
|
||||
@@ -480,6 +496,7 @@ function tokenizer($TEXT, filename) {
|
||||
|
||||
function read_word() {
|
||||
var word = read_name();
|
||||
if (prev_was_dot) return token("name", word);
|
||||
return KEYWORDS_ATOM(word) ? token("atom", word)
|
||||
: !KEYWORDS(word) ? token("name", word)
|
||||
: OPERATORS(word) ? token("operator", word)
|
||||
@@ -502,6 +519,16 @@ function tokenizer($TEXT, filename) {
|
||||
return read_regexp(force_regexp);
|
||||
skip_whitespace();
|
||||
start_token();
|
||||
if (html5_comments) {
|
||||
if (looking_at("<!--")) {
|
||||
forward(4);
|
||||
return skip_line_comment("comment3");
|
||||
}
|
||||
if (looking_at("-->") && S.newline_before) {
|
||||
forward(3);
|
||||
return skip_line_comment("comment4");
|
||||
}
|
||||
}
|
||||
var ch = peek();
|
||||
if (!ch) return token("eof");
|
||||
var code = ch.charCodeAt(0);
|
||||
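With html5_comments enabled (the new default), the tokenizer treats the HTML comment markers as line comments, matching the WHATWG comment-syntax rules:

    <!-- everything after this marker is skipped, like a // comment
    x = 1;
    --> a "-->" is also skipped, but only when it starts a line (newline before it)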
@@ -545,10 +572,10 @@ var UNARY_POSTFIX = makePredicate([ "--", "++" ]);
|
||||
var ASSIGNMENT = makePredicate([ "=", "+=", "-=", "/=", "*=", "%=", ">>=", "<<=", ">>>=", "|=", "^=", "&=" ]);
|
||||
|
||||
var PRECEDENCE = (function(a, ret){
|
||||
for (var i = 0, n = 1; i < a.length; ++i, ++n) {
|
||||
for (var i = 0; i < a.length; ++i) {
|
||||
var b = a[i];
|
||||
for (var j = 0; j < b.length; ++j) {
|
||||
ret[b[j]] = n;
|
||||
ret[b[j]] = i + 1;
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
@@ -579,11 +606,17 @@ function parse($TEXT, options) {
|
||||
options = defaults(options, {
|
||||
strict : false,
|
||||
filename : null,
|
||||
toplevel : null
|
||||
toplevel : null,
|
||||
expression : false,
|
||||
html5_comments : true,
|
||||
bare_returns : false,
|
||||
});
|
||||
|
||||
var S = {
|
||||
input : typeof $TEXT == "string" ? tokenizer($TEXT, options.filename) : $TEXT,
|
||||
input : (typeof $TEXT == "string"
|
||||
? tokenizer($TEXT, options.filename,
|
||||
options.html5_comments)
|
||||
: $TEXT),
|
||||
token : null,
|
||||
prev : null,
|
||||
peeked : null,
|
||||
@@ -676,12 +709,16 @@ function parse($TEXT, options) {
|
||||
};
|
||||
};
|
||||
|
||||
var statement = embed_tokens(function() {
|
||||
var tmp;
|
||||
function handle_regexp() {
|
||||
if (is("operator", "/") || is("operator", "/=")) {
|
||||
S.peeked = null;
|
||||
S.token = S.input(S.token.value.substr(1)); // force regexp
|
||||
}
|
||||
};
|
||||
|
||||
var statement = embed_tokens(function() {
|
||||
var tmp;
|
||||
handle_regexp();
|
||||
switch (S.token.type) {
|
||||
case "string":
|
||||
var dir = S.in_directives, stat = simple_statement();
|
||||
@@ -746,13 +783,13 @@ function parse($TEXT, options) {
|
||||
return for_();
|
||||
|
||||
case "function":
|
||||
return function_(true);
|
||||
return function_(AST_Defun);
|
||||
|
||||
case "if":
|
||||
return if_();
|
||||
|
||||
case "return":
|
||||
if (S.in_function == 0)
|
||||
if (S.in_function == 0 && !options.bare_returns)
|
||||
croak("'return' outside of function");
|
||||
return new AST_Return({
|
||||
value: ( is("punc", ";")
|
||||
@@ -809,6 +846,18 @@ function parse($TEXT, options) {
|
||||
S.labels.push(label);
|
||||
var stat = statement();
|
||||
S.labels.pop();
|
||||
if (!(stat instanceof AST_IterationStatement)) {
|
||||
// check for `continue` that refers to this label.
|
||||
// those should be reported as syntax errors.
|
||||
// https://github.com/mishoo/UglifyJS2/issues/287
|
||||
label.references.forEach(function(ref){
|
||||
if (ref instanceof AST_Continue) {
|
||||
ref = ref.label.start;
|
||||
croak("Continue label `" + label.name + "` refers to non-IterationStatement.",
|
||||
ref.line, ref.col, ref.pos);
|
||||
}
|
||||
});
|
||||
}
|
||||
return new AST_LabeledStatement({ body: stat, label: label });
|
||||
};
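A sketch of what the new check rejects (issue #287): per ECMA-262 a continue label must name an enclosing IterationStatement, so labelling a plain block is now reported as a parse error instead of producing broken output.

// Rejected by the parser after this change: "outer" labels a block, not a loop.
// outer: {
//     for (var i = 0; i < 3; i++) continue outer;
// }

// Accepted: the label sits on the iteration statement itself.
outer: for (var i = 0; i < 3; i++) {
    if (i === 1) continue outer;
}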
|
||||
|
||||
@@ -817,18 +866,22 @@ function parse($TEXT, options) {
|
||||
};
|
||||
|
||||
function break_cont(type) {
|
||||
var label = null;
|
||||
var label = null, ldef;
|
||||
if (!can_insert_semicolon()) {
|
||||
label = as_symbol(AST_LabelRef, true);
|
||||
}
|
||||
if (label != null) {
|
||||
if (!find_if(function(l){ return l.name == label.name }, S.labels))
|
||||
ldef = find_if(function(l){ return l.name == label.name }, S.labels);
|
||||
if (!ldef)
|
||||
croak("Undefined label " + label.name);
|
||||
label.thedef = ldef;
|
||||
}
|
||||
else if (S.in_loop == 0)
|
||||
croak(type.TYPE + " not inside a loop or switch");
|
||||
semicolon();
|
||||
return new type({ label: label });
|
||||
var stat = new type({ label: label });
|
||||
if (ldef) ldef.references.push(stat);
|
||||
return stat;
|
||||
};
|
||||
|
||||
function for_() {
|
||||
@@ -874,19 +927,12 @@ function parse($TEXT, options) {
|
||||
});
|
||||
};
|
||||
|
||||
var function_ = function(in_statement, ctor) {
|
||||
var is_accessor = ctor === AST_Accessor;
|
||||
var name = (is("name") ? as_symbol(in_statement
|
||||
? AST_SymbolDefun
|
||||
: is_accessor
|
||||
? AST_SymbolAccessor
|
||||
: AST_SymbolLambda)
|
||||
: is_accessor && (is("string") || is("num")) ? as_atom_node()
|
||||
: null);
|
||||
var function_ = function(ctor) {
|
||||
var in_statement = ctor === AST_Defun;
|
||||
var name = is("name") ? as_symbol(in_statement ? AST_SymbolDefun : AST_SymbolLambda) : null;
|
||||
if (in_statement && !name)
|
||||
unexpected();
|
||||
expect("(");
|
||||
if (!ctor) ctor = in_statement ? AST_Defun : AST_Function;
|
||||
return new ctor({
|
||||
name: name,
|
||||
argnames: (function(first, a){
|
||||
@@ -1057,7 +1103,9 @@ function parse($TEXT, options) {
|
||||
var tok = S.token, ret;
|
||||
switch (tok.type) {
|
||||
case "name":
|
||||
return as_symbol(AST_SymbolRef);
|
||||
case "keyword":
|
||||
ret = _make_symbol(AST_SymbolRef);
|
||||
break;
|
||||
case "num":
|
||||
ret = new AST_Number({ start: tok, end: tok, value: tok.value });
|
||||
break;
|
||||
@@ -1108,7 +1156,7 @@ function parse($TEXT, options) {
|
||||
}
|
||||
if (is("keyword", "function")) {
|
||||
next();
|
||||
var func = function_(false);
|
||||
var func = function_(AST_Function);
|
||||
func.start = start;
|
||||
func.end = prev();
|
||||
return subscripts(func, allow_calls);
|
||||
@@ -1156,8 +1204,8 @@ function parse($TEXT, options) {
|
||||
if (name == "get") {
|
||||
a.push(new AST_ObjectGetter({
|
||||
start : start,
|
||||
key : name,
|
||||
value : function_(false, AST_Accessor),
|
||||
key : as_atom_node(),
|
||||
value : function_(AST_Accessor),
|
||||
end : prev()
|
||||
}));
|
||||
continue;
|
||||
@@ -1165,8 +1213,8 @@ function parse($TEXT, options) {
|
||||
if (name == "set") {
|
||||
a.push(new AST_ObjectSetter({
|
||||
start : start,
|
||||
key : name,
|
||||
value : function_(false, AST_Accessor),
|
||||
key : as_atom_node(),
|
||||
value : function_(AST_Accessor),
|
||||
end : prev()
|
||||
}));
|
||||
continue;
|
||||
@@ -1214,17 +1262,21 @@ function parse($TEXT, options) {
|
||||
}
|
||||
};
|
||||
|
||||
function _make_symbol(type) {
|
||||
var name = S.token.value;
|
||||
return new (name == "this" ? AST_This : type)({
|
||||
name : String(name),
|
||||
start : S.token,
|
||||
end : S.token
|
||||
});
|
||||
};
|
||||
|
||||
function as_symbol(type, noerror) {
|
||||
if (!is("name")) {
|
||||
if (!noerror) croak("Name expected");
|
||||
return null;
|
||||
}
|
||||
var name = S.token.value;
|
||||
var sym = new (name == "this" ? AST_This : type)({
|
||||
name : String(S.token.value),
|
||||
start : S.token,
|
||||
end : S.token
|
||||
});
|
||||
var sym = _make_symbol(type);
|
||||
next();
|
||||
return sym;
|
||||
};
|
||||
@@ -1267,6 +1319,7 @@ function parse($TEXT, options) {
|
||||
var start = S.token;
|
||||
if (is("operator") && UNARY_PREFIX(start.value)) {
|
||||
next();
|
||||
handle_regexp();
|
||||
var ex = make_unary(AST_UnaryPrefix, start.value, maybe_unary(allow_calls));
|
||||
ex.start = start;
|
||||
ex.end = prev();
|
||||
@@ -1322,7 +1375,7 @@ function parse($TEXT, options) {
|
||||
condition : expr,
|
||||
consequent : yes,
|
||||
alternative : expression(false, no_in),
|
||||
end : peek()
|
||||
end : prev()
|
||||
});
|
||||
}
|
||||
return expr;
|
||||
@@ -1330,15 +1383,8 @@ function parse($TEXT, options) {
|
||||
|
||||
function is_assignable(expr) {
|
||||
if (!options.strict) return true;
|
||||
switch (expr[0]+"") {
|
||||
case "dot":
|
||||
case "sub":
|
||||
case "new":
|
||||
case "call":
|
||||
return true;
|
||||
case "name":
|
||||
return expr[1] != "this";
|
||||
}
|
||||
if (expr instanceof AST_This) return false;
|
||||
return (expr instanceof AST_PropAccess || expr instanceof AST_Symbol);
|
||||
};
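The rewrite states assignability in terms of node types instead of the old string tags; roughly, with strict:true the parser keeps accepting property accesses and plain symbols as assignment targets and rejects `this`. Illustrative sample input:

var foo = {};
foo = {};       // AST_SymbolRef                -> assignable
foo.bar = 1;    // AST_Dot  (an AST_PropAccess) -> assignable
foo["x"] = 1;   // AST_Sub  (an AST_PropAccess) -> assignable
// this = 1;    // AST_This                     -> rejected when options.strict is set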
|
||||
|
||||
var maybe_assign = function(no_in) {
|
||||
@@ -1382,6 +1428,10 @@ function parse($TEXT, options) {
|
||||
return ret;
|
||||
};
|
||||
|
||||
if (options.expression) {
|
||||
return expression(true);
|
||||
}
|
||||
|
||||
return (function(){
|
||||
var start = S.token;
|
||||
var body = [];
|
||||
|
||||
112 lib/scope.js
@@ -64,38 +64,41 @@ SymbolDef.prototype = {
|
||||
mangle: function(options) {
|
||||
if (!this.mangled_name && !this.unmangleable(options)) {
|
||||
var s = this.scope;
|
||||
if (this.orig[0] instanceof AST_SymbolLambda && !options.screw_ie8)
|
||||
if (!options.screw_ie8 && this.orig[0] instanceof AST_SymbolLambda)
|
||||
s = s.parent_scope;
|
||||
this.mangled_name = s.next_mangled(options);
|
||||
this.mangled_name = s.next_mangled(options, this);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
AST_Toplevel.DEFMETHOD("figure_out_scope", function(){
|
||||
// This does what ast_add_scope did in UglifyJS v1.
|
||||
//
|
||||
// Part of it could be done at parse time, but it would complicate
|
||||
// the parser (and it's already kinda complex). It's also worth
|
||||
// having it separated because we might need to call it multiple
|
||||
// times on the same tree.
|
||||
AST_Toplevel.DEFMETHOD("figure_out_scope", function(options){
|
||||
options = defaults(options, {
|
||||
screw_ie8: false
|
||||
});
|
||||
|
||||
// pass 1: setup scope chaining and handle definitions
|
||||
var self = this;
|
||||
var scope = self.parent_scope = null;
|
||||
var labels = new Dictionary();
|
||||
var defun = null;
|
||||
var nesting = 0;
|
||||
var tw = new TreeWalker(function(node, descend){
|
||||
if (options.screw_ie8 && node instanceof AST_Catch) {
|
||||
var save_scope = scope;
|
||||
scope = new AST_Scope(node);
|
||||
scope.init_scope_vars(nesting);
|
||||
scope.parent_scope = save_scope;
|
||||
descend();
|
||||
scope = save_scope;
|
||||
return true;
|
||||
}
|
||||
if (node instanceof AST_Scope) {
|
||||
node.init_scope_vars(nesting);
|
||||
var save_scope = node.parent_scope = scope;
|
||||
var save_labels = labels;
|
||||
++nesting;
|
||||
scope = node;
|
||||
labels = new Dictionary();
|
||||
descend();
|
||||
labels = save_labels;
|
||||
var save_defun = defun;
|
||||
defun = scope = node;
|
||||
++nesting; descend(); --nesting;
|
||||
scope = save_scope;
|
||||
--nesting;
|
||||
defun = save_defun;
|
||||
return true; // don't descend again in TreeWalker
|
||||
}
|
||||
if (node instanceof AST_Directive) {
|
||||
@@ -108,24 +111,11 @@ AST_Toplevel.DEFMETHOD("figure_out_scope", function(){
|
||||
s.uses_with = true;
|
||||
return;
|
||||
}
|
||||
if (node instanceof AST_LabeledStatement) {
|
||||
var l = node.label;
|
||||
if (labels.has(l.name))
|
||||
throw new Error(string_template("Label {name} defined twice", l));
|
||||
labels.set(l.name, l);
|
||||
descend();
|
||||
labels.del(l.name);
|
||||
return true; // no descend again
|
||||
}
|
||||
if (node instanceof AST_Symbol) {
|
||||
node.scope = scope;
|
||||
}
|
||||
if (node instanceof AST_Label) {
|
||||
node.thedef = node;
|
||||
node.init_scope_vars();
|
||||
}
|
||||
if (node instanceof AST_SymbolLambda) {
|
||||
scope.def_function(node);
|
||||
defun.def_function(node);
|
||||
}
|
||||
else if (node instanceof AST_SymbolDefun) {
|
||||
// Careful here, the scope where this should be defined is
|
||||
@@ -133,31 +123,17 @@ AST_Toplevel.DEFMETHOD("figure_out_scope", function(){
|
||||
// scope when we encounter the AST_Defun node (which is
|
||||
// instanceof AST_Scope) but we get to the symbol a bit
|
||||
// later.
|
||||
(node.scope = scope.parent_scope).def_function(node);
|
||||
(node.scope = defun.parent_scope).def_function(node);
|
||||
}
|
||||
else if (node instanceof AST_SymbolVar
|
||||
|| node instanceof AST_SymbolConst) {
|
||||
var def = scope.def_variable(node);
|
||||
var def = defun.def_variable(node);
|
||||
def.constant = node instanceof AST_SymbolConst;
|
||||
def.init = tw.parent().value;
|
||||
}
|
||||
else if (node instanceof AST_SymbolCatch) {
|
||||
// XXX: this is wrong according to ECMA-262 (12.4). the
|
||||
// `catch` argument name should be visible only inside the
|
||||
// catch block. For a quick fix AST_Catch should inherit
|
||||
// from AST_Scope. Keeping it this way because of IE,
|
||||
// which doesn't obey the standard. (it introduces the
|
||||
// identifier in the enclosing scope)
|
||||
scope.def_variable(node);
|
||||
}
|
||||
if (node instanceof AST_LabelRef) {
|
||||
var sym = labels.get(node.name);
|
||||
if (!sym) throw new Error(string_template("Undefined label {name} [{line},{col}]", {
|
||||
name: node.name,
|
||||
line: node.start.line,
|
||||
col: node.start.col
|
||||
}));
|
||||
node.thedef = sym;
|
||||
(options.screw_ie8 ? scope : defun)
|
||||
.def_variable(node);
|
||||
}
|
||||
});
|
||||
self.walk(tw);
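A short illustration of the IE quirk behind the screw_ie8 branches above; the function is sample input, not part of the patch.

function f() {
    try { throw "boom"; } catch (err) { console.log(err); }
    // Per ECMA-262 (12.4) `err` is scoped to the catch block and is not visible
    // here, but IE8 and older leak it into f's scope.  That is why, without
    // screw_ie8, the AST_SymbolCatch is defined on the enclosing defun, while
    // with screw_ie8 the catch body gets a scope of its own and can be mangled freely.
}
f();

Callers opt in through the new options object, e.g. toplevel.figure_out_scope({ screw_ie8: true }), matching the defaults added at the top of this hunk.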
|
||||
@@ -173,10 +149,6 @@ AST_Toplevel.DEFMETHOD("figure_out_scope", function(){
|
||||
func = prev_func;
|
||||
return true;
|
||||
}
|
||||
if (node instanceof AST_LabelRef) {
|
||||
node.reference();
|
||||
return true;
|
||||
}
|
||||
if (node instanceof AST_SymbolRef) {
|
||||
var name = node.name;
|
||||
var sym = node.scope.find_variable(name);
|
||||
@@ -187,6 +159,7 @@ AST_Toplevel.DEFMETHOD("figure_out_scope", function(){
|
||||
} else {
|
||||
g = new SymbolDef(self, globals.size(), node);
|
||||
g.undeclared = true;
|
||||
g.global = true;
|
||||
globals.set(name, g);
|
||||
}
|
||||
node.thedef = g;
|
||||
@@ -194,7 +167,7 @@ AST_Toplevel.DEFMETHOD("figure_out_scope", function(){
|
||||
for (var s = node.scope; s && !s.uses_eval; s = s.parent_scope)
|
||||
s.uses_eval = true;
|
||||
}
|
||||
if (name == "arguments") {
|
||||
if (func && name == "arguments") {
|
||||
func.uses_arguments = true;
|
||||
}
|
||||
} else {
|
||||
@@ -240,14 +213,6 @@ AST_SymbolRef.DEFMETHOD("reference", function() {
|
||||
this.frame = this.scope.nesting - def.scope.nesting;
|
||||
});
|
||||
|
||||
AST_Label.DEFMETHOD("init_scope_vars", function(){
|
||||
this.references = [];
|
||||
});
|
||||
|
||||
AST_LabelRef.DEFMETHOD("reference", function(){
|
||||
this.thedef.references.push(this);
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("find_variable", function(name){
|
||||
if (name instanceof AST_Symbol) name = name.name;
|
||||
return this.variables.get(name)
|
||||
@@ -281,6 +246,11 @@ AST_Scope.DEFMETHOD("next_mangled", function(options){
|
||||
out: while (true) {
|
||||
var m = base54(++this.cname);
|
||||
if (!is_identifier(m)) continue; // skip over "do"
|
||||
|
||||
// https://github.com/mishoo/UglifyJS2/issues/242 -- do not
|
||||
// shadow a name excepted from mangling.
|
||||
if (options.except.indexOf(m) >= 0) continue;
|
||||
|
||||
// we must ensure that the mangled name does not shadow a name
|
||||
// from some parent scope that is referenced in this or in
|
||||
// inner scopes.
|
||||
@@ -293,6 +263,19 @@ AST_Scope.DEFMETHOD("next_mangled", function(options){
|
||||
}
|
||||
});
|
||||
|
||||
AST_Function.DEFMETHOD("next_mangled", function(options, def){
|
||||
// #179, #326
|
||||
// in Safari strict mode, something like (function x(x){...}) is a syntax error;
|
||||
// a function expression's argument cannot shadow the function expression's name
|
||||
|
||||
var tricky_def = def.orig[0] instanceof AST_SymbolFunarg && this.name && this.name.definition();
|
||||
while (true) {
|
||||
var name = AST_Lambda.prototype.next_mangled.call(this, options, def);
|
||||
if (!(tricky_def && tricky_def.mangled_name == name))
|
||||
return name;
|
||||
}
|
||||
});
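A hedged sketch of the Safari restriction referenced in #179/#326; the names are invented.

"use strict";
// var bad = (function a(a) { return a });  // rejected by Safari's strict mode:
//                                          // the argument reuses the function expression's name
var ok = (function a(b) { return b });      // the shape the guarded mangler keeps producing
console.log(ok(42));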
|
||||
|
||||
AST_Scope.DEFMETHOD("references", function(sym){
|
||||
if (sym instanceof AST_Symbol) sym = sym.definition();
|
||||
return this.enclosed.indexOf(sym) < 0 ? null : sym;
|
||||
@@ -382,6 +365,10 @@ AST_Toplevel.DEFMETHOD("mangle_names", function(options){
|
||||
node.mangled_name = name;
|
||||
return true;
|
||||
}
|
||||
if (options.screw_ie8 && node instanceof AST_SymbolCatch) {
|
||||
to_mangle.push(node.definition());
|
||||
return;
|
||||
}
|
||||
});
|
||||
this.walk(tw);
|
||||
to_mangle.forEach(function(def){ def.mangle(options) });
|
||||
@@ -545,6 +532,7 @@ AST_Toplevel.DEFMETHOD("scope_warnings", function(options){
|
||||
}
|
||||
if (options.unreferenced
|
||||
&& (node instanceof AST_SymbolDeclaration || node instanceof AST_Label)
|
||||
&& !(node instanceof AST_SymbolCatch)
|
||||
&& node.unreferenced()) {
|
||||
AST_Node.warn("{type} {name} is declared but not referenced [{file}:{line},{col}]", {
|
||||
type: node instanceof AST_Label ? "Label" : "Symbol",
|
||||
|
||||
@@ -49,6 +49,9 @@ function SourceMap(options) {
|
||||
file : null,
|
||||
root : null,
|
||||
orig : null,
|
||||
|
||||
orig_line_diff : 0,
|
||||
dest_line_diff : 0,
|
||||
});
|
||||
var generator = new MOZ_SourceMap.SourceMapGenerator({
|
||||
file : options.file,
|
||||
@@ -61,14 +64,17 @@ function SourceMap(options) {
|
||||
line: orig_line,
|
||||
column: orig_col
|
||||
});
|
||||
if (info.source === null) {
|
||||
return;
|
||||
}
|
||||
source = info.source;
|
||||
orig_line = info.line;
|
||||
orig_col = info.column;
|
||||
name = info.name;
|
||||
name = info.name || name;
|
||||
}
|
||||
generator.addMapping({
|
||||
generated : { line: gen_line, column: gen_col },
|
||||
original : { line: orig_line, column: orig_col },
|
||||
generated : { line: gen_line + options.dest_line_diff, column: gen_col },
|
||||
original : { line: orig_line + options.orig_line_diff, column: orig_col },
|
||||
source : source,
|
||||
name : name
|
||||
});
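A worked example of the two new offsets (the numbers are hypothetical). With orig_line_diff = 3 (the input was embedded three lines down inside a wrapper) and dest_line_diff = 1 (the output is preceded by one banner line), a node parsed at line 5 and printed at line 2 ends up recorded as if the call above had been:

generator.addMapping({
    generated : { line: 2 + 1, column: 0 },   // gen_line  + options.dest_line_diff
    original  : { line: 5 + 3, column: 0 },   // orig_line + options.orig_line_diff
    source    : "wrapped-input.js",           // hypothetical file name
    name      : null
});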
|
||||
|
||||
@@ -44,7 +44,6 @@
|
||||
"use strict";
|
||||
|
||||
// Tree transformer helpers.
|
||||
// XXX: eventually I should refactor the compressor to use this infrastructure.
|
||||
|
||||
function TreeTransformer(before, after) {
|
||||
TreeWalker.call(this);
|
||||
@@ -160,6 +159,7 @@ TreeTransformer.prototype = new TreeWalker;
|
||||
});
|
||||
|
||||
_(AST_VarDef, function(self, tw){
|
||||
self.name = self.name.transform(tw);
|
||||
if (self.value) self.value = self.value.transform(tw);
|
||||
});
|
||||
|
||||
|
||||
16 lib/utils.js
@@ -82,16 +82,23 @@ function repeat_string(str, i) {
|
||||
};
|
||||
|
||||
function DefaultsError(msg, defs) {
|
||||
Error.call(this, msg);
|
||||
this.msg = msg;
|
||||
this.defs = defs;
|
||||
};
|
||||
DefaultsError.prototype = Object.create(Error.prototype);
|
||||
DefaultsError.prototype.constructor = DefaultsError;
|
||||
|
||||
DefaultsError.croak = function(msg, defs) {
|
||||
throw new DefaultsError(msg, defs);
|
||||
};
|
||||
|
||||
function defaults(args, defs, croak) {
|
||||
if (args === true)
|
||||
args = {};
|
||||
var ret = args || {};
|
||||
if (croak) for (var i in ret) if (ret.hasOwnProperty(i) && !defs.hasOwnProperty(i))
|
||||
throw new DefaultsError("`" + i + "` is not a supported option", defs);
|
||||
DefaultsError.croak("`" + i + "` is not a supported option", defs);
|
||||
for (var i in defs) if (defs.hasOwnProperty(i)) {
|
||||
ret[i] = (args && args.hasOwnProperty(i)) ? args[i] : defs[i];
|
||||
}
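A usage sketch of the defaults()/DefaultsError pair above, assuming both are in scope as defined in this hunk; the option names are illustrative only.

var opts = defaults({ evaluate: true }, {
    evaluate : false,
    unsafe   : false
}, true);
// opts.evaluate === true, opts.unsafe === false

try {
    defaults({ evalute: true }, { evaluate: false }, true);   // note the misspelled key
} catch (e) {
    console.log(e instanceof DefaultsError, e.msg);
    // true "`evalute` is not a supported option"
}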
|
||||
@@ -245,6 +252,13 @@ function makePredicate(words) {
|
||||
return new Function("str", f);
|
||||
};
|
||||
|
||||
function all(array, predicate) {
|
||||
for (var i = array.length; --i >= 0;)
|
||||
if (!predicate(array[i]))
|
||||
return false;
|
||||
return true;
|
||||
};
|
||||
|
||||
function Dictionary() {
|
||||
this._values = Object.create(null);
|
||||
this._size = 0;
|
||||
|
||||
21 package.json
@@ -3,24 +3,35 @@
|
||||
"description": "JavaScript parser, mangler/compressor and beautifier toolkit",
|
||||
"homepage": "http://lisperator.net/uglifyjs",
|
||||
"main": "tools/node.js",
|
||||
"version": "2.3.0",
|
||||
"version": "2.4.16",
|
||||
"engines": { "node" : ">=0.4.0" },
|
||||
"maintainers": [{
|
||||
"name": "Mihai Bazon",
|
||||
"email": "mihai.bazon@gmail.com",
|
||||
"web": "http://lisperator.net/"
|
||||
}],
|
||||
"repositories": [{
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/mishoo/UglifyJS2.git"
|
||||
}],
|
||||
},
|
||||
"dependencies": {
|
||||
"async" : "~0.2.6",
|
||||
"source-map" : "~0.1.7",
|
||||
"optimist" : "~0.3.5"
|
||||
"source-map" : "0.1.34",
|
||||
"optimist": "~0.3.5",
|
||||
"uglify-to-browserify": "~1.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"acorn": "~0.6.0",
|
||||
"escodegen": "~1.3.3",
|
||||
"esfuzz": "~0.3.1",
|
||||
"estraverse": "~1.5.1"
|
||||
},
|
||||
"browserify": {
|
||||
"transform": [ "uglify-to-browserify" ]
|
||||
},
|
||||
"bin": {
|
||||
"uglifyjs" : "bin/uglifyjs"
|
||||
},
|
||||
"license": "BSD",
|
||||
"scripts": {"test": "node test/run-tests.js"}
|
||||
}
|
||||
|
||||
@@ -1,12 +1,74 @@
|
||||
holes_and_undefined: {
|
||||
input: {
|
||||
w = [1,,];
|
||||
x = [1, 2, undefined];
|
||||
y = [1, , 2, ];
|
||||
z = [1, undefined, 3];
|
||||
}
|
||||
expect: {
|
||||
w=[1,,];
|
||||
x=[1,2,void 0];
|
||||
y=[1,,2];
|
||||
z=[1,void 0,3];
|
||||
}
|
||||
}
|
||||
|
||||
constant_join: {
|
||||
options = {
|
||||
unsafe : true,
|
||||
evaluate : true
|
||||
};
|
||||
input: {
|
||||
var a = [ "foo", "bar", "baz" ].join("");
|
||||
var a1 = [ "foo", "bar", "baz" ].join();
|
||||
var b = [ "foo", 1, 2, 3, "bar" ].join("");
|
||||
var c = [ boo(), "foo", 1, 2, 3, "bar", bar() ].join("");
|
||||
var c1 = [ boo(), bar(), "foo", 1, 2, 3, "bar", bar() ].join("");
|
||||
var c2 = [ 1, 2, "foo", "bar", baz() ].join("");
|
||||
var d = [ "foo", 1 + 2 + "bar", "baz" ].join("-");
|
||||
var e = [].join(foo + bar);
|
||||
var f = [].join("");
|
||||
var g = [].join("foo");
|
||||
}
|
||||
expect: {
|
||||
var a = "foobarbaz";
|
||||
var a1 = "foo,bar,baz";
|
||||
var b = "foo123bar";
|
||||
var c = boo() + "foo123bar" + bar();
|
||||
var c1 = "" + boo() + bar() + "foo123bar" + bar();
|
||||
var c2 = "12foobar" + baz();
|
||||
var d = "foo-3bar-baz";
|
||||
var e = [].join(foo + bar);
|
||||
var f = "";
|
||||
var g = "";
|
||||
}
|
||||
}
|
||||
|
||||
constant_join_2: {
|
||||
options = {
|
||||
unsafe : true,
|
||||
evaluate : true
|
||||
};
|
||||
input: {
|
||||
var a = [ "foo", "bar", boo(), "baz", "x", "y" ].join("");
|
||||
var b = [ "foo", "bar", boo(), "baz", "x", "y" ].join("-");
|
||||
var c = [ "foo", "bar", boo(), "baz", "x", "y" ].join("really-long-separator");
|
||||
var d = [ "foo", "bar", boo(),
|
||||
[ "foo", 1, 2, 3, "bar" ].join("+"),
|
||||
"baz", "x", "y" ].join("-");
|
||||
var e = [ "foo", "bar", boo(),
|
||||
[ "foo", 1, 2, 3, "bar" ].join("+"),
|
||||
"baz", "x", "y" ].join("really-long-separator");
|
||||
var f = [ "str", "str" + variable, "foo", "bar", "moo" + foo ].join("");
|
||||
}
|
||||
expect: {
|
||||
var a = "foobar" + boo() + "bazxy";
|
||||
var b = [ "foo-bar", boo(), "baz-x-y" ].join("-");
|
||||
var c = [ "foo", "bar", boo(), "baz", "x", "y" ].join("really-long-separator");
|
||||
var d = [ "foo-bar", boo(), "foo+1+2+3+bar-baz-x-y" ].join("-");
|
||||
var e = [ "foo", "bar", boo(),
|
||||
"foo+1+2+3+bar",
|
||||
"baz", "x", "y" ].join("really-long-separator");
|
||||
var f = "strstr" + variable + "foobarmoo" + foo;
|
||||
}
|
||||
}
|
||||
|
||||
22 test/compress/concat-strings.js Normal file
@@ -0,0 +1,22 @@
|
||||
concat_1: {
|
||||
options = {
|
||||
evaluate: true
|
||||
};
|
||||
input: {
|
||||
var a = "foo" + "bar" + x() + "moo" + "foo" + y() + "x" + "y" + "z" + q();
|
||||
var b = "foo" + 1 + x() + 2 + "boo";
|
||||
var c = 1 + x() + 2 + "boo";
|
||||
|
||||
// this CAN'T safely be shortened to 1 + x() + "5boo"
|
||||
var d = 1 + x() + 2 + 3 + "boo";
|
||||
|
||||
var e = 1 + x() + 2 + "X" + 3 + "boo";
|
||||
}
|
||||
expect: {
|
||||
var a = "foobar" + x() + "moofoo" + y() + "xyz" + q();
|
||||
var b = "foo1" + x() + "2boo";
|
||||
var c = 1 + x() + 2 + "boo";
|
||||
var d = 1 + x() + 2 + 3 + "boo";
|
||||
var e = 1 + x() + 2 + "X3boo";
|
||||
}
|
||||
}
|
||||
@@ -141,3 +141,174 @@ ifs_6: {
|
||||
x = foo || bar || baz || boo ? 20 : 10;
|
||||
}
|
||||
}
|
||||
|
||||
cond_1: {
|
||||
options = {
|
||||
conditionals: true
|
||||
};
|
||||
input: {
|
||||
if (some_condition()) {
|
||||
do_something(x);
|
||||
} else {
|
||||
do_something(y);
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
do_something(some_condition() ? x : y);
|
||||
}
|
||||
}
|
||||
|
||||
cond_2: {
|
||||
options = {
|
||||
conditionals: true
|
||||
};
|
||||
input: {
|
||||
if (some_condition()) {
|
||||
x = new FooBar(1);
|
||||
} else {
|
||||
x = new FooBar(2);
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
x = new FooBar(some_condition() ? 1 : 2);
|
||||
}
|
||||
}
|
||||
|
||||
cond_3: {
|
||||
options = {
|
||||
conditionals: true
|
||||
};
|
||||
input: {
|
||||
if (some_condition()) {
|
||||
new FooBar(1);
|
||||
} else {
|
||||
FooBar(2);
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
some_condition() ? new FooBar(1) : FooBar(2);
|
||||
}
|
||||
}
|
||||
|
||||
cond_4: {
|
||||
options = {
|
||||
conditionals: true
|
||||
};
|
||||
input: {
|
||||
if (some_condition()) {
|
||||
do_something();
|
||||
} else {
|
||||
do_something();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
some_condition(), do_something();
|
||||
}
|
||||
}
|
||||
|
||||
cond_5: {
|
||||
options = {
|
||||
conditionals: true
|
||||
};
|
||||
input: {
|
||||
if (some_condition()) {
|
||||
if (some_other_condition()) {
|
||||
do_something();
|
||||
} else {
|
||||
alternate();
|
||||
}
|
||||
} else {
|
||||
alternate();
|
||||
}
|
||||
|
||||
if (some_condition()) {
|
||||
if (some_other_condition()) {
|
||||
do_something();
|
||||
}
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
some_condition() && some_other_condition() ? do_something() : alternate();
|
||||
some_condition() && some_other_condition() && do_something();
|
||||
}
|
||||
}
|
||||
|
||||
cond_7: {
|
||||
options = {
|
||||
conditionals: true,
|
||||
evaluate : true
|
||||
};
|
||||
input: {
|
||||
var x, y, z, a, b;
|
||||
// compress these
|
||||
if (y) {
|
||||
x = 1+1;
|
||||
} else {
|
||||
x = 2;
|
||||
}
|
||||
|
||||
if (y) {
|
||||
x = 1+1;
|
||||
} else if (z) {
|
||||
x = 2;
|
||||
} else {
|
||||
x = 3-1;
|
||||
}
|
||||
|
||||
x = y ? 'foo' : 'fo'+'o';
|
||||
|
||||
x = y ? 'foo' : y ? 'foo' : 'fo'+'o';
|
||||
|
||||
// Compress conditions that have side effects
|
||||
if (condition()) {
|
||||
x = 10+10;
|
||||
} else {
|
||||
x = 20;
|
||||
}
|
||||
|
||||
if (z) {
|
||||
x = 'fuji';
|
||||
} else if (condition()) {
|
||||
x = 'fu'+'ji';
|
||||
} else {
|
||||
x = 'fuji';
|
||||
}
|
||||
|
||||
x = condition() ? 'foobar' : 'foo'+'bar';
|
||||
|
||||
// don't compress these
|
||||
x = y ? a : b;
|
||||
|
||||
x = y ? 'foo' : 'fo';
|
||||
}
|
||||
expect: {
|
||||
var x, y, z, a, b;
|
||||
x = 2;
|
||||
x = 2;
|
||||
x = 'foo';
|
||||
x = 'foo';
|
||||
x = (condition(), 20);
|
||||
x = z ? 'fuji' : (condition(), 'fuji');
|
||||
x = (condition(), 'foobar');
|
||||
x = y ? a : b;
|
||||
x = y ? 'foo' : 'fo';
|
||||
}
|
||||
}
|
||||
|
||||
cond_7_1: {
|
||||
options = {
|
||||
conditionals: true,
|
||||
evaluate : true
|
||||
};
|
||||
input: {
|
||||
// access to global should be assumed to have side effects
|
||||
if (y) {
|
||||
x = 1+1;
|
||||
} else {
|
||||
x = 2;
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
x = (y, 2);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -95,3 +95,71 @@ unused_circular_references_3: {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
unused_keep_setter_arg: {
|
||||
options = { unused: true };
|
||||
input: {
|
||||
var x = {
|
||||
_foo: null,
|
||||
set foo(val) {
|
||||
},
|
||||
get foo() {
|
||||
return this._foo;
|
||||
}
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
var x = {
|
||||
_foo: null,
|
||||
set foo(val) {
|
||||
},
|
||||
get foo() {
|
||||
return this._foo;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
unused_var_in_catch: {
|
||||
options = { unused: true };
|
||||
input: {
|
||||
function foo() {
|
||||
try {
|
||||
foo();
|
||||
} catch(ex) {
|
||||
var x = 10;
|
||||
}
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
function foo() {
|
||||
try {
|
||||
foo();
|
||||
} catch(ex) {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
used_var_in_catch: {
|
||||
options = { unused: true };
|
||||
input: {
|
||||
function foo() {
|
||||
try {
|
||||
foo();
|
||||
} catch(ex) {
|
||||
var x = 10;
|
||||
}
|
||||
return x;
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
function foo() {
|
||||
try {
|
||||
foo();
|
||||
} catch(ex) {
|
||||
var x = 10;
|
||||
}
|
||||
return x;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
typeof_eq_undefined: {
|
||||
options = {
|
||||
comparisons: true,
|
||||
unsafe: false
|
||||
comparisons: true
|
||||
};
|
||||
input: { a = typeof b.c != "undefined" }
|
||||
expect: { a = "undefined" != typeof b.c }
|
||||
@@ -13,5 +12,14 @@ typeof_eq_undefined_unsafe: {
|
||||
unsafe: true
|
||||
};
|
||||
input: { a = typeof b.c != "undefined" }
|
||||
expect: { a = b.c !== void 0 }
|
||||
expect: { a = void 0 !== b.c }
|
||||
}
|
||||
|
||||
typeof_eq_undefined_unsafe2: {
|
||||
options = {
|
||||
comparisons: true,
|
||||
unsafe: true
|
||||
};
|
||||
input: { a = "undefined" != typeof b.c }
|
||||
expect: { a = void 0 !== b.c }
|
||||
}
|
||||
|
||||
24 test/compress/issue-126.js Normal file
@@ -0,0 +1,24 @@
|
||||
concatenate_rhs_strings: {
|
||||
options = {
|
||||
evaluate: true,
|
||||
unsafe: true,
|
||||
}
|
||||
input: {
|
||||
foo(bar() + 123 + "Hello" + "World");
|
||||
foo(bar() + (123 + "Hello") + "World");
|
||||
foo((bar() + 123) + "Hello" + "World");
|
||||
foo(bar() + 123 + "Hello" + "World" + ("Foo" + "Bar"));
|
||||
foo("Foo" + "Bar" + bar() + 123 + "Hello" + "World" + ("Foo" + "Bar"));
|
||||
foo("Hello" + bar() + 123 + "World");
|
||||
foo(bar() + 'Foo' + (10 + parseInt('10')));
|
||||
}
|
||||
expect: {
|
||||
foo(bar() + 123 + "HelloWorld");
|
||||
foo(bar() + "123HelloWorld");
|
||||
foo((bar() + 123) + "HelloWorld");
|
||||
foo(bar() + 123 + "HelloWorldFooBar");
|
||||
foo("FooBar" + bar() + "123HelloWorldFooBar");
|
||||
foo("Hello" + bar() + "123World");
|
||||
foo(bar() + 'Foo' + (10 + parseInt('10')));
|
||||
}
|
||||
}
|
||||
48 test/compress/issue-143.js Normal file
@@ -0,0 +1,48 @@
|
||||
/**
|
||||
* There was an incorrect sort behaviour documented in issue #143:
|
||||
* (x = f(…)) <= x → x >= (x = f(…))
|
||||
*
|
||||
* For example, let the equation be:
|
||||
* (a = parseInt('100')) <= a
|
||||
*
|
||||
* If a was an integer and has the value of 99,
|
||||
* (a = parseInt('100')) <= a → 100 <= 100 → true
|
||||
*
|
||||
* When transformed incorrectly:
|
||||
* a >= (a = parseInt('100')) → 99 >= 100 → false
|
||||
*/
|
||||
|
||||
tranformation_sort_order_equal: {
|
||||
options = {
|
||||
comparisons: true,
|
||||
};
|
||||
|
||||
input: { (a = parseInt('100')) == a }
|
||||
expect: { (a = parseInt('100')) == a }
|
||||
}
|
||||
|
||||
tranformation_sort_order_unequal: {
|
||||
options = {
|
||||
comparisons: true,
|
||||
};
|
||||
|
||||
input: { (a = parseInt('100')) != a }
|
||||
expect: { (a = parseInt('100')) != a }
|
||||
}
|
||||
|
||||
tranformation_sort_order_lesser_or_equal: {
|
||||
options = {
|
||||
comparisons: true,
|
||||
};
|
||||
|
||||
input: { (a = parseInt('100')) <= a }
|
||||
expect: { (a = parseInt('100')) <= a }
|
||||
}
|
||||
tranformation_sort_order_greater_or_equal: {
|
||||
options = {
|
||||
comparisons: true,
|
||||
};
|
||||
|
||||
input: { (a = parseInt('100')) >= a }
|
||||
expect: { (a = parseInt('100')) >= a }
|
||||
}
|
||||
11 test/compress/issue-267.js Normal file
@@ -0,0 +1,11 @@
|
||||
issue_267: {
|
||||
options = { comparisons: true };
|
||||
input: {
|
||||
x = a % b / b * c * 2;
|
||||
x = a % b * 2
|
||||
}
|
||||
expect: {
|
||||
x = a % b / b * c * 2;
|
||||
x = a % b * 2;
|
||||
}
|
||||
}
|
||||
66 test/compress/issue-269.js Normal file
@@ -0,0 +1,66 @@
|
||||
issue_269_1: {
|
||||
options = {unsafe: true};
|
||||
input: {
|
||||
f(
|
||||
String(x),
|
||||
Number(x),
|
||||
Boolean(x),
|
||||
|
||||
String(),
|
||||
Number(),
|
||||
Boolean()
|
||||
);
|
||||
}
|
||||
expect: {
|
||||
f(
|
||||
x + '', +x, !!x,
|
||||
'', 0, false
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
issue_269_dangers: {
|
||||
options = {unsafe: true};
|
||||
input: {
|
||||
f(
|
||||
String(x, x),
|
||||
Number(x, x),
|
||||
Boolean(x, x)
|
||||
);
|
||||
}
|
||||
expect: {
|
||||
f(String(x, x), Number(x, x), Boolean(x, x));
|
||||
}
|
||||
}
|
||||
|
||||
issue_269_in_scope: {
|
||||
options = {unsafe: true};
|
||||
input: {
|
||||
var String, Number, Boolean;
|
||||
f(
|
||||
String(x),
|
||||
Number(x, x),
|
||||
Boolean(x)
|
||||
);
|
||||
}
|
||||
expect: {
|
||||
var String, Number, Boolean;
|
||||
f(String(x), Number(x, x), Boolean(x));
|
||||
}
|
||||
}
|
||||
|
||||
strings_concat: {
|
||||
options = {unsafe: true};
|
||||
input: {
|
||||
f(
|
||||
String(x + 'str'),
|
||||
String('str' + x)
|
||||
);
|
||||
}
|
||||
expect: {
|
||||
f(
|
||||
x + 'str',
|
||||
'str' + x
|
||||
);
|
||||
}
|
||||
}
|
||||
76 test/compress/negate-iife.js Normal file
@@ -0,0 +1,76 @@
|
||||
negate_iife_1: {
|
||||
options = {
|
||||
negate_iife: true
|
||||
};
|
||||
input: {
|
||||
(function(){ stuff() })();
|
||||
}
|
||||
expect: {
|
||||
!function(){ stuff() }();
|
||||
}
|
||||
}
|
||||
|
||||
negate_iife_2: {
|
||||
options = {
|
||||
negate_iife: true
|
||||
};
|
||||
input: {
|
||||
(function(){ return {} })().x = 10; // should not transform this one
|
||||
}
|
||||
expect: {
|
||||
(function(){ return {} })().x = 10;
|
||||
}
|
||||
}
|
||||
|
||||
negate_iife_3: {
|
||||
options = {
|
||||
negate_iife: true,
|
||||
};
|
||||
input: {
|
||||
(function(){ return true })() ? console.log(true) : console.log(false);
|
||||
}
|
||||
expect: {
|
||||
!function(){ return true }() ? console.log(false) : console.log(true);
|
||||
}
|
||||
}
|
||||
|
||||
negate_iife_3: {
|
||||
options = {
|
||||
negate_iife: true,
|
||||
sequences: true
|
||||
};
|
||||
input: {
|
||||
(function(){ return true })() ? console.log(true) : console.log(false);
|
||||
(function(){
|
||||
console.log("something");
|
||||
})();
|
||||
}
|
||||
expect: {
|
||||
!function(){ return true }() ? console.log(false) : console.log(true), function(){
|
||||
console.log("something");
|
||||
}();
|
||||
}
|
||||
}
|
||||
|
||||
negate_iife_4: {
|
||||
options = {
|
||||
negate_iife: true,
|
||||
sequences: true,
|
||||
conditionals: true,
|
||||
};
|
||||
input: {
|
||||
if ((function(){ return true })()) {
|
||||
foo(true);
|
||||
} else {
|
||||
bar(false);
|
||||
}
|
||||
(function(){
|
||||
console.log("something");
|
||||
})();
|
||||
}
|
||||
expect: {
|
||||
!function(){ return true }() ? bar(false) : foo(true), function(){
|
||||
console.log("something");
|
||||
}();
|
||||
}
|
||||
}
|
||||
@@ -17,10 +17,18 @@ dot_properties: {
|
||||
input: {
|
||||
a["foo"] = "bar";
|
||||
a["if"] = "if";
|
||||
a["*"] = "asterisk";
|
||||
a["\u0EB3"] = "unicode";
|
||||
a[""] = "whitespace";
|
||||
a["1_1"] = "foo";
|
||||
}
|
||||
expect: {
|
||||
a.foo = "bar";
|
||||
a["if"] = "if";
|
||||
a["*"] = "asterisk";
|
||||
a["\u0EB3"] = "unicode";
|
||||
a[""] = "whitespace";
|
||||
a["1_1"] = "foo";
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,9 +40,35 @@ dot_properties_es5: {
|
||||
input: {
|
||||
a["foo"] = "bar";
|
||||
a["if"] = "if";
|
||||
a["*"] = "asterisk";
|
||||
a["\u0EB3"] = "unicode";
|
||||
a[""] = "whitespace";
|
||||
}
|
||||
expect: {
|
||||
a.foo = "bar";
|
||||
a.if = "if";
|
||||
a["*"] = "asterisk";
|
||||
a["\u0EB3"] = "unicode";
|
||||
a[""] = "whitespace";
|
||||
}
|
||||
}
|
||||
|
||||
evaluate_length: {
|
||||
options = {
|
||||
properties: true,
|
||||
unsafe: true,
|
||||
evaluate: true
|
||||
};
|
||||
input: {
|
||||
a = "foo".length;
|
||||
a = ("foo" + "bar")["len" + "gth"];
|
||||
a = b.length;
|
||||
a = ("foo" + b).length;
|
||||
}
|
||||
expect: {
|
||||
a = 3;
|
||||
a = 6;
|
||||
a = b.length;
|
||||
a = ("foo" + b).length;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -91,9 +91,11 @@ make_sequences_4: {
|
||||
lift_sequences_1: {
|
||||
options = { sequences: true };
|
||||
input: {
|
||||
var foo, x, y, bar;
|
||||
foo = !(x(), y(), bar());
|
||||
}
|
||||
expect: {
|
||||
var foo, x, y, bar;
|
||||
x(), y(), foo = !bar();
|
||||
}
|
||||
}
|
||||
@@ -101,19 +103,25 @@ lift_sequences_1: {
|
||||
lift_sequences_2: {
|
||||
options = { sequences: true, evaluate: true };
|
||||
input: {
|
||||
q = 1 + (foo(), bar(), 5) + 7 * (5 / (3 - (a(), (QW=ER), c(), 2))) - (x(), y(), 5);
|
||||
var foo, bar;
|
||||
foo.x = (foo = {}, 10);
|
||||
bar = (bar = {}, 10);
|
||||
}
|
||||
expect: {
|
||||
foo(), bar(), a(), QW = ER, c(), x(), y(), q = 36
|
||||
var foo, bar;
|
||||
foo.x = (foo = {}, 10),
|
||||
bar = {}, bar = 10;
|
||||
}
|
||||
}
|
||||
|
||||
lift_sequences_3: {
|
||||
options = { sequences: true, conditionals: true };
|
||||
input: {
|
||||
var x, foo, bar, baz;
|
||||
x = (foo(), bar(), baz()) ? 10 : 20;
|
||||
}
|
||||
expect: {
|
||||
var x, foo, bar, baz;
|
||||
foo(), bar(), x = baz() ? 10 : 20;
|
||||
}
|
||||
}
|
||||
@@ -121,9 +129,11 @@ lift_sequences_3: {
|
||||
lift_sequences_4: {
|
||||
options = { side_effects: true };
|
||||
input: {
|
||||
var x, foo, bar, baz;
|
||||
x = (foo, bar, baz);
|
||||
}
|
||||
expect: {
|
||||
var x, foo, bar, baz;
|
||||
x = baz;
|
||||
}
|
||||
}
|
||||
|
||||
103 test/mozilla-ast.js Normal file
@@ -0,0 +1,103 @@
|
||||
// Testing UglifyJS <-> SpiderMonkey AST conversion
|
||||
// through generative testing.
|
||||
|
||||
var UglifyJS = require(".."),
|
||||
escodegen = require("escodegen"),
|
||||
esfuzz = require("esfuzz"),
|
||||
estraverse = require("estraverse"),
|
||||
prefix = Array(20).join("\b") + " ";
|
||||
|
||||
// Normalizes input AST for UglifyJS in order to get correct comparison.
|
||||
|
||||
function normalizeInput(ast) {
|
||||
return estraverse.replace(ast, {
|
||||
enter: function(node, parent) {
|
||||
switch (node.type) {
|
||||
// Internally mark all the properties with semi-standard type "Property".
|
||||
case "ObjectExpression":
|
||||
node.properties.forEach(function (property) {
|
||||
property.type = "Property";
|
||||
});
|
||||
break;
|
||||
|
||||
// Since UglifyJS doesn"t recognize different types of property keys,
|
||||
// decision on SpiderMonkey node type is based on check whether key
|
||||
// can be valid identifier or not - so we do in input AST.
|
||||
case "Property":
|
||||
var key = node.key;
|
||||
if (key.type === "Literal" && typeof key.value === "string" && UglifyJS.is_identifier(key.value)) {
|
||||
node.key = {
|
||||
type: "Identifier",
|
||||
name: key.value
|
||||
};
|
||||
} else if (key.type === "Identifier" && !UglifyJS.is_identifier(key.name)) {
|
||||
node.key = {
|
||||
type: "Literal",
|
||||
value: key.name
|
||||
};
|
||||
}
|
||||
break;
|
||||
|
||||
// UglifyJS internally flattens all the expression sequences - either
|
||||
// to one element (if sequence contains only one element) or flat list.
|
||||
case "SequenceExpression":
|
||||
node.expressions = node.expressions.reduce(function flatten(list, expr) {
|
||||
return list.concat(expr.type === "SequenceExpression" ? expr.expressions.reduce(flatten, []) : [expr]);
|
||||
}, []);
|
||||
if (node.expressions.length === 1) {
|
||||
return node.expressions[0];
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = function(options) {
|
||||
console.log("--- UglifyJS <-> Mozilla AST conversion");
|
||||
|
||||
for (var counter = 0; counter < options.iterations; counter++) {
|
||||
process.stdout.write(prefix + counter + "/" + options.iterations);
|
||||
|
||||
var ast1 = normalizeInput(esfuzz.generate({
|
||||
maxDepth: options.maxDepth
|
||||
}));
|
||||
|
||||
var ast2 =
|
||||
UglifyJS
|
||||
.AST_Node
|
||||
.from_mozilla_ast(ast1)
|
||||
.to_mozilla_ast();
|
||||
|
||||
var astPair = [
|
||||
{name: 'expected', value: ast1},
|
||||
{name: 'actual', value: ast2}
|
||||
];
|
||||
|
||||
var jsPair = astPair.map(function(item) {
|
||||
return {
|
||||
name: item.name,
|
||||
value: escodegen.generate(item.value)
|
||||
}
|
||||
});
|
||||
|
||||
if (jsPair[0].value !== jsPair[1].value) {
|
||||
var fs = require("fs");
|
||||
var acorn = require("acorn");
|
||||
|
||||
fs.existsSync("tmp") || fs.mkdirSync("tmp");
|
||||
|
||||
jsPair.forEach(function (item) {
|
||||
var fileName = "tmp/dump_" + item.name;
|
||||
var ast = acorn.parse(item.value);
|
||||
fs.writeFileSync(fileName + ".js", item.value);
|
||||
fs.writeFileSync(fileName + ".json", JSON.stringify(ast, null, 2));
|
||||
});
|
||||
|
||||
process.stdout.write("\n");
|
||||
throw new Error("Got different outputs, check out tmp/dump_*.{js,json} for codes and ASTs.");
|
||||
}
|
||||
}
|
||||
|
||||
process.stdout.write(prefix + "Probability of error is less than " + (100 / options.iterations) + "%, stopping.\n");
|
||||
};
|
||||
@@ -7,8 +7,21 @@ var assert = require("assert");
|
||||
var sys = require("util");
|
||||
|
||||
var tests_dir = path.dirname(module.filename);
|
||||
var failures = 0;
|
||||
var failed_files = {};
|
||||
|
||||
run_compress_tests();
|
||||
if (failures) {
|
||||
sys.error("\n!!! Failed " + failures + " test cases.");
|
||||
sys.error("!!! " + Object.keys(failed_files).join(", "));
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
var run_ast_conversion_tests = require("./mozilla-ast");
|
||||
|
||||
run_ast_conversion_tests({
|
||||
iterations: 1000
|
||||
});
|
||||
|
||||
/* -----[ utils ]----- */
|
||||
|
||||
@@ -83,6 +96,8 @@ function run_compress_tests() {
|
||||
output: output,
|
||||
expected: expect
|
||||
});
|
||||
failures++;
|
||||
failed_files[file] = 1;
|
||||
}
|
||||
}
|
||||
var tests = parse_test(path.resolve(dir, file));
|
||||
|
||||
@@ -51,6 +51,7 @@ for (var i in UglifyJS) {
|
||||
|
||||
exports.minify = function(files, options) {
|
||||
options = UglifyJS.defaults(options, {
|
||||
spidermonkey : false,
|
||||
outSourceMap : null,
|
||||
sourceRoot : null,
|
||||
inSourceMap : null,
|
||||
@@ -60,20 +61,28 @@ exports.minify = function(files, options) {
|
||||
output : null,
|
||||
compress : {}
|
||||
});
|
||||
if (typeof files == "string")
|
||||
files = [ files ];
|
||||
UglifyJS.base54.reset();
|
||||
|
||||
// 1. parse
|
||||
var toplevel = null;
|
||||
var toplevel = null,
|
||||
sourcesContent = {};
|
||||
|
||||
if (options.spidermonkey) {
|
||||
toplevel = UglifyJS.AST_Node.from_mozilla_ast(files);
|
||||
} else {
|
||||
if (typeof files == "string")
|
||||
files = [ files ];
|
||||
files.forEach(function(file){
|
||||
var code = options.fromString
|
||||
? file
|
||||
: fs.readFileSync(file, "utf8");
|
||||
sourcesContent[file] = code;
|
||||
toplevel = UglifyJS.parse(code, {
|
||||
filename: options.fromString ? "?" : file,
|
||||
toplevel: toplevel
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// 2. compress
|
||||
if (options.compress) {
|
||||
@@ -103,12 +112,25 @@ exports.minify = function(files, options) {
|
||||
orig: inMap,
|
||||
root: options.sourceRoot
|
||||
});
|
||||
if (options.sourceMapIncludeSources) {
|
||||
for (var file in sourcesContent) {
|
||||
if (sourcesContent.hasOwnProperty(file)) {
|
||||
output.source_map.get().setSourceContent(file, sourcesContent[file]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
if (options.output) {
|
||||
UglifyJS.merge(output, options.output);
|
||||
}
|
||||
var stream = UglifyJS.OutputStream(output);
|
||||
toplevel.print(stream);
|
||||
|
||||
if(options.outSourceMap){
|
||||
stream += "\n//# sourceMappingURL=" + options.outSourceMap;
|
||||
}
|
||||
|
||||
return {
|
||||
code : stream + "",
|
||||
map : output.source_map + ""
|
||||
|
||||
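A hedged usage sketch of the node API options this last hunk wires together (UglifyJS 2.4.x); the file names are placeholders and sourceMapIncludeSources is assumed to be accepted by minify's defaults, since the new code reads it from options.

var UglifyJS = require("uglify-js");

var result = UglifyJS.minify([ "input.js" ], {
    outSourceMap            : "out.js.map",
    sourceMapIncludeSources : true    // embeds each input via setSourceContent, as added above
});
// result.code ends with "\n//# sourceMappingURL=out.js.map"
// result.map is a JSON string whose sourcesContent entry carries input.js verbatim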