Compare commits

105 Commits

| Author | SHA1 | Date |
|---|---|---|
| | a9fa178f86 | |
| | 53355bdb24 | |
| | f05c99d89f | |
| | b49230ab8d | |
| | 78856a3dab | |
| | 1e5e13ed81 | |
| | 64270b9778 | |
| | e312c5c2a7 | |
| | 1a5fd3e052 | |
| | 5a7e54cf72 | |
| | 39f8a62703 | |
| | 46be3f2bf1 | |
| | 258b46f4dc | |
| | 80da21dab4 | |
| | bb0e4d7126 | |
| | 5276a4a873 | |
| | a1ae0c8609 | |
| | a90c1aeafe | |
| | ff388a8d2d | |
| | 5346fb94bb | |
| | a4f6d46118 | |
| | 7f5f4d60b7 | |
| | ffccb233e5 | |
| | fba0c1aafe | |
| | 774f2ded94 | |
| | 85af942d64 | |
| | 8413787efc | |
| | dde57452aa | |
| | cf409800be | |
| | 18270dd9f3 | |
| | d4c25c571b | |
| | 5248b79506 | |
| | abe0ebbf02 | |
| | 0852f5595e | |
| | cb3cafa14d | |
| | 202fb93799 | |
| | 7b87d2ef83 | |
| | 70fd2b1f33 | |
| | 30faaf13ed | |
| | 41be8632d3 | |
| | bee01dc1be | |
| | 12f71e01d0 | |
| | 3a72deacab | |
| | fc8314e810 | |
| | 11dffe950e | |
| | 6f45928a73 | |
| | afb7faa6fa | |
| | 6aa56f92fe | |
| | 4fe4257c69 | |
| | a5e75c5a21 | |
| | 4482fdd63f | |
| | 253bd8559b | |
| | 6a099fba66 | |
| | a21f3c6cdd | |
| | 8f66458598 | |
| | 6472f9410e | |
| | 8957b3a694 | |
| | 1ffd526554 | |
| | fcc0229087 | |
| | b071c9d079 | |
| | 851b48e4a3 | |
| | 708abb1ab1 | |
| | 370d3e0917 | |
| | b51fe0dcc3 | |
| | 70d205c447 | |
| | 8149be551e | |
| | ba3df646c0 | |
| | 1b6f8d463f | |
| | 731fa9c236 | |
| | 72cb5328ee | |
| | fc39553714 | |
| | d9d67317b1 | |
| | fb5c01c073 | |
| | f4584af42c | |
| | 172aa7a93c | |
| | 5053a29bc0 | |
| | f322b32e0e | |
| | 9cdaed9860 | |
| | dacce1b1fa | |
| | f26f3b44bc | |
| | c5ecbfc756 | |
| | 3799ac8973 | |
| | 86182afa7f | |
| | 4807c6e756 | |
| | a84d07e312 | |
| | 88beddfa91 | |
| | 1b0aab2ce9 | |
| | 9ead49641d | |
| | e1862cd36f | |
| | 2c025f23db | |
| | 9dfcd47ec8 | |
| | 203ecaf85b | |
| | c967f0b0fe | |
| | dfc04e6677 | |
| | 42ea3c95e0 | |
| | d4970b35ac | |
| | dd8286bce1 | |
| | 093a9031dc | |
| | 80a18fe2fa | |
| | fe1411bba1 | |
| | 455ac5435d | |
| | 4a2b91220a | |
| | a1e0885930 | |
| | 7ae09120ed | |
| | 42c25d901c | |
`.gitignore` (1 change)

```diff
@@ -1 +1,2 @@
 tmp/
+node_modules/
```
`README.md` (271 changes)

```diff
@@ -3,8 +3,24 @@ UglifyJS 2

 UglifyJS is a JavaScript parser, minifier, compressor or beautifier toolkit.

-For now this page documents the command line utility. More advanced
-API documentation will be made available later.
+This page documents the command line utility. For
+[API and internals documentation see my website](http://lisperator.net/uglifyjs/).
+There's also an
+[in-browser online demo](http://lisperator.net/uglifyjs/#demo) (for Firefox,
+Chrome and probably Safari).
+
+Install
+-------
+
+From NPM:
+
+    npm install uglify-js2
+
+From Git:
+
+    git clone git://github.com/mishoo/UglifyJS2.git
+    cd UglifyJS2
+    npm link .

 Usage
 -----
@@ -53,6 +69,15 @@ The available options are:
     --acorn        Use Acorn for parsing. [boolean]
     --spidermonkey Assume input fles are SpiderMonkey AST format (as JSON).
                    [boolean]
+    --self         Build itself (UglifyJS2) as a library (implies
+                   --wrap=UglifyJS --export-all) [boolean]
+    --wrap         Embed everything in a big function, making the “exports”
+                   and “global” variables available. You need to pass an
+                   argument to this option to specify the name that your
+                   module will take when included in, say, a browser.
+                   [string]
+    --export-all   Only used when --wrap, this tells UglifyJS to add code to
+                   automatically export all globals. [boolean]
     -v, --verbose  Verbose [boolean]

 Specify `--output` (`-o`) to declare the output file. Otherwise the output
@@ -219,15 +244,19 @@ can pass additional arguments that control the code output:
 - `bracketize` (default `false`) -- always insert brackets in `if`, `for`,
   `do`, `while` or `with` statements, even if their body is a single
   statement.
+- `semicolons` (default `true`) -- separate statements with semicolons. If
+  you pass `false` then whenever possible we will use a newline instead of a
+  semicolon, leading to more readable output of uglified code (size before
+  gzip could be smaller; size after gzip insignificantly larger).

 ### Keeping copyright notices or other comments

 You can pass `--comments` to retain certain comments in the output. By
-default it will keep JSDoc-style comments that contain "@preserve" or
-"@license". You can pass `--comments all` to keep all the comments, or a
-valid JavaScript regexp to keep only comments that match this regexp. For
-example `--comments '/foo|bar/'` will keep only comments that contain "foo"
-or "bar".
+default it will keep JSDoc-style comments that contain "@preserve",
+"@license" or "@cc_on" (conditional compilation for IE). You can pass
+`--comments all` to keep all the comments, or a valid JavaScript regexp to
+keep only comments that match this regexp. For example `--comments
+'/foo|bar/'` will keep only comments that contain "foo" or "bar".

 Note, however, that there might be situations where comments are lost. For
 example:
@@ -245,7 +274,7 @@ function `g` (which is the AST node to which the comment is attached to) is
 discarded by the compressor as not referenced.

 The safest comments where to place copyright information (or other info that
-needs to me kept in the output) are comments attached to toplevel nodes.
+needs to be kept in the output) are comments attached to toplevel nodes.

 ## Support for the SpiderMonkey AST

@@ -254,10 +283,10 @@ UglifyJS2 has its own abstract syntax tree format; for
 we can't easily change to using the SpiderMonkey AST internally. However,
 UglifyJS now has a converter which can import a SpiderMonkey AST.

-For example [Acorn](https://github.com/marijnh/acorn) is a super-fast parser
-that produces a SpiderMonkey AST. It has a small CLI utility that parses
-one file and dumps the AST in JSON on the standard output. To use UglifyJS
-to mangle and compress that:
+For example [Acorn][acorn] is a super-fast parser that produces a
+SpiderMonkey AST. It has a small CLI utility that parses one file and dumps
+the AST in JSON on the standard output. To use UglifyJS to mangle and
+compress that:

     acorn file.js | uglifyjs2 --spidermonkey -m -c

@@ -269,11 +298,221 @@ internal AST.
 ### Use Acorn for parsing

 More for fun, I added the `--acorn` option which will use Acorn to do all
-the parsing. If you pass this option, UglifyJS will `require("acorn")`. At
-the time I'm writing this it needs
-[this commit](https://github.com/mishoo/acorn/commit/17c0d189c7f9ce5447293569036949b5d0a05fef)
-in Acorn to support multiple input files and properly generate source maps.
+the parsing. If you pass this option, UglifyJS will `require("acorn")`.

 Acorn is really fast (e.g. 250ms instead of 380ms on some 650K code), but
 converting the SpiderMonkey tree that Acorn produces takes another 150ms so
 in total it's a bit more than just using UglifyJS's own parser.
+
+API Reference
+-------------
+
+Assuming installation via NPM, you can load UglifyJS in your application
+like this:
+
+    var UglifyJS = require("uglify-js2");
+
+It exports a lot of names, but I'll discuss here the basics that are needed
+for parsing, mangling and compressing a piece of code. The sequence is (1)
+parse, (2) compress, (3) mangle, (4) generate output code.
+
+### The simple way
+
+There's a single toplevel function which combines all the steps. If you
+don't need additional customization, you might want to go with `minify`.
+Example:
+
+    // see "fromString" below if you need to pass code instead of file name
+    var result = UglifyJS.minify("/path/to/file.js");
+    console.log(result.code); // minified output
+
+You can also compress multiple files:
+
+    var result = UglifyJS.minify([ "file1.js", "file2.js", "file3.js" ]);
+    console.log(result.code);
+
+To generate a source map:
+
+    var result = UglifyJS.minify([ "file1.js", "file2.js", "file3.js" ], {
+        outSourceMap: "out.js.map"
+    });
+    console.log(result.code); // minified output
+    console.log(result.map);
+
+Note that the source map is not saved in a file, it's just returned in
+`result.map`. The value passed for `outSourceMap` is only used to set the
+`file` attribute in the source map (see [the spec][sm-spec]).
+
+You can also specify sourceRoot property to be included in source map:
+
+    var result = UglifyJS.minify([ "file1.js", "file2.js", "file3.js" ], {
+        outSourceMap: "out.js.map",
+        sourceRoot: "http://example.com/src"
+    });
+
+If you're compressing compiled JavaScript and have a source map for it, you
+can use the `inSourceMap` argument:
+
+    var result = UglifyJS.minify("compiled.js", {
+        inSourceMap: "compiled.js.map",
+        outSourceMap: "minified.js.map"
+    });
+    // same as before, it returns `code` and `map`
+
+The `inSourceMap` is only used if you also request `outSourceMap` (it makes
+no sense otherwise).
+
+Other options:
+
+- `warnings` (default `false`) — pass `true` to display compressor warnings.
+- `fromString` (default `false`) — if you pass `true` then you can pass
+  JavaScript source code, rather than file names.
+
+We could add more options to `UglifyJS.minify` — if you need additional
+functionality please suggest!
+
+### The hard way
+
+Following there's more detailed API info, in case the `minify` function is
+too simple for your needs.
+
+#### The parser
+
+    var toplevel_ast = UglifyJS.parse(code, options);
+
+`options` is optional and if present it must be an object. The following
+properties are available:
+
+- `strict` — disable automatic semicolon insertion and support for trailing
+  comma in arrays and objects
+- `filename` — the name of the file where this code is coming from
+- `toplevel` — a `toplevel` node (as returned by a previous invocation of
+  `parse`)
+
+The last two options are useful when you'd like to minify multiple files and
+get a single file as the output and a proper source map. Our CLI tool does
+something like this:
+
+    var toplevel = null;
+    files.forEach(function(file){
+        var code = fs.readFileSync(file);
+        toplevel = UglifyJS.parse(code, {
+            filename: file,
+            toplevel: toplevel
+        });
+    });
+
+After this, we have in `toplevel` a big AST containing all our files, with
+each token having proper information about where it came from.
+
+#### Scope information
+
+UglifyJS contains a scope analyzer that you need to call manually before
+compressing or mangling. Basically it augments various nodes in the AST
+with information about where is a name defined, how many times is a name
+referenced, if it is a global or not, if a function is using `eval` or the
+`with` statement etc. I will discuss this some place else, for now what's
+important to know is that you need to call the following before doing
+anything with the tree:
+
+    toplevel.figure_out_scope()
+
+#### Compression
+
+Like this:
+
+    var compressor = UglifyJS.Compressor(options);
+    var compressed_ast = toplevel.transform(compressor);
+
+The `options` can be missing. Available options are discussed above in
+“Compressor options”. Defaults should lead to best compression in most
+scripts.
+
+The compressor is destructive, so don't rely that `toplevel` remains the
+original tree.
+
+#### Mangling
+
+After compression it is a good idea to call again `figure_out_scope` (since
+the compressor might drop unused variables / unreachable code and this might
+change the number of identifiers or their position). Optionally, you can
+call a trick that helps after Gzip (counting character frequency in
+non-mangleable words). Example:
+
+    compressed_ast.figure_out_scope();
+    compressed_ast.compute_char_frequency();
+    compressed_ast.mangle_names();
+
+#### Generating output
+
+AST nodes have a `print` method that takes an output stream. Essentially,
+to generate code you do this:
+
+    var stream = UglifyJS.OutputStream(options);
+    compressed_ast.print(stream);
+    var code = stream.toString(); // this is your minified code
+
+or, for a shortcut you can do:
+
+    var code = compressed_ast.print_to_string(options);
+
+As usual, `options` is optional. The output stream accepts a lot of otions,
+most of them documented above in section “Beautifier options”. The two
+which we care about here are `source_map` and `comments`.
+
+#### Keeping comments in the output
+
+In order to keep certain comments in the output you need to pass the
+`comments` option. Pass a RegExp or a function. If you pass a RegExp, only
+those comments whose body matches the regexp will be kept. Note that body
+means without the initial `//` or `/*`. If you pass a function, it will be
+called for every comment in the tree and will receive two arguments: the
+node that the comment is attached to, and the comment token itself.
+
+The comment token has these properties:
+
+- `type`: "comment1" for single-line comments or "comment2" for multi-line
+  comments
+- `value`: the comment body
+- `pos` and `endpos`: the start/end positions (zero-based indexes) in the
+  original code where this comment appears
+- `line` and `col`: the line and column where this comment appears in the
+  original code
+- `file` — the file name of the original file
+- `nlb` — true if there was a newline before this comment in the original
+  code, or if this comment contains a newline.
+
+Your function should return `true` to keep the comment, or a falsy value
+otherwise.
+
+#### Generating a source mapping
+
+You need to pass the `source_map` argument when calling `print`. It needs
+to be a `SourceMap` object (which is a thin wrapper on top of the
+[source-map][source-map] library).
+
+Example:
+
+    var source_map = UglifyJS.SourceMap(source_map_options);
+    var stream = UglifyJS.OutputStream({
+        ...
+        source_map: source_map
+    });
+    compressed_ast.print(stream);
+
+    var code = stream.toString();
+    var map = source_map.toString(); // json output for your source map
+
+The `source_map_options` (optional) can contain the following properties:
+
+- `file`: the name of the JavaScript output file that this mapping refers to
+- `root`: the `sourceRoot` property (see the [spec][sm-spec])
+- `orig`: the "original source map", handy when you compress generated JS
+  and want to map the minified output back to the original code where it
+  came from. It can be simply a string in JSON, or a JSON object containing
+  the original source map.
+
+[acorn]: https://github.com/marijnh/acorn
+[source-map]: https://github.com/mozilla/source-map
+[sm-spec]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
```
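The README section added above documents both the one-shot `minify` call and the lower-level parse, compress, mangle, print pipeline. As a quick illustration, here is a minimal sketch of that pipeline; it assumes `uglify-js2` has been installed from NPM, and the file path and options here are placeholders rather than part of the diff:

```js
// Minimal sketch of the "hard way" pipeline described in the README diff above.
// Assumes `npm install uglify-js2` has been run; "file1.js" is a placeholder path.
var fs = require("fs");
var UglifyJS = require("uglify-js2");

var code = fs.readFileSync("file1.js", "utf8");
var toplevel = UglifyJS.parse(code, { filename: "file1.js" });

toplevel.figure_out_scope();                 // scope info is required before compressing
var compressed = toplevel.transform(UglifyJS.Compressor());

compressed.figure_out_scope();               // re-run: the compressor may drop identifiers
compressed.compute_char_frequency();         // optional trick that helps after gzip
compressed.mangle_names();

console.log(compressed.print_to_string());   // the minified code
```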
Command-line script:

```diff
@@ -43,6 +43,11 @@ because of dead code removal or cascading statements into sequences.")
     .describe("stats", "Display operations run time on STDERR.")
     .describe("acorn", "Use Acorn for parsing.")
     .describe("spidermonkey", "Assume input fles are SpiderMonkey AST format (as JSON).")
+    .describe("self", "Build itself (UglifyJS2) as a library (implies --wrap=UglifyJS --export-all)")
+    .describe("wrap", "Embed everything in a big function, making the “exports” and “global” variables available. \
+You need to pass an argument to this option to specify the name that your module will take when included in, say, a browser.")
+    .describe("export-all", "Only used when --wrap, this tells UglifyJS to add code to automatically export all globals.")
+    .describe("lint", "Display some scope warnings")
     .describe("v", "Verbose")

     .alias("p", "prefix")
@@ -61,16 +66,28 @@ because of dead code removal or cascading statements into sequences.")
     .string("c")
     .string("d")
     .string("comments")
+    .string("wrap")
+    .boolean("export-all")
+    .boolean("self")
     .boolean("v")
     .boolean("stats")
     .boolean("acorn")
     .boolean("spidermonkey")
+    .boolean("lint")

     .wrap(80)

     .argv
 ;

+normalize(ARGS);
+
+if (ARGS.ast_help) {
+    var desc = UglifyJS.describe_ast();
+    sys.puts(typeof desc == "string" ? desc : JSON.stringify(desc, null, 2));
+    process.exit(0);
+}
+
 if (ARGS.h || ARGS.help) {
     sys.puts(optimist.help());
     process.exit(0);
@@ -80,11 +97,9 @@ if (ARGS.acorn) {
     acorn = require("acorn");
 }

-normalize(ARGS);
-
-var COMPRESS = getOptions("c");
-var MANGLE = getOptions("m");
-var BEAUTIFY = getOptions("b");
+var COMPRESS = getOptions("c", true);
+var MANGLE = getOptions("m", true);
+var BEAUTIFY = getOptions("b", true);

 if (COMPRESS && ARGS.d) {
     COMPRESS.global_defs = getOptions("d");
@@ -112,8 +127,7 @@ if (ARGS.comments) {
         var type = comment.type;
         if (type == "comment2") {
             // multiline comment
-            return text.indexOf("@preserve") >= 0
-                || text.indexOf("@license") >= 0;
+            return /@preserve|@license|@cc_on/i.test(text);
         }
     }
 }
@@ -121,6 +135,15 @@ if (ARGS.comments) {

 var files = ARGS._.slice();

+if (ARGS.self) {
+    if (files.length > 0) {
+        sys.error("WARN: Ignoring input files since --self was passed");
+    }
+    files = UglifyJS.FILES;
+    if (!ARGS.wrap) ARGS.wrap = "UglifyJS";
+    ARGS.export_all = true;
+}
+
 var ORIG_MAP = ARGS.in_source_map;

 if (ORIG_MAP) {
@@ -204,11 +227,18 @@ if (ARGS.acorn || ARGS.spidermonkey) time_it("convert_ast", function(){
     TOPLEVEL = UglifyJS.AST_Node.from_mozilla_ast(TOPLEVEL);
 });

-var SCOPE_IS_NEEDED = COMPRESS || MANGLE;
+if (ARGS.wrap) {
+    TOPLEVEL = TOPLEVEL.wrap_commonjs(ARGS.wrap, ARGS.export_all);
+}
+
+var SCOPE_IS_NEEDED = COMPRESS || MANGLE || ARGS.lint;
+
 if (SCOPE_IS_NEEDED) {
     time_it("scope", function(){
         TOPLEVEL.figure_out_scope();
+        if (ARGS.lint) {
+            TOPLEVEL.scope_warnings();
+        }
     });
 }

@@ -222,8 +252,7 @@ if (SCOPE_IS_NEEDED) {
     time_it("scope", function(){
         TOPLEVEL.figure_out_scope();
         if (MANGLE) {
-            TOPLEVEL.compute_char_frequency();
-            UglifyJS.base54.sort();
+            TOPLEVEL.compute_char_frequency(MANGLE);
         }
     });
 }
@@ -270,16 +299,36 @@ function normalize(o) {
     }
 }

-function getOptions(x) {
+function getOptions(x, constants) {
     x = ARGS[x];
     if (!x) return null;
     var ret = {};
     if (x !== true) {
-        x.replace(/^\s+|\s+$/g).split(/\s*,+\s*/).forEach(function(opt){
-            var a = opt.split(/\s*[=:]\s*/);
-            ret[a[0]] = a.length > 1 ? new Function("return(" + a[1] + ")")() : true;
-        });
-        normalize(ret);
+        var ast;
+        try {
+            ast = UglifyJS.parse(x);
+        } catch(ex) {
+            if (ex instanceof UglifyJS.JS_Parse_Error) {
+                sys.error("Error parsing arguments in: " + x);
+                process.exit(1);
+            }
+        }
+        ast.walk(new UglifyJS.TreeWalker(function(node){
+            if (node instanceof UglifyJS.AST_Toplevel) return; // descend
+            if (node instanceof UglifyJS.AST_SimpleStatement) return; // descend
+            if (node instanceof UglifyJS.AST_Seq) return; // descend
+            if (node instanceof UglifyJS.AST_Assign) {
+                var name = node.left.print_to_string({ beautify: false }).replace(/-/g, "_");
+                var value = node.right;
+                if (constants)
+                    value = new Function("return (" + value.print_to_string() + ")")();
+                ret[name] = value;
+                return true; // no descend
+            }
+            sys.error(node.TYPE)
+            sys.error("Error parsing arguments in: " + x);
+            process.exit(1);
+        }));
     }
     return ret;
 }
```
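The rewritten `getOptions` in the last hunk above no longer splits the option string by hand; it parses it as JavaScript and walks the resulting AST, collecting each assignment. Here is a standalone sketch of the same idea; the helper name `parseOptionString` and the sample option string are illustrative, not part of the diff:

```js
// Hedged sketch of the argument-parsing approach used in the CLI diff above.
// Assumes uglify-js2 is installed; parseOptionString is a hypothetical helper.
var UglifyJS = require("uglify-js2");

function parseOptionString(str) {
    var ret = {};
    var ast = UglifyJS.parse(str);
    ast.walk(new UglifyJS.TreeWalker(function(node){
        if (node instanceof UglifyJS.AST_Toplevel) return;        // descend
        if (node instanceof UglifyJS.AST_SimpleStatement) return; // descend
        if (node instanceof UglifyJS.AST_Seq) return;             // descend
        if (node instanceof UglifyJS.AST_Assign) {
            var name = node.left.print_to_string().replace(/-/g, "_");
            ret[name] = new Function("return (" + node.right.print_to_string() + ")")();
            return true; // don't descend into the assignment itself
        }
    }));
    return ret;
}

console.log(parseOptionString("unsafe=true,sequences=false"));
// => { unsafe: true, sequences: false }
```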
`lib/ast.js` (294 changes)

```diff
@@ -43,8 +43,6 @@

 "use strict";

-var NODE_HIERARCHY = {};
-
 function DEFNODE(type, props, methods, base) {
     if (arguments.length < 4) base = AST_Node;
     if (!props) props = [];
@@ -59,21 +57,21 @@ function DEFNODE(type, props, methods, base) {
     var proto = base && new base;
     if (proto && proto.initialize || (methods && methods.initialize))
         code += "this.initialize();";
-    code += " } ";
-    code += "if (!this.$self) this.$self = this;";
-    code += " } ";
+    code += "}}";
     var ctor = new Function(code)();
     if (proto) {
         ctor.prototype = proto;
         ctor.BASE = base;
     }
+    if (base) base.SUBCLASSES.push(ctor);
     ctor.prototype.CTOR = ctor;
     ctor.PROPS = props || null;
     ctor.SELF_PROPS = self_props;
+    ctor.SUBCLASSES = [];
     if (type) {
         ctor.prototype.TYPE = ctor.TYPE = type;
     }
-    if (methods) for (i in methods) if (HOP(methods, i)) {
+    if (methods) for (i in methods) if (methods.hasOwnProperty(i)) {
         if (/^\$/.test(i)) {
             ctor[i.substr(1)] = methods[i];
         } else {
@@ -83,21 +81,21 @@ function DEFNODE(type, props, methods, base) {
     ctor.DEFMETHOD = function(name, method) {
         this.prototype[name] = method;
     };
-    NODE_HIERARCHY[type] = {
-        def: ctor,
-        base: base
-    };
     return ctor;
 };

 var AST_Token = DEFNODE("Token", "type value line col pos endpos nlb comments_before file", {
 }, null);

-var AST_Node = DEFNODE("Node", "$self start end", {
+var AST_Node = DEFNODE("Node", "start end", {
     clone: function() {
         return new this.CTOR(this);
     },
     $documentation: "Base class of all AST nodes",
+    $propdoc: {
+        start: "[AST_Token] The first token of this node",
+        end: "[AST_Token] The last token of this node"
+    },
     _walk: function(visitor) {
         return visitor._visit(this);
     },
@@ -124,10 +122,17 @@ var AST_Debugger = DEFNODE("Debugger", null, {

 var AST_Directive = DEFNODE("Directive", "value scope", {
     $documentation: "Represents a directive, like \"use strict\";",
+    $propdoc: {
+        value: "[string] The value of this directive as a plain string (it's not an AST_String!)",
+        scope: "[AST_Scope/S] The scope that this directive affects"
+    },
 }, AST_Statement);

 var AST_SimpleStatement = DEFNODE("SimpleStatement", "body", {
-    $documentation: "A statement consisting of an expression, i.e. a = 1 + 2.",
+    $documentation: "A statement consisting of an expression, i.e. a = 1 + 2",
+    $propdoc: {
+        body: "[AST_Node] an expression node (should not be instanceof AST_Statement)"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.body._walk(visitor);
@@ -135,17 +140,6 @@ var AST_SimpleStatement = DEFNODE("SimpleStatement", "body", {
     }
 }, AST_Statement);

-var AST_BlockStatement = DEFNODE("BlockStatement", "body", {
-    $documentation: "A block statement.",
-    _walk: function(visitor) {
-        return visitor._visit(this, function(){
-            this.body.forEach(function(stat){
-                stat._walk(visitor);
-            });
-        });
-    }
-}, AST_Statement);
-
 function walk_body(node, visitor) {
     if (node.body instanceof AST_Statement) {
         node.body._walk(visitor);
@@ -156,7 +150,10 @@ function walk_body(node, visitor) {
 };

 var AST_Block = DEFNODE("Block", "body", {
-    $documentation: "A block of statements (usually always bracketed)",
+    $documentation: "A body of statements (usually bracketed)",
+    $propdoc: {
+        body: "[AST_Statement*] an array of statements"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             walk_body(this, visitor);
@@ -164,15 +161,22 @@ var AST_Block = DEFNODE("Block", "body", {
     }
 }, AST_Statement);

+var AST_BlockStatement = DEFNODE("BlockStatement", null, {
+    $documentation: "A block statement",
+}, AST_Block);
+
 var AST_EmptyStatement = DEFNODE("EmptyStatement", null, {
-    $documentation: "The empty statement (empty block or simply a semicolon).",
+    $documentation: "The empty statement (empty block or simply a semicolon)",
     _walk: function(visitor) {
         return visitor._visit(this);
     }
 }, AST_Statement);

 var AST_StatementWithBody = DEFNODE("StatementWithBody", "body", {
-    $documentation: "Base class for all statements that contain one nested body: `For`, `ForIn`, `Do`, `While`, `With`.",
+    $documentation: "Base class for all statements that contain one nested body: `For`, `ForIn`, `Do`, `While`, `With`",
+    $propdoc: {
+        body: "[AST_Statement] the body; this should always be present, even if it's an AST_EmptyStatement"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.body._walk(visitor);
@@ -182,6 +186,9 @@ var AST_StatementWithBody = DEFNODE("StatementWithBody", "body", {

 var AST_LabeledStatement = DEFNODE("LabeledStatement", "label", {
     $documentation: "Statement with a label",
+    $propdoc: {
+        label: "[AST_Label] a label definition"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.label._walk(visitor);
@@ -191,7 +198,10 @@ var AST_LabeledStatement = DEFNODE("LabeledStatement", "label", {
 }, AST_StatementWithBody);

 var AST_DWLoop = DEFNODE("DWLoop", "condition", {
-    $documentation: "Base class for do/while statements.",
+    $documentation: "Base class for do/while statements",
+    $propdoc: {
+        condition: "[AST_Node] the loop condition. Should not be instanceof AST_Statement"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.condition._walk(visitor);
@@ -201,15 +211,20 @@ var AST_DWLoop = DEFNODE("DWLoop", "condition", {
 }, AST_StatementWithBody);

 var AST_Do = DEFNODE("Do", null, {
-    $documentation: "A `do` statement"
+    $documentation: "A `do` statement",
 }, AST_DWLoop);

 var AST_While = DEFNODE("While", null, {
-    $documentation: "A `while` statement"
+    $documentation: "A `while` statement",
 }, AST_DWLoop);

 var AST_For = DEFNODE("For", "init condition step", {
     $documentation: "A `for` statement",
+    $propdoc: {
+        init: "[AST_Node?] the `for` initialization code, or null if empty",
+        condition: "[AST_Node?] the `for` termination clause, or null if empty",
+        step: "[AST_Node?] the `for` update clause, or null if empty"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             if (this.init) this.init._walk(visitor);
@@ -222,6 +237,11 @@ var AST_For = DEFNODE("For", "init condition step", {

 var AST_ForIn = DEFNODE("ForIn", "init name object", {
     $documentation: "A `for ... in` statement",
+    $propdoc: {
+        init: "[AST_Node] the `for/in` initialization code",
+        name: "[AST_SymbolRef?] the loop variable, only if `init` is AST_Var",
+        object: "[AST_Node] the object that we're looping through"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.init._walk(visitor);
@@ -233,6 +253,9 @@ var AST_ForIn = DEFNODE("ForIn", "init name object", {

 var AST_With = DEFNODE("With", "expression", {
     $documentation: "A `with` statement",
+    $propdoc: {
+        expression: "[AST_Node] the `with` expression"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.expression._walk(visitor);
@@ -245,14 +268,72 @@ var AST_With = DEFNODE("With", "expression", {

 var AST_Scope = DEFNODE("Scope", "directives variables functions uses_with uses_eval parent_scope enclosed cname", {
     $documentation: "Base class for all statements introducing a lexical scope",
+    $propdoc: {
+        directives: "[string*/S] an array of directives declared in this scope",
+        variables: "[Object/S] a map of name -> SymbolDef for all variables/functions defined in this scope",
+        functions: "[Object/S] like `variables`, but only lists function declarations",
+        uses_with: "[boolean/S] tells whether this scope uses the `with` statement",
+        uses_eval: "[boolean/S] tells whether this scope contains a direct call to the global `eval`",
+        parent_scope: "[AST_Scope?/S] link to the parent scope",
+        enclosed: "[SymbolDef*/S] a list of all symbol definitions that are accessed from this scope or any subscopes",
+        cname: "[integer/S] current index for mangling variables (used internally by the mangler)",
+    },
 }, AST_Block);

 var AST_Toplevel = DEFNODE("Toplevel", "globals", {
-    $documentation: "The toplevel scope"
+    $documentation: "The toplevel scope",
+    $propdoc: {
+        globals: "[Object/S] a map of name -> SymbolDef for all undeclared names",
+    },
+    wrap_commonjs: function(name, export_all) {
+        var self = this;
+        if (export_all) {
+            self.figure_out_scope();
+            var to_export = [];
+            self.walk(new TreeWalker(function(node){
+                if (node instanceof AST_SymbolDeclaration && node.definition().global) {
+                    if (!find_if(function(n){ return n.name == node.name }, to_export))
+                        to_export.push(node);
+                }
+            }));
+        }
+        var wrapped_tl = "(function(exports, global){ global['" + name + "'] = exports; '$ORIG'; '$EXPORTS'; }({}, (function(){return this}())))";
+        wrapped_tl = parse(wrapped_tl);
+        wrapped_tl = wrapped_tl.transform(new TreeTransformer(function before(node){
+            if (node instanceof AST_SimpleStatement) {
+                node = node.body;
+                if (node instanceof AST_String) switch (node.getValue()) {
+                  case "$ORIG":
+                    return MAP.splice(self.body);
+                  case "$EXPORTS":
+                    var body = [];
+                    to_export.forEach(function(sym){
+                        body.push(new AST_SimpleStatement({
+                            body: new AST_Assign({
+                                left: new AST_Sub({
+                                    expression: new AST_SymbolRef({ name: "exports" }),
+                                    property: new AST_String({ value: sym.name }),
+                                }),
+                                operator: "=",
+                                right: new AST_SymbolRef(sym),
+                            }),
+                        }));
+                    });
+                    return MAP.splice(body);
+                }
+            }
+        }));
+        return wrapped_tl;
+    }
 }, AST_Scope);

 var AST_Lambda = DEFNODE("Lambda", "name argnames uses_arguments", {
     $documentation: "Base class for functions",
+    $propdoc: {
+        name: "[AST_SymbolDeclaration?] the name of this function",
+        argnames: "[AST_SymbolFunarg*] array of function arguments",
+        uses_arguments: "[boolean/S] tells whether this function accesses the arguments array"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             if (this.name) this.name._walk(visitor);
@@ -264,6 +345,10 @@ var AST_Lambda = DEFNODE("Lambda", "name argnames uses_arguments", {
     }
 }, AST_Scope);

+var AST_Accessor = DEFNODE("Accessor", null, {
+    $documentation: "A setter/getter function"
+}, AST_Lambda);
+
 var AST_Function = DEFNODE("Function", null, {
     $documentation: "A function expression"
 }, AST_Lambda);
@@ -280,6 +365,9 @@ var AST_Jump = DEFNODE("Jump", null, {

 var AST_Exit = DEFNODE("Exit", "value", {
     $documentation: "Base class for “exits” (`return` and `throw`)",
+    $propdoc: {
+        value: "[AST_Node?] the value returned or thrown by this statement; could be null for AST_Return"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, this.value && function(){
             this.value._walk(visitor);
@@ -295,8 +383,11 @@ var AST_Throw = DEFNODE("Throw", null, {
     $documentation: "A `throw` statement"
 }, AST_Exit);

-var AST_LoopControl = DEFNODE("LoopControl", "label loopcontrol_target", {
+var AST_LoopControl = DEFNODE("LoopControl", "label", {
     $documentation: "Base class for loop control statements (`break` and `continue`)",
+    $propdoc: {
+        label: "[AST_LabelRef?] the label, or null if none",
+    },
     _walk: function(visitor) {
         return visitor._visit(this, this.label && function(){
             this.label._walk(visitor);
@@ -316,6 +407,10 @@ var AST_Continue = DEFNODE("Continue", null, {

 var AST_If = DEFNODE("If", "condition alternative", {
     $documentation: "A `if` statement",
+    $propdoc: {
+        condition: "[AST_Node] the `if` condition",
+        alternative: "[AST_Statement?] the `else` part, or null if not present"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.condition._walk(visitor);
@@ -327,15 +422,18 @@ var AST_If = DEFNODE("If", "condition alternative", {

 /* -----[ SWITCH ]----- */

-var AST_Switch = DEFNODE("Switch", "body expression", {
+var AST_Switch = DEFNODE("Switch", "expression", {
     $documentation: "A `switch` statement",
+    $propdoc: {
+        expression: "[AST_Node] the `switch` “discriminant”"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.expression._walk(visitor);
             walk_body(this, visitor);
         });
     }
-}, AST_Statement);
+}, AST_Block);

 var AST_SwitchBranch = DEFNODE("SwitchBranch", null, {
     $documentation: "Base class for `switch` branches",
@@ -347,6 +445,9 @@ var AST_Default = DEFNODE("Default", null, {

 var AST_Case = DEFNODE("Case", "expression", {
     $documentation: "A `case` switch branch",
+    $propdoc: {
+        expression: "[AST_Node] the `case` expression"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.expression._walk(visitor);
@@ -359,6 +460,10 @@ var AST_Case = DEFNODE("Case", "expression", {

 var AST_Try = DEFNODE("Try", "bcatch bfinally", {
     $documentation: "A `try` statement",
+    $propdoc: {
+        bcatch: "[AST_Catch?] the catch block, or null if not present",
+        bfinally: "[AST_Finally?] the finally block, or null if not present"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             walk_body(this, visitor);
@@ -376,6 +481,9 @@ var AST_Try = DEFNODE("Try", "bcatch bfinally", {
 // AST_Scope.
 var AST_Catch = DEFNODE("Catch", "argname", {
     $documentation: "A `catch` node; only makes sense as part of a `try` statement",
+    $propdoc: {
+        argname: "[AST_SymbolCatch] symbol for the exception"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.argname._walk(visitor);
@@ -392,6 +500,9 @@ var AST_Finally = DEFNODE("Finally", null, {

 var AST_Definitions = DEFNODE("Definitions", "definitions", {
     $documentation: "Base class for `var` or `const` nodes (variable declarations/initializations)",
+    $propdoc: {
+        definitions: "[AST_VarDef*] array of variable definitions"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.definitions.forEach(function(def){
@@ -411,6 +522,10 @@ var AST_Const = DEFNODE("Const", null, {

 var AST_VarDef = DEFNODE("VarDef", "name value", {
     $documentation: "A variable declaration; only appears in a AST_Definitions node",
+    $propdoc: {
+        name: "[AST_SymbolVar|AST_SymbolConst] name of the variable",
+        value: "[AST_Node?] initializer, or null of there's no initializer"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.name._walk(visitor);
@@ -423,6 +538,10 @@ var AST_VarDef = DEFNODE("VarDef", "name value", {

 var AST_Call = DEFNODE("Call", "expression args", {
     $documentation: "A function call expression",
+    $propdoc: {
+        expression: "[AST_Node] expression to invoke as function",
+        args: "[AST_Node*] array of arguments"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.expression._walk(visitor);
@@ -434,11 +553,15 @@ var AST_Call = DEFNODE("Call", "expression args", {
 });

 var AST_New = DEFNODE("New", null, {
-    $documentation: "An object instantiation. Derives from a function call since it has exactly the same properties."
+    $documentation: "An object instantiation. Derives from a function call since it has exactly the same properties"
 }, AST_Call);

 var AST_Seq = DEFNODE("Seq", "car cdr", {
     $documentation: "A sequence expression (two comma-separated expressions)",
+    $propdoc: {
+        car: "[AST_Node] first element in sequence",
+        cdr: "[AST_Node] second element in sequence"
+    },
     $cons: function(x, y) {
         var seq = new AST_Seq(x);
         seq.car = x;
@@ -462,6 +585,18 @@ var AST_Seq = DEFNODE("Seq", "car cdr", {
         }
         return list;
     },
+    to_array: function() {
+        var p = this, a = [];
+        while (p) {
+            a.push(p.car);
+            if (p.cdr && !(p.cdr instanceof AST_Seq)) {
+                a.push(p.cdr);
+                break;
+            }
+            p = p.cdr;
+        }
+        return a;
+    },
     add: function(node) {
         var p = this;
         while (p) {
@@ -481,7 +616,11 @@ var AST_Seq = DEFNODE("Seq", "car cdr", {
 });

 var AST_PropAccess = DEFNODE("PropAccess", "expression property", {
-    $documentation: "Base class for property access expressions, i.e. `a.foo` or `a[\"foo\"]`"
+    $documentation: "Base class for property access expressions, i.e. `a.foo` or `a[\"foo\"]`",
+    $propdoc: {
+        expression: "[AST_Node] the “container” expression",
+        property: "[AST_Node|string] the property to access. For AST_Dot this is always a plain string, while for AST_Sub it's an arbitrary AST_Node"
+    }
 });

 var AST_Dot = DEFNODE("Dot", null, {
@@ -505,6 +644,10 @@ var AST_Sub = DEFNODE("Sub", null, {

 var AST_Unary = DEFNODE("Unary", "operator expression", {
     $documentation: "Base class for unary expressions",
+    $propdoc: {
+        operator: "[string] the operator",
+        expression: "[AST_Node] expression that this unary operator applies to"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.expression._walk(visitor);
@@ -522,6 +665,11 @@ var AST_UnaryPostfix = DEFNODE("UnaryPostfix", null, {

 var AST_Binary = DEFNODE("Binary", "left operator right", {
     $documentation: "Binary expression, i.e. `a + b`",
+    $propdoc: {
+        left: "[AST_Node] left-hand side expression",
+        operator: "[string] the operator",
+        right: "[AST_Node] right-hand side expression"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.left._walk(visitor);
@@ -532,6 +680,11 @@ var AST_Binary = DEFNODE("Binary", "left operator right", {

 var AST_Conditional = DEFNODE("Conditional", "condition consequent alternative", {
     $documentation: "Conditional expression using the ternary operator, i.e. `a ? b : c`",
+    $propdoc: {
+        condition: "[AST_Node]",
+        consequent: "[AST_Node]",
+        alternative: "[AST_Node]"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.condition._walk(visitor);
@@ -541,7 +694,7 @@ var AST_Conditional = DEFNODE("Conditional", "condition consequent alternative",
     }
 });

-var AST_Assign = DEFNODE("Assign", "left operator right", {
+var AST_Assign = DEFNODE("Assign", null, {
     $documentation: "An assignment expression — `a = b + 5`",
 }, AST_Binary);

@@ -549,6 +702,9 @@ var AST_Assign = DEFNODE("Assign", "left operator right", {

 var AST_Array = DEFNODE("Array", "elements", {
     $documentation: "An array literal",
+    $propdoc: {
+        elements: "[AST_Node*] array of elements"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.elements.forEach(function(el){
@@ -560,6 +716,9 @@ var AST_Array = DEFNODE("Array", "elements", {

 var AST_Object = DEFNODE("Object", "properties", {
     $documentation: "An object literal",
+    $propdoc: {
+        properties: "[AST_ObjectProperty*] array of properties"
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.properties.forEach(function(prop){
@@ -571,6 +730,10 @@ var AST_Object = DEFNODE("Object", "properties", {

 var AST_ObjectProperty = DEFNODE("ObjectProperty", "key value", {
     $documentation: "Base class for literal object properties",
+    $propdoc: {
+        key: "[string] the property name; it's always a plain string in our AST, no matter if it was a string, number or identifier in original code",
+        value: "[AST_Node] property value. For setters and getters this is an AST_Function."
+    },
     _walk: function(visitor) {
         return visitor._visit(this, function(){
             this.value._walk(visitor);
@@ -578,7 +741,7 @@ var AST_ObjectProperty = DEFNODE("ObjectProperty", "key value", {
     }
 });

-var AST_ObjectKeyVal = DEFNODE("ObjectKeyval", null, {
+var AST_ObjectKeyVal = DEFNODE("ObjectKeyVal", null, {
     $documentation: "A key: value object property",
 }, AST_ObjectProperty);

@@ -591,11 +754,23 @@ var AST_ObjectGetter = DEFNODE("ObjectGetter", null, {
 }, AST_ObjectProperty);

 var AST_Symbol = DEFNODE("Symbol", "scope name thedef", {
+    $propdoc: {
+        name: "[string] name of this symbol",
+        scope: "[AST_Scope/S] the current scope (not necessarily the definition scope)",
+        thedef: "[SymbolDef/S] the definition of this symbol"
+    },
     $documentation: "Base class for all symbols",
 });

+var AST_SymbolAccessor = DEFNODE("SymbolAccessor", null, {
+    $documentation: "The name of a property accessor (setter/getter function)"
+}, AST_Symbol);
+
 var AST_SymbolDeclaration = DEFNODE("SymbolDeclaration", "init", {
     $documentation: "A declaration symbol (symbol in var/const, function name or argument, symbol in catch)",
+    $propdoc: {
+        init: "[AST_Node*/S] array of initializers for this declaration."
+    }
 }, AST_Symbol);

 var AST_SymbolVar = DEFNODE("SymbolVar", null, {
@@ -622,9 +797,12 @@ var AST_SymbolCatch = DEFNODE("SymbolCatch", null, {
     $documentation: "Symbol naming the exception in catch",
 }, AST_SymbolDeclaration);

-var AST_Label = DEFNODE("Label", "references label_target", {
+var AST_Label = DEFNODE("Label", "references", {
     $documentation: "Symbol naming a label (declaration)",
-}, AST_SymbolDeclaration);
+    $propdoc: {
+        references: "[AST_LabelRef*] a list of nodes referring to this label"
+    }
+}, AST_Symbol);

 var AST_SymbolRef = DEFNODE("SymbolRef", null, {
     $documentation: "Reference to some symbol (not definition/declaration)",
@@ -632,7 +810,7 @@ var AST_SymbolRef = DEFNODE("SymbolRef", null, {

 var AST_LabelRef = DEFNODE("LabelRef", null, {
     $documentation: "Reference to a label symbol",
-}, AST_SymbolRef);
+}, AST_Symbol);

 var AST_This = DEFNODE("This", null, {
     $documentation: "The `this` symbol",
@@ -647,16 +825,22 @@ var AST_Constant = DEFNODE("Constant", null, {

 var AST_String = DEFNODE("String", "value", {
     $documentation: "A string literal",
+    $propdoc: {
+        value: "[string] the contents of this string"
```
|
||||||
|
}
|
||||||
}, AST_Constant);
|
}, AST_Constant);
|
||||||
|
|
||||||
var AST_Number = DEFNODE("Number", "value", {
|
var AST_Number = DEFNODE("Number", "value", {
|
||||||
$documentation: "A number literal",
|
$documentation: "A number literal",
|
||||||
|
$propdoc: {
|
||||||
|
value: "[number] the numeric value"
|
||||||
|
}
|
||||||
}, AST_Constant);
|
}, AST_Constant);
|
||||||
|
|
||||||
var AST_RegExp = DEFNODE("Regexp", "pattern mods", {
|
var AST_RegExp = DEFNODE("RegExp", "value", {
|
||||||
$documentation: "A regexp literal",
|
$documentation: "A regexp literal",
|
||||||
initialize: function() {
|
$propdoc: {
|
||||||
this.value = new RegExp(this.pattern, this.mods);
|
value: "[RegExp] the actual regexp"
|
||||||
}
|
}
|
||||||
}, AST_Constant);
|
}, AST_Constant);
|
||||||
|
|
||||||
@@ -679,6 +863,11 @@ var AST_Undefined = DEFNODE("Undefined", null, {
|
|||||||
value: (function(){}())
|
value: (function(){}())
|
||||||
}, AST_Atom);
|
}, AST_Atom);
|
||||||
|
|
||||||
|
var AST_Infinity = DEFNODE("Infinity", null, {
|
||||||
|
$documentation: "The `Infinity` value",
|
||||||
|
value: 1/0
|
||||||
|
}, AST_Atom);
|
||||||
|
|
||||||
var AST_Boolean = DEFNODE("Boolean", null, {
|
var AST_Boolean = DEFNODE("Boolean", null, {
|
||||||
$documentation: "Base class for booleans",
|
$documentation: "Base class for booleans",
|
||||||
}, AST_Atom);
|
}, AST_Atom);
|
||||||
@@ -748,4 +937,23 @@ TreeWalker.prototype = {
|
|||||||
self = p;
|
self = p;
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
loopcontrol_target: function(label) {
|
||||||
|
var stack = this.stack;
|
||||||
|
if (label) {
|
||||||
|
for (var i = stack.length; --i >= 0;) {
|
||||||
|
var x = stack[i];
|
||||||
|
if (x instanceof AST_LabeledStatement && x.label.name == label.name) {
|
||||||
|
return x.body;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
for (var i = stack.length; --i >= 0;) {
|
||||||
|
var x = stack[i];
|
||||||
|
if (x instanceof AST_Switch
|
||||||
|
|| x instanceof AST_For
|
||||||
|
|| x instanceof AST_ForIn
|
||||||
|
|| x instanceof AST_DWLoop) return x;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
};
|
};
|
||||||
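A rough illustration of what the new `TreeWalker.loopcontrol_target` resolves; the snippet and comments below are hand-written interpretation of the code above, not part of the diff:

    out: for (var i = 0; i < 10; i++) {   // AST_LabeledStatement "out" wrapping an AST_For
        switch (i) {                      // AST_Switch
            case 3:
                break;                    // unlabeled: nearest Switch/For/ForIn/DWLoop on the walker stack (the switch)
            case 5:
                break out;                // labeled: the body of the LabeledStatement named "out" (the for loop)
        }
    }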
396 lib/compress.js
@@ -53,6 +53,7 @@ function Compressor(options, false_by_default) {
         dead_code : !false_by_default,
         drop_debugger : !false_by_default,
         unsafe : !false_by_default,
+        unsafe_comps : false,
         conditionals : !false_by_default,
         comparisons : !false_by_default,
         evaluate : !false_by_default,
@@ -80,7 +81,6 @@ merge(Compressor.prototype, {
     },
     before: function(node, descend, in_list) {
         if (node._squeezed) return node;
-        node = node.clone();
         if (node instanceof AST_Scope) {
             node.drop_unused(this);
             node = node.hoist_declarations(this);
@@ -103,7 +103,7 @@ merge(Compressor.prototype, {
     }
 });
 
-(function(undefined){
+(function(){
 
     function OPT(node, optimizer) {
         node.DEFMETHOD("optimize", function(compressor){
@@ -135,6 +135,17 @@ merge(Compressor.prototype, {
    };
 
    function make_node_from_constant(compressor, val, orig) {
+        // XXX: WIP.
+        // if (val instanceof AST_Node) return val.transform(new TreeTransformer(null, function(node){
+        //     if (node instanceof AST_SymbolRef) {
+        //         var scope = compressor.find_parent(AST_Scope);
+        //         var def = scope.find_variable(node);
+        //         node.thedef = def;
+        //         return node;
+        //     }
+        // })).transform(compressor);
+
+        if (val instanceof AST_Node) return val.transform(compressor);
        switch (typeof val) {
          case "string":
            return make_node(AST_String, orig, {
@@ -152,6 +163,9 @@ merge(Compressor.prototype, {
        if (val === null) {
            return make_node(AST_Null, orig).optimize(compressor);
        }
+        if (val instanceof RegExp) {
+            return make_node(AST_RegExp, orig).optimize(compressor);
+        }
        throw new Error(string_template("Can't handle constant of type: {type}", {
            type: typeof val
        }));
@@ -173,6 +187,14 @@ merge(Compressor.prototype, {
        return false;
    };
 
+    function loop_body(x) {
+        if (x instanceof AST_Switch) return x;
+        if (x instanceof AST_For || x instanceof AST_ForIn || x instanceof AST_DWLoop) {
+            return (x.body instanceof AST_BlockStatement ? x.body : x);
+        }
+        return x;
+    };
+
    function tighten_body(statements, compressor) {
        var CHANGED;
        do {
@@ -194,12 +216,20 @@ merge(Compressor.prototype, {
        return statements;
 
        function eliminate_spurious_blocks(statements) {
+            var seen_dirs = [];
            return statements.reduce(function(a, stat){
                if (stat instanceof AST_BlockStatement) {
                    CHANGED = true;
                    a.push.apply(a, eliminate_spurious_blocks(stat.body));
                } else if (stat instanceof AST_EmptyStatement) {
                    CHANGED = true;
+                } else if (stat instanceof AST_Directive) {
+                    if (seen_dirs.indexOf(stat.value) < 0) {
+                        a.push(stat);
+                        seen_dirs.push(stat.value);
+                    } else {
+                        CHANGED = true;
+                    }
                } else {
                    a.push(stat);
                }
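The new `AST_Directive` branch deduplicates directives within a block. A hand-written before/after sketch (not actual tool output):

    // before: concatenating files often stacks several identical prologues
    "use strict";
    "use strict";
    var a = 1;

    // after eliminate_spurious_blocks: the first occurrence is kept, repeats are dropped
    "use strict";
    var a = 1;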
@@ -282,8 +312,13 @@ merge(Compressor.prototype, {
                }
 
                var ab = aborts(stat.body);
+                var lct = ab instanceof AST_LoopControl ? compressor.loopcontrol_target(ab.label) : null;
                if (ab && ((ab instanceof AST_Return && !ab.value && in_lambda)
-                           || (ab instanceof AST_Continue && self === ab.target()))) {
+                           || (ab instanceof AST_Continue && self === loop_body(lct))
+                           || (ab instanceof AST_Break && lct instanceof AST_BlockStatement && self === lct))) {
+                    if (ab.label) {
+                        remove(ab.label.thedef.references, ab.label);
+                    }
                    CHANGED = true;
                    var body = as_statement_array(stat.body).slice(0, -1);
                    stat = stat.clone();
@@ -299,8 +334,13 @@ merge(Compressor.prototype, {
                }
 
                var ab = aborts(stat.alternative);
+                var lct = ab instanceof AST_LoopControl ? compressor.loopcontrol_target(ab.label) : null;
                if (ab && ((ab instanceof AST_Return && !ab.value && in_lambda)
-                           || (ab instanceof AST_Continue && self === ab.target()))) {
+                           || (ab instanceof AST_Continue && self === loop_body(lct))
+                           || (ab instanceof AST_Break && lct instanceof AST_BlockStatement && self === lct))) {
+                    if (ab.label) {
+                        remove(ab.label.thedef.references, ab.label);
+                    }
                    CHANGED = true;
                    stat = stat.clone();
                    stat.body = make_node(AST_BlockStatement, stat.body, {
@@ -326,14 +366,27 @@ merge(Compressor.prototype, {
        function eliminate_dead_code(statements, compressor) {
            var has_quit = false;
            var orig = statements.length;
+            var self = compressor.self();
            statements = statements.reduce(function(a, stat){
                if (has_quit) {
                    extract_declarations_from_unreachable_code(compressor, stat, a);
                } else {
-                    a.push(stat);
-                    if (stat instanceof AST_Jump) {
-                        has_quit = true;
+                    if (stat instanceof AST_LoopControl) {
+                        var lct = compressor.loopcontrol_target(stat.label);
+                        if ((stat instanceof AST_Break
+                             && lct instanceof AST_BlockStatement
+                             && loop_body(lct) === self) || (stat instanceof AST_Continue
+                                                             && loop_body(lct) === self)) {
+                            if (stat.label) {
+                                remove(stat.label.thedef.references, stat.label);
+                            }
+                        } else {
+                            a.push(stat);
+                        }
+                    } else {
+                        a.push(stat);
                    }
+                    if (aborts(stat)) has_quit = true;
                }
                return a;
            }, []);
@@ -375,12 +428,23 @@ merge(Compressor.prototype, {
            var ret = [], prev = null;
            statements.forEach(function(stat){
                if (prev) {
-                    if (stat instanceof AST_For && stat.init && !(stat.init instanceof AST_Definitions)) {
-                        stat.init = cons_seq(stat.init);
-                    }
-                    else if (stat instanceof AST_For && !stat.init) {
-                        stat.init = prev.body;
-                        ret.pop();
+                    if (stat instanceof AST_For) {
+                        var opera = {};
+                        try {
+                            prev.body.walk(new TreeWalker(function(node){
+                                if (node instanceof AST_Binary && node.operator == "in")
+                                    throw opera;
+                            }));
+                            if (stat.init && !(stat.init instanceof AST_Definitions)) {
+                                stat.init = cons_seq(stat.init);
+                            }
+                            else if (!stat.init) {
+                                stat.init = prev.body;
+                                ret.pop();
+                            }
+                        } catch(ex) {
+                            if (ex !== opera) throw ex;
+                        }
                    }
                    else if (stat instanceof AST_If) {
                        stat.condition = cons_seq(stat.condition);
@@ -439,7 +503,6 @@ merge(Compressor.prototype, {
            stat.walk(new TreeWalker(function(node){
                if (node instanceof AST_Definitions) {
                    compressor.warn("Declarations in unreachable code! [{file}:{line},{col}]", node.start);
-                    node = node.clone();
                    node.remove_initializers();
                    target.push(node);
                    return true;
@@ -528,7 +591,14 @@ merge(Compressor.prototype, {
        }
    });
    def(AST_Statement, function(){
-        throw new Error("Cannot evaluate a statement");
+        throw new Error(string_template("Cannot evaluate a statement [{file}:{line},{col}]", this.start));
+    });
+    def(AST_Function, function(){
+        // XXX: AST_Function inherits from AST_Scope, which itself
+        // inherits from AST_Statement; however, an AST_Function
+        // isn't really a statement. This could byte in other
+        // places too. :-( Wish JS had multiple inheritance.
+        return [ this ];
    });
    function ev(node) {
        return node._eval();
@@ -634,7 +704,7 @@ merge(Compressor.prototype, {
    });
    def(AST_Binary, function(compressor){
        var self = this.clone(), op = this.operator;
-        if (compressor.option("comparisons") && compressor.option("unsafe")) {
+        if (compressor.option("unsafe_comps")) {
            switch (op) {
              case "<=" : self.operator = ">" ; return self;
              case "<" : self.operator = ">=" ; return self;
@@ -673,8 +743,8 @@ merge(Compressor.prototype, {
    def(AST_EmptyStatement, function(){ return false });
    def(AST_Constant, function(){ return false });
    def(AST_This, function(){ return false });
-    def(AST_Function, function(){ return false });
-    def(AST_BlockStatement, function(){
+    def(AST_Block, function(){
        for (var i = this.body.length; --i >= 0;) {
            if (this.body[i].has_side_effects())
                return true;
@@ -685,6 +755,8 @@ merge(Compressor.prototype, {
    def(AST_SimpleStatement, function(){
        return this.body.has_side_effects();
    });
+    def(AST_Defun, function(){ return true });
+    def(AST_Function, function(){ return false });
    def(AST_Binary, function(){
        return this.left.has_side_effects()
            || this.right.has_side_effects();
@@ -746,6 +818,9 @@ merge(Compressor.prototype, {
        var n = this.body.length;
        return n > 0 && aborts(this.body[n - 1]);
    });
+    def(AST_If, function(){
+        return this.alternative && aborts(this.body) && aborts(this.alternative);
+    });
 })(function(node, func){
    node.DEFMETHOD("aborts", func);
 });
@@ -766,9 +841,18 @@ merge(Compressor.prototype, {
    });
 
    OPT(AST_LabeledStatement, function(self, compressor){
+        if (self.body instanceof AST_Break
+            && compressor.loopcontrol_target(self.body.label) === self.body) {
+            return make_node(AST_EmptyStatement, self);
+        }
        return self.label.references.length == 0 ? self.body : self;
    });
 
+    OPT(AST_Block, function(self, compressor){
+        self.body = tighten_body(self.body, compressor);
+        return self;
+    });
+
    OPT(AST_BlockStatement, function(self, compressor){
        self.body = tighten_body(self.body, compressor);
        switch (self.body.length) {
@@ -778,16 +862,6 @@ merge(Compressor.prototype, {
        return self;
    });
 
-    OPT(AST_Block, function(self, compressor){
-        self.body = tighten_body(self.body, compressor);
-        return self;
-    });
-
-    OPT(AST_Scope, function(self, compressor){
-        self.body = tighten_body(self.body, compressor);
-        return self;
-    });
-
    AST_Scope.DEFMETHOD("drop_unused", function(compressor){
        var self = this;
        if (compressor.option("unused")
@@ -811,7 +885,7 @@ merge(Compressor.prototype, {
                });
                return true;
            }
-            if (node instanceof AST_SymbolRef && !(node instanceof AST_LabelRef)) {
+            if (node instanceof AST_SymbolRef) {
                push_uniq(in_use, node.definition());
                return true;
            }
@@ -834,8 +908,7 @@ merge(Compressor.prototype, {
            if (decl instanceof AST_SymbolDeclaration) {
                decl.init.forEach(function(init){
                    var tw = new TreeWalker(function(node){
-                        if (node instanceof AST_SymbolRef
-                            && node.definition().scope.$self === self.$self) {
+                        if (node instanceof AST_SymbolRef) {
                            push_uniq(in_use, node.definition());
                        }
                    });
@@ -846,7 +919,22 @@ merge(Compressor.prototype, {
        }
        // pass 3: we should drop declarations not in_use
        var tt = new TreeTransformer(
-            function before(node, descend) {
+            function before(node, descend, in_list) {
+                if (node instanceof AST_Lambda) {
+                    for (var a = node.argnames, i = a.length; --i >= 0;) {
+                        var sym = a[i];
+                        if (sym.unreferenced()) {
+                            a.pop();
+                            compressor.warn("Dropping unused function argument {name} [{file}:{line},{col}]", {
+                                name : sym.name,
+                                file : sym.start.file,
+                                line : sym.start.line,
+                                col : sym.start.col
+                            });
+                        }
+                        else break;
+                    }
+                }
                if (node instanceof AST_Defun && node !== self) {
                    if (!member(node.name.definition(), in_use)) {
                        compressor.warn("Dropping unused function {name} [{file}:{line},{col}]", {
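A hand-made illustration of the new unused-argument dropping (only trailing unreferenced arguments are popped, and only with the `unused` option on; this is not actual tool output):

    // before
    function handler(ev, extra) {   // `extra` is never referenced
        console.log(ev.type);
    }

    // after drop_unused
    function handler(ev) {
        console.log(ev.type);
    }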
@@ -922,6 +1010,19 @@ merge(Compressor.prototype, {
                    }
                    return node;
                }
+                if (node instanceof AST_For && node.init instanceof AST_BlockStatement) {
+                    descend(node, this);
+                    // certain combination of unused name + side effect leads to:
+                    // https://github.com/mishoo/UglifyJS2/issues/44
+                    // that's an invalid AST.
+                    // We fix it at this stage by moving the `var` outside the `for`.
+                    var body = node.init.body.slice(0, -1);
+                    node.init = node.init.body.slice(-1)[0].body;
+                    body.push(node);
+                    return in_list ? MAP.splice(body) : make_node(AST_BlockStatement, node, {
+                        body: body
+                    });
+                }
                if (node instanceof AST_Scope && node !== self)
                    return node;
            }
@@ -937,7 +1038,7 @@ merge(Compressor.prototype, {
        if (hoist_funs || hoist_vars) {
            var dirs = [];
            var hoisted = [];
-            var vars = {}, vars_found = 0, var_decl = 0;
+            var vars = new Dictionary(), vars_found = 0, var_decl = 0;
            // let's count var_decl first, we seem to waste a lot of
            // space if we hoist `var` when there's only one.
            self.walk(new TreeWalker(function(node){
@@ -962,7 +1063,7 @@ merge(Compressor.prototype, {
            }
            if (node instanceof AST_Var && hoist_vars) {
                node.definitions.forEach(function(def){
-                    vars[def.name.name] = def;
+                    vars.set(def.name.name, def);
                    ++vars_found;
                });
                var seq = node.to_assignments();
@@ -986,8 +1087,8 @@ merge(Compressor.prototype, {
            );
            self = self.transform(tt);
            if (vars_found > 0) hoisted.unshift(make_node(AST_Var, self, {
-                definitions: Object.keys(vars).map(function(name){
-                    var def = vars[name].clone();
+                definitions: vars.map(function(def){
+                    def = def.clone();
                    def.value = null;
                    return def;
                })
@@ -1029,6 +1130,61 @@ merge(Compressor.prototype, {
        return self;
    });
 
+    function if_break_in_loop(self, compressor) {
+        function drop_it(rest) {
+            rest = as_statement_array(rest);
+            if (self.body instanceof AST_BlockStatement) {
+                self.body = self.body.clone();
+                self.body.body = rest.concat(self.body.body.slice(1));
+                self.body = self.body.transform(compressor);
+            } else {
+                self.body = make_node(AST_BlockStatement, self.body, {
+                    body: rest
+                }).transform(compressor);
+            }
+            if_break_in_loop(self, compressor);
+        }
+        var first = self.body instanceof AST_BlockStatement ? self.body.body[0] : self.body;
+        if (first instanceof AST_If) {
+            if (first.body instanceof AST_Break
+                && compressor.loopcontrol_target(first.body.label) === self) {
+                if (self.condition) {
+                    self.condition = make_node(AST_Binary, self.condition, {
+                        left: self.condition,
+                        operator: "&&",
+                        right: first.condition.negate(compressor),
+                    });
+                } else {
+                    self.condition = first.condition.negate(compressor);
+                }
+                drop_it(first.alternative);
+            }
+            else if (first.alternative instanceof AST_Break
+                     && compressor.loopcontrol_target(first.alternative.label) === self) {
+                if (self.condition) {
+                    self.condition = make_node(AST_Binary, self.condition, {
+                        left: self.condition,
+                        operator: "&&",
+                        right: first.condition,
+                    });
+                } else {
+                    self.condition = first.condition;
+                }
+                drop_it(first.body);
+            }
+        }
+    };
+
+    OPT(AST_While, function(self, compressor) {
+        if (!compressor.option("loops")) return self;
+        self = AST_DWLoop.prototype.optimize.call(self, compressor);
+        if (self instanceof AST_While) {
+            if_break_in_loop(self, compressor);
+            self = make_node(AST_For, self, self).transform(compressor);
+        }
+        return self;
+    });
+
    OPT(AST_For, function(self, compressor){
        var cond = self.condition;
        if (cond) {
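A hand-written sketch of the kind of rewrite `if_break_in_loop` and the new `AST_While` rule enable (the exact output depends on the other enabled options; this is not tool output):

    // before: a while loop that starts by testing for an early break
    while (more()) {
        if (done) break;
        step();
    }

    // after: the break condition is folded (negated) into the loop condition
    // and the while is re-emitted as a shorter for statement
    for (; more() && !done;) step();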
@@ -1053,6 +1209,7 @@ merge(Compressor.prototype, {
                }
            }
        }
+        if_break_in_loop(self, compressor);
        return self;
    });
 
@@ -1073,7 +1230,7 @@ merge(Compressor.prototype, {
                    extract_declarations_from_unreachable_code(compressor, self.alternative, a);
                }
                a.push(self.body);
-                return make_node(AST_BlockStatement, self, { body: a });
+                return make_node(AST_BlockStatement, self, { body: a }).transform(compressor);
            }
        } else {
            compressor.warn("Condition always false [{file}:{line},{col}]", self.condition.start);
@@ -1081,7 +1238,7 @@ merge(Compressor.prototype, {
                var a = [];
                extract_declarations_from_unreachable_code(compressor, self.body, a);
                if (self.alternative) a.push(self.alternative);
-                return make_node(AST_BlockStatement, self, { body: a });
+                return make_node(AST_BlockStatement, self, { body: a }).transform(compressor);
            }
        }
    }
@@ -1098,7 +1255,7 @@ merge(Compressor.prototype, {
        if (is_empty(self.body) && is_empty(self.alternative)) {
            return make_node(AST_SimpleStatement, self.condition, {
                body: self.condition
-            });
+            }).transform(compressor);
        }
        if (self.body instanceof AST_SimpleStatement
            && self.alternative instanceof AST_SimpleStatement) {
@@ -1108,7 +1265,7 @@ merge(Compressor.prototype, {
                    consequent : self.body.body,
                    alternative : self.alternative.body
                })
-            });
+            }).transform(compressor);
        }
        if (is_empty(self.alternative) && self.body instanceof AST_SimpleStatement) {
            if (negated_is_best) return make_node(AST_SimpleStatement, self, {
@@ -1117,14 +1274,14 @@ merge(Compressor.prototype, {
                    left : negated,
                    right : self.body.body
                })
-            });
+            }).transform(compressor);
            return make_node(AST_SimpleStatement, self, {
                body: make_node(AST_Binary, self, {
                    operator : "&&",
                    left : self.condition,
                    right : self.body.body
                })
-            });
+            }).transform(compressor);
        }
        if (self.body instanceof AST_EmptyStatement
            && self.alternative
@@ -1135,7 +1292,7 @@ merge(Compressor.prototype, {
                    left : self.condition,
                    right : self.alternative.body
                })
-            });
+            }).transform(compressor);
        }
        if (self.body instanceof AST_Exit
            && self.alternative instanceof AST_Exit
@@ -1143,10 +1300,10 @@ merge(Compressor.prototype, {
            return make_node(self.body.CTOR, self, {
                value: make_node(AST_Conditional, self, {
                    condition : self.condition,
-                    consequent : self.body.value,
-                    alternative : self.alternative.value || make_node(AST_Undefined, self).optimize(compressor)
+                    consequent : self.body.value || make_node(AST_Undefined, self.body).optimize(compressor),
+                    alternative : self.alternative.value || make_node(AST_Undefined, self.alternative).optimize(compressor)
                })
-            });
+            }).transform(compressor);
        }
        if (self.body instanceof AST_If
            && !self.body.alternative
@@ -1164,7 +1321,7 @@ merge(Compressor.prototype, {
            self.alternative = null;
            return make_node(AST_BlockStatement, self, {
                body: [ self, alt ]
-            });
+            }).transform(compressor);
        }
    }
    if (aborts(self.alternative)) {
@@ -1174,16 +1331,21 @@ merge(Compressor.prototype, {
        self.alternative = null;
        return make_node(AST_BlockStatement, self, {
            body: [ self, body ]
-        });
+        }).transform(compressor);
    }
    return self;
    });
 
    OPT(AST_Switch, function(self, compressor){
+        if (self.body.length == 0 && compressor.option("conditionals")) {
+            return make_node(AST_SimpleStatement, self, {
+                body: self.expression
+            }).transform(compressor);
+        }
        var last_branch = self.body[self.body.length - 1];
        if (last_branch) {
            var stat = last_branch.body[last_branch.body.length - 1]; // last statement
-            if (stat instanceof AST_Break && !stat.label)
+            if (stat instanceof AST_Break && loop_body(compressor.loopcontrol_target(stat.label)) === self)
                last_branch.body.pop();
        }
        return self;
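A rough before/after for the new empty-switch rule (hand-written, assumes the `conditionals` option is enabled):

    // before: a switch whose body compressed down to nothing
    switch (sideEffect()) {}

    // after: only the (possibly side-effecting) discriminant is kept
    sideEffect();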
@@ -1200,11 +1362,7 @@ merge(Compressor.prototype, {
    });
 
    AST_Definitions.DEFMETHOD("remove_initializers", function(){
-        this.definitions = this.definitions.map(function(def){
-            def = def.clone();
-            def.value = null;
-            return def;
-        });
+        this.definitions.forEach(function(def){ def.value = null });
    });
 
    AST_Definitions.DEFMETHOD("to_assignments", function(){
@@ -1220,14 +1378,7 @@ merge(Compressor.prototype, {
            return a;
        }, []);
        if (assignments.length == 0) return null;
-        return (function seq(list){
-            var first = list[0];
-            if (list.length == 1) return first;
-            return make_node(AST_Seq, first, {
-                car: first,
-                cdr: seq(list.slice(1))
-            });
-        })(assignments);
+        return AST_Seq.from_array(assignments);
    });
 
    OPT(AST_Definitions, function(self, compressor){
@@ -1275,10 +1426,17 @@ merge(Compressor.prototype, {
            }
            else if (exp instanceof AST_Dot && exp.property == "toString" && self.args.length == 0) {
                return make_node(AST_Binary, self, {
-                    left: exp.expression,
+                    left: make_node(AST_String, self, { value: "" }),
                    operator: "+",
-                    right: make_node(AST_String, self, { value: "" })
-                });
+                    right: exp.expression
+                }).transform(compressor);
+            }
+        }
+        if (compressor.option("side_effects")) {
+            if (self.expression instanceof AST_Function
+                && self.args.length == 0
+                && !AST_Block.prototype.has_side_effects.call(self.expression)) {
+                return make_node(AST_Undefined, self).transform(compressor);
            }
        }
        return self;
@@ -1302,6 +1460,10 @@ merge(Compressor.prototype, {
    });
 
    OPT(AST_Seq, function(self, compressor){
+        if (!compressor.option("side_effects"))
+            return self;
+        if (!self.car.has_side_effects())
+            return self.cdr;
        if (compressor.option("cascade")) {
            if (self.car instanceof AST_Assign
                && !self.car.left.has_side_effects()
@@ -1317,7 +1479,26 @@ merge(Compressor.prototype, {
        return self;
    });
 
+    AST_Unary.DEFMETHOD("lift_sequences", function(compressor){
+        if (compressor.option("sequences")) {
+            if (this.expression instanceof AST_Seq) {
+                var seq = this.expression;
+                var x = seq.to_array();
+                this.expression = x.pop();
+                x.push(this);
+                seq = AST_Seq.from_array(x).transform(compressor);
+                return seq;
+            }
+        }
+        return this;
+    });
+
+    OPT(AST_UnaryPostfix, function(self, compressor){
+        return self.lift_sequences(compressor);
+    });
+
    OPT(AST_UnaryPrefix, function(self, compressor){
+        self = self.lift_sequences(compressor);
        var e = self.expression;
        if (compressor.option("booleans") && compressor.in_boolean_context()) {
            switch (self.operator) {
@@ -1333,14 +1514,53 @@ merge(Compressor.prototype, {
                compressor.warn("Boolean expression always true [{file}:{line},{col}]", self.start);
                return make_node(AST_True, self);
            }
-        }
-        if (e instanceof AST_Binary) {
-            self = best_of(self, e.negate(compressor));
+            if (e instanceof AST_Binary && self.operator == "!") {
+                self = best_of(self, e.negate(compressor));
+            }
        }
        return self.evaluate(compressor)[0];
    });
 
+    AST_Binary.DEFMETHOD("lift_sequences", function(compressor){
+        if (compressor.option("sequences")) {
+            if (this.left instanceof AST_Seq) {
+                var seq = this.left;
+                var x = seq.to_array();
+                this.left = x.pop();
+                x.push(this);
+                seq = AST_Seq.from_array(x).transform(compressor);
+                return seq;
+            }
+            if (this.right instanceof AST_Seq
+                && !(this.operator == "||" || this.operator == "&&")
+                && !this.left.has_side_effects()) {
+                var seq = this.right;
+                var x = seq.to_array();
+                this.right = x.pop();
+                x.push(this);
+                seq = AST_Seq.from_array(x).transform(compressor);
+                return seq;
+            }
+        }
+        return this;
+    });
+
+    var commutativeOperators = makePredicate("== === != !== * & | ^");
+
    OPT(AST_Binary, function(self, compressor){
+        function reverse(op) {
+            if (op) self.operator = op;
+            var tmp = self.left;
+            self.left = self.right;
+            self.right = tmp;
+        };
+        if (commutativeOperators(self.operator)) {
+            if (self.right instanceof AST_Constant
+                && !(self.left instanceof AST_Constant)) {
+                reverse();
+            }
+        }
+        self = self.lift_sequences(compressor);
        if (compressor.option("comparisons")) switch (self.operator) {
          case "===":
          case "!==":
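A hand-written illustration of the new `lift_sequences` methods on unary and binary nodes (assumes the `sequences` option; not actual tool output):

    // before: a comma sequence buried inside a binary expression
    var r = (init(), x) + 1;

    // after: the sequence is lifted so the side effect runs first and the
    // remaining expression stays simple
    var r = (init(), x + 1);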
@@ -1351,21 +1571,10 @@ merge(Compressor.prototype, {
            // XXX: intentionally falling down to the next case
          case "==":
          case "!=":
-            if (self.left instanceof AST_UnaryPrefix
-                && self.left.operator == "typeof"
-                && self.right instanceof AST_String
-                && self.right.value == "undefined") {
-                if (!(self.left.expression instanceof AST_SymbolRef)
-                    || !self.left.expression.undeclared()) {
-                    self.left = self.left.expression;
-                    self.right = make_node(AST_Undefined, self.right).optimize(compressor);
-                    if (self.operator.length == 2) self.operator += "=";
-                }
-            }
-            else if (self.left instanceof AST_String
-                     && self.left.value == "undefined"
-                     && self.right instanceof AST_UnaryPrefix
-                     && self.right.operator == "typeof") {
+            if (self.left instanceof AST_String
+                && self.left.value == "undefined"
+                && self.right instanceof AST_UnaryPrefix
+                && self.right.operator == "typeof") {
                if (!(self.right.expression instanceof AST_SymbolRef)
                    || !self.right.expression.undeclared()) {
                    self.left = self.right.expression;
@@ -1428,12 +1637,6 @@ merge(Compressor.prototype, {
            });
            self = best_of(self, negated);
        }
-        var reverse = function(op) {
-            self.operator = op;
-            var tmp = self.left;
-            self.left = self.right;
-            self.right = tmp;
-        };
        switch (self.operator) {
          case "<": reverse(">"); break;
          case "<=": reverse(">="); break;
@@ -1445,7 +1648,7 @@ merge(Compressor.prototype, {
    OPT(AST_SymbolRef, function(self, compressor){
        if (self.undeclared()) {
            var defines = compressor.option("global_defs");
-            if (defines && HOP(defines, self.name)) {
+            if (defines && defines.hasOwnProperty(self.name)) {
                return make_node_from_constant(compressor, defines[self.name], self);
            }
            switch (self.name) {
@@ -1453,6 +1656,8 @@ merge(Compressor.prototype, {
                return make_node(AST_Undefined, self);
              case "NaN":
                return make_node(AST_NaN, self);
+              case "Infinity":
+                return make_node(AST_Infinity, self);
            }
        }
        return self;
@@ -1477,6 +1682,7 @@ merge(Compressor.prototype, {
 
    var ASSIGN_OPS = [ '+', '-', '/', '*', '%', '>>', '<<', '>>>', '|', '^', '&' ];
    OPT(AST_Assign, function(self, compressor){
+        self = self.lift_sequences(compressor);
        if (self.operator == "="
            && self.left instanceof AST_SymbolRef
            && self.right instanceof AST_Binary
@@ -1580,4 +1786,14 @@ merge(Compressor.prototype, {
        return self;
    });
 
+    function literals_in_boolean_context(self, compressor) {
+        if (compressor.option("booleans") && compressor.in_boolean_context()) {
+            return make_node(AST_True, self);
+        }
+        return self;
+    };
+    OPT(AST_Array, literals_in_boolean_context);
+    OPT(AST_Object, literals_in_boolean_context);
+    OPT(AST_RegExp, literals_in_boolean_context);
+
 })();
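Hand-written before/after for `literals_in_boolean_context` (assumes the `booleans` option; array, object and regexp literals are always truthy):

    // before
    if ([]) doSomething();

    // after: the literal in boolean position becomes `true`; later passes
    // can then drop the test entirely
    if (true) doSomething();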
@@ -58,7 +58,7 @@
     CatchClause : function(M) {
         return new AST_Catch({
             start : my_start_token(M),
-            end : my_start_token(M),
+            end : my_end_token(M),
             argname : from_moz(M.param),
             body : from_moz(M.body).body
         });
@@ -95,7 +95,7 @@
     MemberExpression : function(M) {
         return new (M.computed ? AST_Sub : AST_Dot)({
             start : my_start_token(M),
-            end : my_start_token(M),
+            end : my_end_token(M),
             property : M.computed ? from_moz(M.property) : M.property.name,
             expression : from_moz(M.object)
         });
@@ -103,7 +103,7 @@
     SwitchCase : function(M) {
         return new (M.test ? AST_Case : AST_Default)({
             start : my_start_token(M),
-            end : my_start_token(M),
+            end : my_end_token(M),
             expression : from_moz(M.test),
             body : M.consequent.map(from_moz)
         });
@@ -125,9 +125,6 @@
             return new (val ? AST_True : AST_False)(args);
           default:
             args.value = val;
-            var m = /\/(.*)\/(.*)/.exec(val+"");
-            args.pattern = m[1];
-            args.mods = m[2];
             return new AST_RegExp(args);
         }
     },
@@ -235,8 +232,6 @@
             moz_to_me += moz;
         } else if (how == "%") {
             moz_to_me += "from_moz(" + moz + ").body";
-        } else if (how == "@>") {
-            moz_to_me += "from_moz(" + moz + "[0])";
         } else throw new Error("Can't understand operator in propmap: " + prop);
     });
     moz_to_me += "\n})}";
100 lib/output.js
@@ -58,6 +58,7 @@ function OutputStream(options) {
        beautify : false,
        source_map : null,
        bracketize : false,
+        semicolons : true,
        comments : false
    }, true);
 
@@ -130,14 +131,23 @@ function OutputStream(options) {
        print("\n");
    };
 
+    var requireSemicolonChars = makePredicate("( [ + * / - , .");
+
    function print(str) {
        str = String(str);
        var ch = str.charAt(0);
        if (might_need_semicolon) {
-            if (";}".indexOf(ch) < 0 && !/[;]$/.test(last)) {
-                OUTPUT += ";";
-                current_col++;
-                current_pos++;
+            if ((!ch || ";}".indexOf(ch) < 0) && !/[;]$/.test(last)) {
+                if (options.semicolons || requireSemicolonChars(ch)) {
+                    OUTPUT += ";";
+                    current_col++;
+                    current_pos++;
+                } else {
+                    OUTPUT += "\n";
+                    current_pos++;
+                    current_line++;
+                    current_col = 0;
+                }
                if (!options.beautify)
                    might_need_space = false;
            }
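A hand-written illustration of the new `semicolons` output option (the separator actually chosen depends on the token that follows; this is not verified tool output):

    // semicolons: true (default)
    var a=1;var b=2;f(a,b);

    // semicolons: false — newlines separate statements except where the next
    // token would require an explicit ";" (lines starting with "(", "[", "+", etc.)
    var a=1
    var b=2
    f(a,b)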
@@ -248,12 +258,23 @@ function OutputStream(options) {
    };
 
    var add_mapping = options.source_map ? function(token, name) {
-        options.source_map.add(
-            token.file,
-            current_line, current_col,
-            token.line, token.col,
-            (!name && token.type == "name") ? token.value : name
-        );
+        try {
+            if (token) options.source_map.add(
+                token.file || "?",
+                current_line, current_col,
+                token.line, token.col,
+                (!name && token.type == "name") ? token.value : name
+            );
+        } catch(ex) {
+            AST_Node.warn("Couldn't figure out mapping for {file}:{line},{col} → {cline},{ccol} [{name}]", {
+                file: token.file,
+                line: token.line,
+                col: token.col,
+                cline: current_line,
+                ccol: current_col,
+                name: name || ""
+            })
+        }
    } : noop;
 
    function get() {
@@ -276,6 +297,7 @@ function OutputStream(options) {
        last : function() { return last },
        semicolon : semicolon,
        force_semicolon : force_semicolon,
+        to_ascii : to_ascii,
        print_name : function(name) { print(make_name(name)) },
        print_string : function(str) { print(encode_string(str)) },
        next_indent : next_indent,
@@ -388,10 +410,16 @@ function OutputStream(options) {
        return first_in_statement(output);
    });
 
+    PARENS(AST_Unary, function(output){
+        var p = output.parent();
+        return p instanceof AST_PropAccess && p.expression === this;
+    });
+
    PARENS(AST_Seq, function(output){
        var p = output.parent();
        return p instanceof AST_Call // (foo, bar)() or foo(1, (2, 3), 4)
-            || p instanceof AST_Binary // 1 + (2, 3) + 4 ==> 7
+            || p instanceof AST_Unary // !(foo, bar, baz)
+            || p instanceof AST_Binary // 1 + (2, 3) + 4 ==> 8
            || p instanceof AST_VarDef // var a = (1, 2), b = a + a; ==> b == 4
            || p instanceof AST_Dot // (1, {foo:2}).foo ==> 2
            || p instanceof AST_Array // [ 1, (2, 3), 4 ] ==> [ 1, 3, 4 ]
@@ -440,10 +468,36 @@ function OutputStream(options) {
        }
    });
 
+    PARENS(AST_PropAccess, function(output){
+        var p = output.parent();
+        if (p instanceof AST_New && p.expression === this) {
+            // i.e. new (foo.bar().baz)
+            //
+            // if there's one call into this subtree, then we need
+            // parens around it too, otherwise the call will be
+            // interpreted as passing the arguments to the upper New
+            // expression.
+            try {
+                this.walk(new TreeWalker(function(node){
+                    if (node instanceof AST_Call) throw p;
+                }));
+            } catch(ex) {
+                if (ex !== p) throw ex;
+                return true;
+            }
+        }
+    });
+
+    PARENS(AST_Call, function(output){
+        var p = output.parent();
+        return p instanceof AST_New && p.expression === this;
+    });
+
    PARENS(AST_New, function(output){
        var p = output.parent();
-        // (new Date).getTime();
-        if (p instanceof AST_Dot && no_constructor_parens(this, output))
+        if (no_constructor_parens(this, output)
+            && (p instanceof AST_Dot // (new Date).getTime()
+                || p instanceof AST_Call && p.expression === this)) // (new foo)(bar)
            return true;
    });
 
@@ -506,6 +560,7 @@ function OutputStream(options) {
    });
    DEFPRINT(AST_Toplevel, function(self, output){
        display_body(self.body, true, output);
+        output.print("");
    });
    DEFPRINT(AST_LabeledStatement, function(self, output){
        self.label.print(output);
@@ -857,6 +912,8 @@ function OutputStream(options) {
            }
        }
        output.print(".");
+        // the name after dot would be mapped about here.
+        output.add_mapping(self.end);
        output.print_name(self.property);
    });
    DEFPRINT(AST_Sub, function(self, output){
@@ -951,9 +1008,10 @@ function OutputStream(options) {
        output.print_name(def ? def.mangled_name || def.name : self.name);
    });
    DEFPRINT(AST_Undefined, function(self, output){
-        // XXX: should add more options for this
        output.print("void 0");
-        //output.print("[][0]");
+    });
+    DEFPRINT(AST_Infinity, function(self, output){
+        output.print("1/0");
    });
    DEFPRINT(AST_NaN, function(self, output){
        output.print("0/0");
@@ -971,10 +1029,10 @@ function OutputStream(options) {
        output.print(make_num(self.getValue()));
    });
    DEFPRINT(AST_RegExp, function(self, output){
-        output.print("/");
-        output.print(self.pattern);
-        output.print("/");
-        if (self.mods) output.print(self.mods);
+        var str = self.getValue().toString();
+        if (output.option("ascii_only"))
+            str = output.to_ascii(str);
+        output.print(str);
    });
 
    function force_statement(stat, output) {
@@ -1092,11 +1150,13 @@ function OutputStream(options) {
    DEFMAP(AST_Symbol, basic_sourcemap_gen);
    DEFMAP(AST_Jump, basic_sourcemap_gen);
    DEFMAP(AST_StatementWithBody, basic_sourcemap_gen);
+    DEFMAP(AST_LabeledStatement, noop); // since the label symbol will mark it
    DEFMAP(AST_Lambda, basic_sourcemap_gen);
-    DEFMAP(AST_PropAccess, basic_sourcemap_gen);
    DEFMAP(AST_Switch, basic_sourcemap_gen);
+    DEFMAP(AST_SwitchBranch, basic_sourcemap_gen);
    DEFMAP(AST_BlockStatement, basic_sourcemap_gen);
    DEFMAP(AST_Toplevel, noop);
+    DEFMAP(AST_New, basic_sourcemap_gen);
    DEFMAP(AST_Try, basic_sourcemap_gen);
    DEFMAP(AST_Catch, basic_sourcemap_gen);
    DEFMAP(AST_Finally, basic_sourcemap_gen);
487 lib/parse.js
@@ -44,89 +44,24 @@
|
|||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
|
|
||||||
var KEYWORDS = array_to_hash([
|
var KEYWORDS = 'break case catch const continue debugger default delete do else finally for function if in instanceof new return switch throw try typeof var void while with';
|
||||||
"break",
|
var KEYWORDS_ATOM = 'false null true';
|
||||||
"case",
|
var RESERVED_WORDS = 'abstract boolean byte char class double enum export extends final float goto implements import int interface long native package private protected public short static super synchronized this throws transient volatile'
|
||||||
"catch",
|
+ " " + KEYWORDS_ATOM + " " + KEYWORDS;
|
||||||
"const",
|
var KEYWORDS_BEFORE_EXPRESSION = 'return new delete throw else case';
|
||||||
"continue",
|
|
||||||
"debugger",
|
|
||||||
"default",
|
|
||||||
"delete",
|
|
||||||
"do",
|
|
||||||
"else",
|
|
||||||
"finally",
|
|
||||||
"for",
|
|
||||||
"function",
|
|
||||||
"if",
|
|
||||||
"in",
|
|
||||||
"instanceof",
|
|
||||||
"new",
|
|
||||||
"return",
|
|
||||||
"switch",
|
|
||||||
"throw",
|
|
||||||
"try",
|
|
||||||
"typeof",
|
|
||||||
"var",
|
|
||||||
"void",
|
|
||||||
"while",
|
|
||||||
"with"
|
|
||||||
]);
|
|
||||||
|
|
||||||
var RESERVED_WORDS = array_to_hash([
|
KEYWORDS = makePredicate(KEYWORDS);
|
||||||
"abstract",
|
RESERVED_WORDS = makePredicate(RESERVED_WORDS);
|
||||||
"boolean",
|
KEYWORDS_BEFORE_EXPRESSION = makePredicate(KEYWORDS_BEFORE_EXPRESSION);
|
||||||
"byte",
|
KEYWORDS_ATOM = makePredicate(KEYWORDS_ATOM);
|
||||||
"char",
|
|
||||||
"class",
|
|
||||||
"double",
|
|
||||||
"enum",
|
|
||||||
"export",
|
|
||||||
"extends",
|
|
||||||
"final",
|
|
||||||
"float",
|
|
||||||
"goto",
|
|
||||||
"implements",
|
|
||||||
"import",
|
|
||||||
"int",
|
|
||||||
"interface",
|
|
||||||
"long",
|
|
||||||
"native",
|
|
||||||
"package",
|
|
||||||
"private",
|
|
||||||
"protected",
|
|
||||||
"public",
|
|
||||||
"short",
|
|
||||||
"static",
|
|
||||||
"super",
|
|
||||||
"synchronized",
|
|
||||||
"throws",
|
|
||||||
"transient",
|
|
||||||
"volatile"
|
|
||||||
]);
|
|
||||||
|
|
||||||
var KEYWORDS_BEFORE_EXPRESSION = array_to_hash([
|
var OPERATOR_CHARS = makePredicate(characters("+-*&%=<>!?|~^"));
|
||||||
"return",
|
|
||||||
"new",
|
|
||||||
"delete",
|
|
||||||
"throw",
|
|
||||||
"else",
|
|
||||||
"case"
|
|
||||||
]);
|
|
||||||
|
|
||||||
var KEYWORDS_ATOM = array_to_hash([
|
|
||||||
"false",
|
|
||||||
"null",
|
|
||||||
"true"
|
|
||||||
]);
|
|
||||||
|
|
||||||
var OPERATOR_CHARS = array_to_hash(characters("+-*&%=<>!?|~^"));
|
|
||||||
|
|
||||||
var RE_HEX_NUMBER = /^0x[0-9a-f]+$/i;
|
var RE_HEX_NUMBER = /^0x[0-9a-f]+$/i;
|
||||||
var RE_OCT_NUMBER = /^0[0-7]+$/;
|
var RE_OCT_NUMBER = /^0[0-7]+$/;
|
||||||
var RE_DEC_NUMBER = /^\d*\.?\d*(?:e[+-]?\d*(?:\d\.?|\.?\d)\d*)?$/i;
|
var RE_DEC_NUMBER = /^\d*\.?\d*(?:e[+-]?\d*(?:\d\.?|\.?\d)\d*)?$/i;
|
||||||
|
|
||||||
var OPERATORS = array_to_hash([
|
var OPERATORS = makePredicate([
|
||||||
"in",
|
"in",
|
||||||
"instanceof",
|
"instanceof",
|
||||||
"typeof",
|
"typeof",
|
||||||
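The hunk above swaps the old array_to_hash tables (looked up via HOP) for makePredicate, which turns a space-separated word list into a function you call directly, e.g. KEYWORDS("if") instead of HOP(KEYWORDS, "if"). A rough sketch of the idea, assuming a simple set-based implementation rather than whatever the real helper generates:

    // Illustrative sketch only; the real makePredicate may compile faster lookup code.
    function makePredicate(words) {
        if (!(words instanceof Array)) words = words.split(" ");
        var set = Object.create(null);                   // plain lookup table
        for (var i = 0; i < words.length; i++) set[words[i]] = true;
        return function (word) { return !!set[word]; };  // usable as KEYWORDS(word)
    }

    var KEYWORDS = makePredicate("break case catch do else");
    KEYWORDS("else");   // true
    KEYWORDS("elsex");  // false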
@@ -173,13 +108,13 @@ var OPERATORS = array_to_hash([
     "||"
 ]);

-var WHITESPACE_CHARS = array_to_hash(characters(" \u00a0\n\r\t\f\u000b\u200b\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000"));
+var WHITESPACE_CHARS = makePredicate(characters(" \u00a0\n\r\t\f\u000b\u200b\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000"));

-var PUNC_BEFORE_EXPRESSION = array_to_hash(characters("[{(,.;:"));
+var PUNC_BEFORE_EXPRESSION = makePredicate(characters("[{(,.;:"));

-var PUNC_CHARS = array_to_hash(characters("[]{}(),;:"));
+var PUNC_CHARS = makePredicate(characters("[]{}(),;:"));

-var REGEXP_MODIFIERS = array_to_hash(characters("gmsiy"));
+var REGEXP_MODIFIERS = makePredicate(characters("gmsiy"));

 /* -----[ Tokenizer ]----- */

@@ -191,17 +126,18 @@ var UNICODE = {
     connector_punctuation: new RegExp("[\\u005F\\u203F\\u2040\\u2054\\uFE33\\uFE34\\uFE4D-\\uFE4F\\uFF3F]")
 };

-function is_letter(ch) {
-    return UNICODE.letter.test(ch);
+function is_letter(code) {
+    return (code >= 97 && code <= 122)
+        || (code >= 65 && code <= 90)
+        || (code >= 0xaa && UNICODE.letter.test(String.fromCharCode(code)));
 };

-function is_digit(ch) {
-    ch = ch.charCodeAt(0);
-    return ch >= 48 && ch <= 57; //XXX: find out if "UnicodeDigit" means something else than 0..9
+function is_digit(code) {
+    return code >= 48 && code <= 57; //XXX: find out if "UnicodeDigit" means something else than 0..9
 };

-function is_alphanumeric_char(ch) {
-    return is_digit(ch) || is_letter(ch);
+function is_alphanumeric_char(code) {
+    return is_digit(code) || is_letter(code);
 };

 function is_unicode_combining_mark(ch) {
@@ -213,24 +149,21 @@ function is_unicode_connector_punctuation(ch) {
 };

 function is_identifier(name) {
-    return /^[a-z_$][a-z0-9_$]*$/i.test(name)
-        && name != "this"
-        && !HOP(KEYWORDS_ATOM, name)
-        && !HOP(RESERVED_WORDS, name)
-        && !HOP(KEYWORDS, name);
+    return /^[a-z_$][a-z0-9_$]*$/i.test(name) && !RESERVED_WORDS(name);
 };

-function is_identifier_start(ch) {
-    return ch == "$" || ch == "_" || is_letter(ch);
+function is_identifier_start(code) {
+    return code == 36 || code == 95 || is_letter(code);
 };

 function is_identifier_char(ch) {
-    return is_identifier_start(ch)
+    var code = ch.charCodeAt(0);
+    return is_identifier_start(code)
+        || is_digit(code)
+        || code == 8204 // \u200c: zero-width non-joiner <ZWNJ>
+        || code == 8205 // \u200d: zero-width joiner <ZWJ> (in my ECMA-262 PDF, this is also 200c)
         || is_unicode_combining_mark(ch)
-        || is_digit(ch)
         || is_unicode_connector_punctuation(ch)
-        || ch == "\u200c" // zero-width non-joiner <ZWNJ>
-        || ch == "\u200d" // zero-width joiner <ZWJ> (in my ECMA-262 PDF, this is also 200c)
     ;
 };

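These character predicates now take a character code instead of a one-character string, so the hot path of the tokenizer compares integers from charCodeAt rather than building and comparing substrings. A small sketch of the difference (illustrative, not taken from the diff):

    var ch = "A";

    // String-based check: compares strings on every call.
    function is_upper_str(ch) { return ch >= "A" && ch <= "Z"; }

    // Code-based check: a couple of integer comparisons.
    function is_upper_code(code) { return code >= 65 && code <= 90; }

    is_upper_str(ch);                 // true
    is_upper_code(ch.charCodeAt(0));  // true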
@@ -248,7 +181,7 @@ function JS_Parse_Error(message, line, col, pos) {
     this.message = message;
     this.line = line;
     this.col = col;
-    this.pos = pos + 1;
+    this.pos = pos;
     this.stack = new Error().stack;
 };

@@ -317,9 +250,9 @@ function tokenizer($TEXT, filename) {
     };

     function token(type, value, is_comment) {
-        S.regex_allowed = ((type == "operator" && !HOP(UNARY_POSTFIX, value)) ||
-                           (type == "keyword" && HOP(KEYWORDS_BEFORE_EXPRESSION, value)) ||
-                           (type == "punc" && HOP(PUNC_BEFORE_EXPRESSION, value)));
+        S.regex_allowed = ((type == "operator" && !UNARY_POSTFIX[value]) ||
+                           (type == "keyword" && KEYWORDS_BEFORE_EXPRESSION(value)) ||
+                           (type == "punc" && PUNC_BEFORE_EXPRESSION(value)));
         var ret = {
             type : type,
             value : value,
@@ -343,16 +276,14 @@ function tokenizer($TEXT, filename) {
     };

     function skip_whitespace() {
-        while (HOP(WHITESPACE_CHARS, peek()))
+        while (WHITESPACE_CHARS(peek()))
             next();
     };

     function read_while(pred) {
-        var ret = "", ch = peek(), i = 0;
-        while (ch && pred(ch, i++)) {
+        var ret = "", ch, i = 0;
+        while ((ch = peek()) && pred(ch, i++))
             ret += next();
-            ch = peek();
-        }
         return ret;
     };

@@ -363,29 +294,22 @@ function tokenizer($TEXT, filename) {
     function read_num(prefix) {
         var has_e = false, after_e = false, has_x = false, has_dot = prefix == ".";
         var num = read_while(function(ch, i){
-            if (ch == "x" || ch == "X") {
-                if (has_x) return false;
-                return has_x = true;
+            var code = ch.charCodeAt(0);
+            switch (code) {
+              case 120: case 88: // xX
+                return has_x ? false : (has_x = true);
+              case 101: case 69: // eE
+                return has_x ? true : has_e ? false : (has_e = after_e = true);
+              case 45: // -
+                return after_e || (i == 0 && !prefix);
+              case 43: // +
+                return after_e;
+              case (after_e = false, 46): // .
+                return (!has_dot && !has_x && !has_e) ? (has_dot = true) : false;
             }
-            if (!has_x && (ch == "E" || ch == "e")) {
-                if (has_e) return false;
-                return has_e = after_e = true;
-            }
-            if (ch == "-") {
-                if (after_e || (i == 0 && !prefix)) return true;
-                return false;
-            }
-            if (ch == "+") return after_e;
-            after_e = false;
-            if (ch == ".") {
-                if (!has_dot && !has_x && !has_e)
-                    return has_dot = true;
-                return false;
-            }
-            return is_alphanumeric_char(ch);
+            return is_alphanumeric_char(code);
         });
-        if (prefix)
-            num = prefix + num;
+        if (prefix) num = prefix + num;
         var valid = parse_js_number(num);
         if (!isNaN(valid)) {
             return token("num", valid);
@@ -396,17 +320,17 @@ function tokenizer($TEXT, filename) {

     function read_escaped_char(in_string) {
         var ch = next(true, in_string);
-        switch (ch) {
-          case "n" : return "\n";
-          case "r" : return "\r";
-          case "t" : return "\t";
-          case "b" : return "\b";
-          case "v" : return "\u000b";
-          case "f" : return "\f";
-          case "0" : return "\0";
-          case "x" : return String.fromCharCode(hex_bytes(2));
-          case "u" : return String.fromCharCode(hex_bytes(4));
-          case "\n": return "";
+        switch (ch.charCodeAt(0)) {
+          case 110 : return "\n";
+          case 114 : return "\r";
+          case 116 : return "\t";
+          case 98  : return "\b";
+          case 118 : return "\u000b"; // \v
+          case 102 : return "\f";
+          case 48  : return "\0";
+          case 120 : return String.fromCharCode(hex_bytes(2)); // \x
+          case 117 : return String.fromCharCode(hex_bytes(4)); // \u
+          case 10  : return ""; // newline
           default  : return ch;
         }
     };
@@ -422,35 +346,33 @@ function tokenizer($TEXT, filename) {
         return num;
     };

-    function read_string() {
-        return with_eof_error("Unterminated string constant", function(){
-            var quote = next(), ret = "";
-            for (;;) {
-                var ch = next(true);
-                if (ch == "\\") {
-                    // read OctalEscapeSequence (XXX: deprecated if "strict mode")
-                    // https://github.com/mishoo/UglifyJS/issues/178
-                    var octal_len = 0, first = null;
-                    ch = read_while(function(ch){
-                        if (ch >= "0" && ch <= "7") {
-                            if (!first) {
-                                first = ch;
-                                return ++octal_len;
-                            }
-                            else if (first <= "3" && octal_len <= 2) return ++octal_len;
-                            else if (first >= "4" && octal_len <= 1) return ++octal_len;
+    var read_string = with_eof_error("Unterminated string constant", function(){
+        var quote = next(), ret = "";
+        for (;;) {
+            var ch = next(true);
+            if (ch == "\\") {
+                // read OctalEscapeSequence (XXX: deprecated if "strict mode")
+                // https://github.com/mishoo/UglifyJS/issues/178
+                var octal_len = 0, first = null;
+                ch = read_while(function(ch){
+                    if (ch >= "0" && ch <= "7") {
+                        if (!first) {
+                            first = ch;
+                            return ++octal_len;
                         }
-                        return false;
-                    });
-                    if (octal_len > 0) ch = String.fromCharCode(parseInt(ch, 8));
-                    else ch = read_escaped_char(true);
-                }
-                else if (ch == quote) break;
-                ret += ch;
+                        else if (first <= "3" && octal_len <= 2) return ++octal_len;
+                        else if (first >= "4" && octal_len <= 1) return ++octal_len;
+                    }
+                    return false;
+                });
+                if (octal_len > 0) ch = String.fromCharCode(parseInt(ch, 8));
+                else ch = read_escaped_char(true);
             }
-            return token("string", ret);
-        });
-    };
+            else if (ch == quote) break;
+            ret += ch;
+        }
+        return token("string", ret);
+    });

     function read_line_comment() {
         next();
@@ -465,25 +387,20 @@ function tokenizer($TEXT, filename) {
         return token("comment1", ret, true);
     };

-    function read_multiline_comment() {
+    var read_multiline_comment = with_eof_error("Unterminated multiline comment", function(){
         next();
-        return with_eof_error("Unterminated multiline comment", function(){
-            var i = find("*/", true),
-                text = S.text.substring(S.pos, i);
-            S.pos = i + 2;
-            S.line += text.split("\n").length - 1;
-            S.newline_before = S.newline_before || text.indexOf("\n") >= 0;
-
-            // https://github.com/mishoo/UglifyJS/issues/#issue/100
-            if (/^@cc_on/i.test(text)) {
-                warn("WARNING: at line " + S.line);
-                warn("*** Found \"conditional comment\": " + text);
-                warn("*** UglifyJS DISCARDS ALL COMMENTS. This means your code might no longer work properly in Internet Explorer.");
-            }
-
-            return token("comment2", text, true);
-        });
-    };
+        var i = find("*/", true);
+        var text = S.text.substring(S.pos, i);
+        var a = text.split("\n"), n = a.length;
+        // update stream position
+        S.pos = i + 2;
+        S.line += n - 1;
+        if (n > 1) S.col = a[n - 1].length;
+        else S.col += a[n - 1].length;
+        S.col += 2;
+        S.newline_before = S.newline_before || text.indexOf("\n") >= 0;
+        return token("comment2", text, true);
+    });

     function read_name() {
         var backslash = false, name = "", ch, escaped = false, hex;
@@ -501,42 +418,40 @@ function tokenizer($TEXT, filename) {
                 backslash = false;
             }
         }
-        if (HOP(KEYWORDS, name) && escaped) {
+        if (KEYWORDS(name) && escaped) {
            hex = name.charCodeAt(0).toString(16).toUpperCase();
            name = "\\u" + "0000".substr(hex.length) + hex + name.slice(1);
         }
         return name;
     };

-    function read_regexp(regexp) {
-        return with_eof_error("Unterminated regular expression", function(){
-            var prev_backslash = false, ch, in_class = false;
-            while ((ch = next(true))) if (prev_backslash) {
-                regexp += "\\" + ch;
-                prev_backslash = false;
-            } else if (ch == "[") {
-                in_class = true;
-                regexp += ch;
-            } else if (ch == "]" && in_class) {
-                in_class = false;
-                regexp += ch;
-            } else if (ch == "/" && !in_class) {
-                break;
-            } else if (ch == "\\") {
-                prev_backslash = true;
-            } else {
-                regexp += ch;
-            }
-            var mods = read_name();
-            return token("regexp", [ regexp, mods ]);
-        });
-    };
+    var read_regexp = with_eof_error("Unterminated regular expression", function(regexp){
+        var prev_backslash = false, ch, in_class = false;
+        while ((ch = next(true))) if (prev_backslash) {
+            regexp += "\\" + ch;
+            prev_backslash = false;
+        } else if (ch == "[") {
+            in_class = true;
+            regexp += ch;
+        } else if (ch == "]" && in_class) {
+            in_class = false;
+            regexp += ch;
+        } else if (ch == "/" && !in_class) {
+            break;
+        } else if (ch == "\\") {
+            prev_backslash = true;
+        } else {
+            regexp += ch;
+        }
+        var mods = read_name();
+        return token("regexp", new RegExp(regexp, mods));
+    });

     function read_operator(prefix) {
         function grow(op) {
             if (!peek()) return op;
             var bigger = op + peek();
-            if (HOP(OPERATORS, bigger)) {
+            if (OPERATORS(bigger)) {
                 next();
                 return grow(bigger);
             } else {
@@ -564,29 +479,28 @@ function tokenizer($TEXT, filename) {

     function handle_dot() {
         next();
-        return is_digit(peek())
+        return is_digit(peek().charCodeAt(0))
             ? read_num(".")
             : token("punc", ".");
     };

     function read_word() {
         var word = read_name();
-        return HOP(KEYWORDS_ATOM, word)
-            ? token("atom", word)
-            : !HOP(KEYWORDS, word)
-            ? token("name", word)
-            : HOP(OPERATORS, word)
-            ? token("operator", word)
-            : token("keyword", word);
+        return KEYWORDS_ATOM(word) ? token("atom", word)
+            : !KEYWORDS(word) ? token("name", word)
+            : OPERATORS(word) ? token("operator", word)
+            : token("keyword", word);
     };

     function with_eof_error(eof_error, cont) {
-        try {
-            return cont();
-        } catch(ex) {
-            if (ex === EX_EOF) parse_error(eof_error);
-            else throw ex;
-        }
+        return function(x) {
+            try {
+                return cont(x);
+            } catch(ex) {
+                if (ex === EX_EOF) parse_error(eof_error);
+                else throw ex;
+            }
+        };
     };

     function next_token(force_regexp) {
@@ -596,13 +510,16 @@ function tokenizer($TEXT, filename) {
         start_token();
         var ch = peek();
         if (!ch) return token("eof");
-        if (is_digit(ch)) return read_num();
-        if (ch == '"' || ch == "'") return read_string();
-        if (HOP(PUNC_CHARS, ch)) return token("punc", next());
-        if (ch == ".") return handle_dot();
-        if (ch == "/") return handle_slash();
-        if (HOP(OPERATOR_CHARS, ch)) return read_operator();
-        if (ch == "\\" || is_identifier_start(ch)) return read_word();
+        var code = ch.charCodeAt(0);
+        switch (code) {
+          case 34: case 39: return read_string();
+          case 46: return handle_dot();
+          case 47: return handle_slash();
+        }
+        if (is_digit(code)) return read_num();
+        if (PUNC_CHARS(ch)) return token("punc", next());
+        if (OPERATOR_CHARS(ch)) return read_operator();
+        if (code == 92 || is_identifier_start(code)) return read_word();
         parse_error("Unexpected character '" + ch + "'");
     };

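with_eof_error now returns a wrapped function instead of running its callback immediately, which is why read_string, read_multiline_comment and read_regexp above can be defined once as pre-wrapped readers. A small sketch of that decorator pattern (illustrative; parse_error is replaced with a plain Error here, and the names are assumptions):

    var EX_EOF = {};  // sentinel thrown by the reader when input runs out

    function with_eof_error(message, cont) {
        return function (x) {             // wrap once, call many times
            try {
                return cont(x);
            } catch (ex) {
                if (ex === EX_EOF) throw new Error(message);
                throw ex;                 // anything else is not ours to handle
            }
        };
    }

    var read_thing = with_eof_error("Unterminated thing", function () {
        throw EX_EOF;                     // simulate hitting end of file
    });
    // read_thing();                      // would throw Error("Unterminated thing")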
@@ -617,7 +534,7 @@ function tokenizer($TEXT, filename) {

 /* -----[ Parser (constants) ]----- */

-var UNARY_PREFIX = array_to_hash([
+var UNARY_PREFIX = makePredicate([
     "typeof",
     "void",
     "delete",
@@ -629,19 +546,9 @@ var UNARY_PREFIX = array_to_hash([
     "+"
 ]);

-var UNARY_POSTFIX = array_to_hash([ "--", "++" ]);
+var UNARY_POSTFIX = makePredicate([ "--", "++" ]);

-var ASSIGNMENT = (function(a, ret, i){
-    while (i < a.length) {
-        ret[a[i]] = a[i];
-        i++;
-    }
-    return ret;
-})(
-    [ "=", "+=", "-=", "/=", "*=", "%=", ">>=", "<<=", ">>>=", "|=", "^=", "&=" ],
-    {},
-    0
-);
+var ASSIGNMENT = makePredicate([ "=", "+=", "-=", "/=", "*=", "%=", ">>=", "<<=", ">>>=", "|=", "^=", "&=" ]);

 var PRECEDENCE = (function(a, ret){
     for (var i = 0, n = 1; i < a.length; ++i, ++n) {
@@ -759,7 +666,7 @@ function parse($TEXT, options) {

     function parenthesised() {
         expect("(");
-        var exp = expression();
+        var exp = expression(true);
         expect(")");
         return exp;
     };
@@ -767,7 +674,7 @@ function parse($TEXT, options) {
     function embed_tokens(parser) {
         return function() {
             var start = S.token;
-            var expr = parser.apply(this, arguments);
+            var expr = parser();
             var end = prev();
             expr.start = start;
             expr.end = end;
@@ -776,6 +683,7 @@ function parse($TEXT, options) {
     };

     var statement = embed_tokens(function() {
+        var tmp;
         if (is("operator", "/") || is("operator", "/=")) {
             S.peeked = null;
             S.token = S.input(S.token.value.substr(1)); // force regexp
@@ -817,7 +725,7 @@ function parse($TEXT, options) {
             }

           case "keyword":
-            switch (prog1(S.token.value, next)) {
+            switch (tmp = S.token.value, next(), tmp) {
               case "break":
                 return break_cont(AST_Break);

@@ -831,7 +739,7 @@ function parse($TEXT, options) {
              case "do":
                return new AST_Do({
                    body      : in_loop(statement),
-                   condition : (expect_token("keyword", "while"), prog1(parenthesised, semicolon))
+                   condition : (expect_token("keyword", "while"), tmp = parenthesised(), semicolon(), tmp)
                });

              case "while":
@@ -857,30 +765,30 @@ function parse($TEXT, options) {
                    ? (next(), null)
                    : can_insert_semicolon()
                    ? null
-                   : prog1(expression, semicolon) )
+                   : (tmp = expression(true), semicolon(), tmp) )
                });

              case "switch":
                return new AST_Switch({
                    expression : parenthesised(),
-                   body       : switch_body_()
+                   body       : in_loop(switch_body_)
                });

              case "throw":
                if (S.token.nlb)
                    croak("Illegal newline after 'throw'");
                return new AST_Throw({
-                   value: prog1(expression, semicolon)
+                   value: (tmp = expression(true), semicolon(), tmp)
                });

              case "try":
                return try_();

              case "var":
-               return prog1(var_, semicolon);
+               return tmp = var_(), semicolon(), tmp;

              case "const":
-               return prog1(const_, semicolon);
+               return tmp = const_(), semicolon(), tmp;

              case "with":
                return new AST_With({
@@ -910,8 +818,8 @@ function parse($TEXT, options) {
         return new AST_LabeledStatement({ body: stat, label: label });
     };

-    function simple_statement() {
-        return new AST_SimpleStatement({ body: prog1(expression, semicolon) });
+    function simple_statement(tmp) {
+        return new AST_SimpleStatement({ body: (tmp = expression(true), semicolon(), tmp) });
     };

     function break_cont(type) {
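Several sites above replace prog1(expression, semicolon) with an explicit temporary: (tmp = expression(true), semicolon(), tmp). Judging from how the calls are rewritten, prog1 evaluates its first argument (calling it if it is a function), then calls the remaining arguments, and returns the first result; the rewrite just spells that out with the comma operator. A rough sketch of the equivalence (this prog1 is a guess at the helper's behaviour, not copied from the source):

    function prog1(ret) {
        if (ret instanceof Function) ret = ret();
        for (var i = 1; i < arguments.length; ++i) arguments[i]();
        return ret;
    }

    function expression() { return "EXPR"; }
    function semicolon()  { /* would consume a ";" token */ }

    var a = prog1(expression, semicolon);                  // old style, helper call
    var tmp, b = (tmp = expression(), semicolon(), tmp);   // new style, no helper
    // a and b are both "EXPR"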
@@ -948,9 +856,9 @@ function parse($TEXT, options) {

     function regular_for(init) {
         expect(";");
-        var test = is("punc", ";") ? null : expression();
+        var test = is("punc", ";") ? null : expression(true);
         expect(";");
-        var step = is("punc", ")") ? null : expression();
+        var step = is("punc", ")") ? null : expression(true);
         expect(")");
         return new AST_For({
             init : init,
@@ -962,7 +870,7 @@ function parse($TEXT, options) {

     function for_in(init) {
         var lhs = init instanceof AST_Var ? init.definitions[0].name : null;
-        var obj = expression();
+        var obj = expression(true);
         expect(")");
         return new AST_ForIn({
             init : init,
@@ -972,14 +880,16 @@ function parse($TEXT, options) {
         });
     };

-    var function_ = function(in_statement) {
+    var function_ = function(in_statement, ctor) {
         var name = is("name") ? as_symbol(in_statement
                                           ? AST_SymbolDefun
+                                          : ctor === AST_Accessor
+                                          ? AST_SymbolAccessor
                                           : AST_SymbolLambda) : null;
         if (in_statement && !name)
             unexpected();
         expect("(");
-        var ctor = in_statement ? AST_Defun : AST_Function;
+        if (!ctor) ctor = in_statement ? AST_Defun : AST_Function;
         return new ctor({
             name: name,
             argnames: (function(first, a){
@@ -990,10 +900,8 @@ function parse($TEXT, options) {
                 next();
                 return a;
             })(true, []),
-            body: embed_tokens(function(){
+            body: (function(loop, labels){
                 ++S.in_function;
-                var loop = S.in_loop;
-                var labels = S.labels;
                 S.in_directives = true;
                 S.in_loop = 0;
                 S.labels = [];
@@ -1002,7 +910,7 @@ function parse($TEXT, options) {
                 S.in_loop = loop;
                 S.labels = labels;
                 return a;
-            })()
+            })(S.in_loop, S.labels)
         });
     };

@@ -1030,17 +938,17 @@ function parse($TEXT, options) {
         return a;
     };

-    var switch_body_ = curry(in_loop, function(){
+    function switch_body_() {
         expect("{");
-        var a = [], cur = null, branch = null;
+        var a = [], cur = null, branch = null, tmp;
         while (!is("punc", "}")) {
             if (is("eof")) unexpected();
             if (is("keyword", "case")) {
                 if (branch) branch.end = prev();
                 cur = [];
                 branch = new AST_Case({
-                    start      : prog1(S.token, next),
-                    expression : expression(),
+                    start      : (tmp = S.token, next(), tmp),
+                    expression : expression(true),
                     body       : cur
                 });
                 a.push(branch);
@@ -1050,9 +958,9 @@ function parse($TEXT, options) {
                 if (branch) branch.end = prev();
                 cur = [];
                 branch = new AST_Default({
-                    start : prog1(S.token, next, curry(expect, ":")),
+                    start : (tmp = S.token, next(), expect(":"), tmp),
                     body  : cur
-                })
+                });
                 a.push(branch);
             }
             else {
@@ -1063,7 +971,7 @@ function parse($TEXT, options) {
         if (branch) branch.end = prev();
         next();
         return a;
-    });
+    };

     function try_() {
         var body = block_(), bcatch = null, bfinally = null;
@@ -1160,7 +1068,7 @@ function parse($TEXT, options) {
             ret = new AST_String({ start: tok, end: tok, value: tok.value });
             break;
           case "regexp":
-            ret = new AST_RegExp({ start: tok, end: tok, pattern: tok.value[0], mods: tok.value[1] });
+            ret = new AST_RegExp({ start: tok, end: tok, value: tok.value });
             break;
           case "atom":
             switch (tok.value) {
@@ -1189,7 +1097,7 @@ function parse($TEXT, options) {
         switch (start.value) {
           case "(":
             next();
-            var ex = expression();
+            var ex = expression(true);
             ex.start = start;
             ex.end = S.token;
             expect(")");
@@ -1208,7 +1116,7 @@ function parse($TEXT, options) {
             func.end = prev();
             return subscripts(func, allow_calls);
         }
-        if (HOP(ATOMIC_START_TOKEN, S.token.type)) {
+        if (ATOMIC_START_TOKEN[S.token.type]) {
             return subscripts(as_atom_node(), allow_calls);
         }
         unexpected();
@@ -1252,7 +1160,7 @@ function parse($TEXT, options) {
                 a.push(new AST_ObjectGetter({
                     start : start,
                     key   : name,
-                    value : function_(false),
+                    value : function_(false, AST_Accessor),
                     end   : prev()
                 }));
                 continue;
@@ -1261,7 +1169,7 @@ function parse($TEXT, options) {
                 a.push(new AST_ObjectSetter({
                     start : start,
                     key   : name,
-                    value : function_(false),
+                    value : function_(false, AST_Accessor),
                     end   : prev()
                 }));
                 continue;
@@ -1280,26 +1188,30 @@ function parse($TEXT, options) {
     });

     function as_property_name() {
-        switch (S.token.type) {
+        var tmp = S.token;
+        next();
+        switch (tmp.type) {
          case "num":
          case "string":
          case "name":
          case "operator":
          case "keyword":
          case "atom":
-           return prog1(S.token.value, next);
+           return tmp.value;
          default:
            unexpected();
        }
     };

     function as_name() {
-        switch (S.token.type) {
+        var tmp = S.token;
+        next();
+        switch (tmp.type) {
          case "name":
          case "operator":
          case "keyword":
          case "atom":
-           return prog1(S.token.value, next);
+           return tmp.value;
          default:
            unexpected();
        }
@@ -1333,7 +1245,7 @@ function parse($TEXT, options) {
        }
        if (is("punc", "[")) {
            next();
-           var prop = expression();
+           var prop = expression(true);
            expect("]");
            return subscripts(new AST_Sub({
                start : start,
@@ -1356,16 +1268,15 @@ function parse($TEXT, options) {

     var maybe_unary = function(allow_calls) {
         var start = S.token;
-        if (is("operator") && HOP(UNARY_PREFIX, S.token.value)) {
-            var ex = make_unary(AST_UnaryPrefix,
-                                prog1(S.token.value, next),
-                                maybe_unary(allow_calls));
+        if (is("operator") && UNARY_PREFIX(start.value)) {
+            next();
+            var ex = make_unary(AST_UnaryPrefix, start.value, maybe_unary(allow_calls));
             ex.start = start;
             ex.end = prev();
             return ex;
         }
         var val = expr_atom(allow_calls);
-        while (is("operator") && HOP(UNARY_POSTFIX, S.token.value) && !S.token.nlb) {
+        while (is("operator") && UNARY_POSTFIX(S.token.value) && !S.token.nlb) {
             val = make_unary(AST_UnaryPostfix, S.token.value, val);
             val.start = start;
             val.end = S.token;
@@ -1436,13 +1347,13 @@ function parse($TEXT, options) {
     var maybe_assign = function(no_in) {
         var start = S.token;
         var left = maybe_conditional(no_in), val = S.token.value;
-        if (is("operator") && HOP(ASSIGNMENT, val)) {
+        if (is("operator") && ASSIGNMENT(val)) {
             if (is_assignable(left)) {
                 next();
                 return new AST_Assign({
                     start    : start,
                     left     : left,
-                    operator : ASSIGNMENT[val],
+                    operator : val,
                     right    : maybe_assign(no_in),
                     end      : peek()
                 });
@@ -1453,8 +1364,6 @@ function parse($TEXT, options) {
     };

     var expression = function(commas, no_in) {
-        if (arguments.length == 0)
-            commas = true;
         var start = S.token;
         var expr = maybe_assign(no_in);
         if (commas && is("punc", ",")) {
@@ -1493,5 +1402,3 @@ function parse($TEXT, options) {
     })();

 };
-
-var warn = function() {};
182  lib/scope.js
@@ -43,7 +43,7 @@

 "use strict";

-function SymbolDef(scope, orig) {
+function SymbolDef(scope, index, orig) {
     this.name = orig.name;
     this.orig = [ orig ];
     this.scope = scope;
@@ -52,15 +52,18 @@ function SymbolDef(scope, orig) {
     this.mangled_name = null;
     this.undeclared = false;
     this.constant = false;
+    this.index = index;
 };

 SymbolDef.prototype = {
-    unmangleable: function() {
-        return this.global || this.undeclared || this.scope.uses_eval || this.scope.uses_with;
+    unmangleable: function(options) {
+        return this.global
+            || this.undeclared
+            || (!(options && options.eval) && (this.scope.uses_eval || this.scope.uses_with));
     },
-    mangle: function() {
-        if (!this.mangled_name && !this.unmangleable())
-            this.mangled_name = this.scope.next_mangled();
+    mangle: function(options) {
+        if (!this.mangled_name && !this.unmangleable(options))
+            this.mangled_name = this.scope.next_mangled(options);
     }
 };

@@ -75,14 +78,17 @@ AST_Toplevel.DEFMETHOD("figure_out_scope", function(){
     // pass 1: setup scope chaining and handle definitions
     var self = this;
     var scope = self.parent_scope = null;
-    var labels = {};
+    var labels = new Dictionary();
+    var nesting = 0;
     var tw = new TreeWalker(function(node, descend){
         if (node instanceof AST_Scope) {
-            node.init_scope_vars();
+            node.init_scope_vars(nesting);
             var save_scope = node.parent_scope = scope;
+            ++nesting;
             scope = node;
             descend();
             scope = save_scope;
+            --nesting;
             return true; // don't descend again in TreeWalker
         }
         if (node instanceof AST_Directive) {
@@ -97,11 +103,11 @@ AST_Toplevel.DEFMETHOD("figure_out_scope", function(){
         }
         if (node instanceof AST_LabeledStatement) {
             var l = node.label;
-            if (labels[l.name])
+            if (labels.has(l.name))
                 throw new Error(string_template("Label {name} defined twice", l));
-            labels[l.name] = l;
+            labels.set(l.name, l);
             descend();
-            delete labels[l.name];
+            labels.del(l.name);
             return true; // no descend again
         }
         if (node instanceof AST_SymbolDeclaration) {
@@ -113,29 +119,16 @@ AST_Toplevel.DEFMETHOD("figure_out_scope", function(){
         if (node instanceof AST_Label) {
             node.thedef = node;
             node.init_scope_vars();
-            var p = tw.parent(); // AST_LabeledStatement
-            var block = p.body;
-            if (block instanceof AST_StatementWithBody)
-                block = block.body;
-            node.label_target = block;
         }
-        if (node instanceof AST_LoopControl) {
-            if (!node.label) {
-                var a = tw.stack, i = a.length - 1;
-                while (--i >= 0) {
-                    var p = a[i];
-                    if (p instanceof AST_For
-                        || p instanceof AST_ForIn
-                        || p instanceof AST_DWLoop
-                        || p instanceof AST_SwitchBranch) {
-                        node.loopcontrol_target = p.body;
-                        break;
-                    }
-                }
-            }
-        }
-        else if (node instanceof AST_SymbolLambda) {
-            scope.def_function(node);
+        if (node instanceof AST_SymbolLambda) {
+            //scope.def_function(node);
+            //
+            // https://github.com/mishoo/UglifyJS2/issues/24 -- MSIE
+            // leaks function expression names into the containing
+            // scope. Don't like this fix but seems we can't do any
+            // better. IE: please die. Please!
+            (node.scope = scope.parent_scope).def_function(node);
             node.init.push(tw.parent());
         }
         else if (node instanceof AST_SymbolDefun) {
@@ -164,7 +157,7 @@ AST_Toplevel.DEFMETHOD("figure_out_scope", function(){
             scope.def_variable(node);
         }
         if (node instanceof AST_LabelRef) {
-            var sym = labels[node.name];
+            var sym = labels.get(node.name);
             if (!sym) throw new Error(string_template("Undefined label {name} [{line},{col}]", {
                 name: node.name,
                 line: node.start.line,
@@ -177,7 +170,7 @@ AST_Toplevel.DEFMETHOD("figure_out_scope", function(){

     // pass 2: find back references and eval
     var func = null;
-    var globals = self.globals = {};
+    var globals = self.globals = new Dictionary();
     var tw = new TreeWalker(function(node, descend){
         if (node instanceof AST_Lambda) {
             var prev_func = func;
@@ -195,15 +188,15 @@ AST_Toplevel.DEFMETHOD("figure_out_scope", function(){
             var sym = node.scope.find_variable(name);
             if (!sym) {
                 var g;
-                if (HOP(globals, name)) {
-                    g = globals[name];
+                if (globals.has(name)) {
+                    g = globals.get(name);
                 } else {
-                    g = new SymbolDef(self, node);
+                    g = new SymbolDef(self, globals.size(), node);
                     g.undeclared = true;
-                    globals[name] = g;
+                    globals.set(name, g);
                 }
                 node.thedef = g;
-                if (name == "eval") {
+                if (name == "eval" && tw.parent() instanceof AST_Call) {
                     for (var s = node.scope; s && !s.uses_eval; s = s.parent_scope)
                         s.uses_eval = true;
                 }
@@ -220,15 +213,16 @@ AST_Toplevel.DEFMETHOD("figure_out_scope", function(){
     self.walk(tw);
 });

-AST_Scope.DEFMETHOD("init_scope_vars", function(){
+AST_Scope.DEFMETHOD("init_scope_vars", function(nesting){
     this.directives = []; // contains the directives defined in this scope, i.e. "use strict"
-    this.variables = {}; // map name to AST_SymbolVar (variables defined in this scope; includes functions)
-    this.functions = {}; // map name to AST_SymbolDefun (functions defined in this scope)
+    this.variables = new Dictionary(); // map name to AST_SymbolVar (variables defined in this scope; includes functions)
+    this.functions = new Dictionary(); // map name to AST_SymbolDefun (functions defined in this scope)
     this.uses_with = false; // will be set to true if this or some nested scope uses the `with` statement
     this.uses_eval = false; // will be set to true if this or nested scope uses the global `eval`
     this.parent_scope = null; // the parent scope
     this.enclosed = []; // a list of variables from this or outer scope(s) that are referenced from this or inner scopes
     this.cname = -1; // the current index for mangling functions/variables
+    this.nesting = nesting; // the nesting level of this scope (0 means toplevel)
 });

 AST_Scope.DEFMETHOD("strict", function(){
@@ -236,7 +230,7 @@ AST_Scope.DEFMETHOD("strict", function(){
 });

 AST_Lambda.DEFMETHOD("init_scope_vars", function(){
-    AST_Scope.prototype.init_scope_vars.call(this);
+    AST_Scope.prototype.init_scope_vars.apply(this, arguments);
     this.uses_arguments = false;
 });

@@ -246,8 +240,10 @@ AST_SymbolRef.DEFMETHOD("reference", function() {
     var s = this.scope;
     while (s) {
         push_uniq(s.enclosed, def);
+        if (s === def.scope) break;
         s = s.parent_scope;
     }
+    this.frame = this.scope.nesting - def.scope.nesting;
 });

 AST_SymbolDeclaration.DEFMETHOD("init_scope_vars", function(){
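Plain object maps ({}, HOP, delete) are replaced throughout figure_out_scope by a Dictionary with has/get/set/del/size/each methods. A minimal sketch of such a class, inferred from the calls visible in the diff (the real implementation may differ, for instance in how it guards against keys like "__proto__"):

    function Dictionary() {
        this._values = Object.create(null);   // no Object.prototype keys leaking in
        this._size = 0;
    }
    Dictionary.prototype = {
        set: function (key, val) {
            if (!this.has(key)) ++this._size;
            this._values["$" + key] = val;     // the "$" prefix is an assumption here
            return this;
        },
        get: function (key) { return this._values["$" + key]; },
        has: function (key) { return ("$" + key) in this._values; },
        del: function (key) {
            if (this.has(key)) {
                --this._size;
                delete this._values["$" + key];
            }
            return this;
        },
        size: function () { return this._size; },
        each: function (f) {
            for (var i in this._values) f(this._values[i], i.substr(1));
        }
    };

    var labels = new Dictionary();
    labels.set("loop", { name: "loop" });
    labels.has("loop");   // true
    labels.size();        // 1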
@@ -264,9 +260,8 @@ AST_LabelRef.DEFMETHOD("reference", function(){

 AST_Scope.DEFMETHOD("find_variable", function(name){
     if (name instanceof AST_Symbol) name = name.name;
-    return HOP(this.variables, name)
-        ? this.variables[name]
-        : (this.parent_scope && this.parent_scope.find_variable(name));
+    return this.variables.get(name)
+        || (this.parent_scope && this.parent_scope.find_variable(name));
 });

 AST_Scope.DEFMETHOD("has_directive", function(value){
@@ -275,23 +270,23 @@ AST_Scope.DEFMETHOD("has_directive", function(value){
 });

 AST_Scope.DEFMETHOD("def_function", function(symbol){
-    this.functions[symbol.name] = this.def_variable(symbol);
+    this.functions.set(symbol.name, this.def_variable(symbol));
 });

 AST_Scope.DEFMETHOD("def_variable", function(symbol){
     var def;
-    if (!HOP(this.variables, symbol.name)) {
-        def = new SymbolDef(this, symbol);
-        this.variables[symbol.name] = def;
+    if (!this.variables.has(symbol.name)) {
+        def = new SymbolDef(this, this.variables.size(), symbol);
+        this.variables.set(symbol.name, def);
         def.global = !this.parent_scope;
     } else {
-        def = this.variables[symbol.name];
+        def = this.variables.get(symbol.name);
         def.orig.push(symbol);
     }
     return symbol.thedef = def;
 });

-AST_Scope.DEFMETHOD("next_mangled", function(){
+AST_Scope.DEFMETHOD("next_mangled", function(options){
     var ext = this.enclosed, n = ext.length;
     out: while (true) {
         var m = base54(++this.cname);
@@ -301,7 +296,7 @@ AST_Scope.DEFMETHOD("next_mangled", function(){
         // inner scopes.
         for (var i = n; --i >= 0;) {
             var sym = ext[i];
-            var name = sym.mangled_name || (sym.unmangleable() && sym.name);
+            var name = sym.mangled_name || (sym.unmangleable(options) && sym.name);
             if (m == name) continue out;
         }
         return m;
@@ -313,8 +308,13 @@ AST_Scope.DEFMETHOD("references", function(sym){
     return this.enclosed.indexOf(sym) < 0 ? null : sym;
 });

-AST_Symbol.DEFMETHOD("unmangleable", function(){
-    return this.definition().unmangleable();
+AST_Symbol.DEFMETHOD("unmangleable", function(options){
+    return this.definition().unmangleable(options);
+});
+
+// property accessors are not mangleable
+AST_SymbolAccessor.DEFMETHOD("unmangleable", function(){
+    return true;
 });

 // labels are always mangleable
@@ -347,16 +347,15 @@ AST_Symbol.DEFMETHOD("global", function(){
     return this.definition().global;
 });

-AST_LoopControl.DEFMETHOD("target", function(){
-    if (this.label) return this.label.definition().label_target;
-    return this.loopcontrol_target;
+AST_Toplevel.DEFMETHOD("_default_mangler_options", function(options){
+    return defaults(options, {
+        except : [],
+        eval   : false,
+    });
 });

 AST_Toplevel.DEFMETHOD("mangle_names", function(options){
-    options = defaults(options, {
-        sort   : false,
-        except : []
-    });
+    options = this._default_mangler_options(options);
     // We only need to mangle declaration nodes. Special logic wired
     // into the code generator will display the mangled name if it's
     // present (and for AST_SymbolRef-s it'll use the mangled name of
@@ -373,16 +372,11 @@ AST_Toplevel.DEFMETHOD("mangle_names", function(options){
         }
         if (node instanceof AST_Scope) {
             var p = tw.parent();
-            var is_setget = p instanceof AST_ObjectSetter || p instanceof AST_ObjectGetter;
-            var a = node.variables;
-            for (var i in a) if (HOP(a, i)) {
-                var symbol = a[i];
-                if (!(is_setget && symbol instanceof AST_SymbolLambda)) {
-                    if (options.except.indexOf(symbol.name) < 0) {
-                        to_mangle.push(symbol);
-                    }
+            node.variables.each(function(symbol){
+                if (options.except.indexOf(symbol.name) < 0) {
+                    to_mangle.push(symbol);
                 }
-            }
+            });
             return;
         }
         if (node instanceof AST_Label) {
@@ -393,15 +387,11 @@ AST_Toplevel.DEFMETHOD("mangle_names", function(options){
         }
     });
     this.walk(tw);

-    if (options.sort) to_mangle = mergeSort(to_mangle, function(a, b){
-        return b.references.length - a.references.length;
-    });
-
     to_mangle.forEach(function(def){ def.mangle(options) });
 });

-AST_Toplevel.DEFMETHOD("compute_char_frequency", function(){
+AST_Toplevel.DEFMETHOD("compute_char_frequency", function(options){
+    options = this._default_mangler_options(options);
     var tw = new TreeWalker(function(node){
         if (node instanceof AST_Constant)
             base54.consider(node.print_to_string());
@@ -459,7 +449,7 @@ AST_Toplevel.DEFMETHOD("compute_char_frequency", function(){
             base54.consider("catch");
         else if (node instanceof AST_Finally)
             base54.consider("finally");
-        else if (node instanceof AST_Symbol && node.unmangleable())
+        else if (node instanceof AST_Symbol && node.unmangleable(options))
             base54.consider(node.name);
         else if (node instanceof AST_Unary || node instanceof AST_Binary)
             base54.consider(node.operator);
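mangle_names and compute_char_frequency now share _default_mangler_options, so both accept the same except/eval settings. A hedged usage sketch (anything beyond the method and option names shown in the diff, such as how the AST is obtained, is an assumption):

    // Assuming `toplevel` is an AST_Toplevel produced by the parser.
    toplevel.figure_out_scope();

    // Count character frequency first so base54 hands out the cheapest names;
    // pass the same options the mangler will use.
    toplevel.compute_char_frequency({ except: ["$", "require"], eval: false });

    // eval: true would allow mangling even in scopes that use eval/with.
    toplevel.mangle_names({ except: ["$", "require"], eval: false });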
@@ -467,21 +457,21 @@ AST_Toplevel.DEFMETHOD("compute_char_frequency", function(){
             base54.consider(node.property);
     });
     this.walk(tw);
+    base54.sort();
 });

 var base54 = (function() {
     var string = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_0123456789";
     var chars, frequency;
     function reset() {
-        frequency = {};
-        chars = string.split("");
-        chars.map(function(ch){ frequency[ch] = 0 });
+        frequency = Object.create(null);
+        chars = string.split("").map(function(ch){ return ch.charCodeAt(0) });
+        chars.forEach(function(ch){ frequency[ch] = 0 });
     }
     base54.consider = function(str){
         for (var i = str.length; --i >= 0;) {
-            var ch = str.charAt(i);
-            if (string.indexOf(ch) >= 0)
-                ++frequency[ch];
+            var code = str.charCodeAt(i);
+            if (code in frequency) ++frequency[code];
         }
     };
     base54.sort = function() {
@@ -498,7 +488,7 @@ var base54 = (function() {
     function base54(num) {
         var ret = "", base = 54;
         do {
-            ret += chars[num % base];
+            ret += String.fromCharCode(chars[num % base]);
             num = Math.floor(num / base);
             base = 64;
         } while (num > 0);
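base54 turns a symbol's index into a short identifier: the first 54 characters of the table (letters, "$" and "_") are valid as a leading character, and later characters may also be digits, hence the switch from base 54 to base 64 after the first step. A tiny standalone sketch of the numbering, using the character table shown above but without the frequency sorting:

    var string = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_0123456789";

    function base54(num) {
        var ret = "", base = 54;           // first char: letters, "$", "_"
        do {
            ret += string.charAt(num % base);
            num = Math.floor(num / base);
            base = 64;                     // later chars may also be digits
        } while (num > 0);
        return ret;
    }

    base54(0);   // "a"
    base54(53);  // "_"
    base54(54);  // "ab"  (54 = 0 + 1*54)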
@@ -524,8 +514,9 @@ AST_Toplevel.DEFMETHOD("scope_warnings", function(options){
             // XXX: this also warns about JS standard names,
             // i.e. Object, Array, parseInt etc. Should add a list of
             // exceptions.
-            AST_Node.warn("Undeclared symbol: {name} [{line},{col}]", {
+            AST_Node.warn("Undeclared symbol: {name} [{file}:{line},{col}]", {
                 name: node.name,
+                file: node.start.file,
                 line: node.start.line,
                 col: node.start.col
             });
@@ -540,9 +531,10 @@ AST_Toplevel.DEFMETHOD("scope_warnings", function(options){
             if (sym
                 && (sym.undeclared()
                     || (sym.global() && sym.scope !== sym.definition().scope))) {
-                AST_Node.warn("{msg}: {name} [{line},{col}]", {
+                AST_Node.warn("{msg}: {name} [{file}:{line},{col}]", {
                     msg: sym.undeclared() ? "Accidental global?" : "Assignment to global",
                     name: sym.name,
+                    file: sym.start.file,
                     line: sym.start.line,
                     col: sym.start.col
                 });
@@ -552,14 +544,15 @@ AST_Toplevel.DEFMETHOD("scope_warnings", function(options){
             && node instanceof AST_SymbolRef
             && node.undeclared()
             && node.name == "eval") {
-            AST_Node.warn("Eval is used [{line},{col}]", node.start);
+            AST_Node.warn("Eval is used [{file}:{line},{col}]", node.start);
         }
         if (options.unreferenced
-            && node instanceof AST_SymbolDeclaration
+            && (node instanceof AST_SymbolDeclaration || node instanceof AST_Label)
             && node.unreferenced()) {
-            AST_Node.warn("{type} {name} is declared but not referenced [{line},{col}]", {
+            AST_Node.warn("{type} {name} is declared but not referenced [{file}:{line},{col}]", {
                 type: node instanceof AST_Label ? "Label" : "Symbol",
                 name: node.name,
+                file: node.start.file,
                 line: node.start.line,
                 col: node.start.col
             });
@@ -567,8 +560,9 @@ AST_Toplevel.DEFMETHOD("scope_warnings", function(options){
         if (options.func_arguments
             && node instanceof AST_Lambda
             && node.uses_arguments) {
-            AST_Node.warn("arguments used in function {name} [{line},{col}]", {
+            AST_Node.warn("arguments used in function {name} [{file}:{line},{col}]", {
                 name: node.name ? node.name.name : "anonymous",
+                file: node.start.file,
                 line: node.start.line,
                 col: node.start.col
             });
@@ -576,8 +570,10 @@ AST_Toplevel.DEFMETHOD("scope_warnings", function(options){
         if (options.nested_defuns
             && node instanceof AST_Defun
             && !(tw.parent() instanceof AST_Scope)) {
-            AST_Node.warn("Function {name} declared in nested statement [{line},{col}]", {
+            AST_Node.warn("Function {name} declared in nested statement \"{type}\" [{file}:{line},{col}]", {
                 name: node.name.name,
+                type: tw.parent().TYPE,
+                file: node.start.file,
                 line: node.start.line,
                 col: node.start.col
             });
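With these changes every scope warning is prefixed by the file it came from, which matters once several inputs are parsed into one toplevel. A hedged sketch of driving scope_warnings from node follows; the option names come from the checks above, while the module name and the parse/figure_out_scope calls are the library's usual entry points and are assumed here rather than shown by this diff:

    // Hedged sketch: print scope warnings, now including the source file name.
    var UglifyJS = require("uglify-js2");
    var fs = require("fs");

    var file = "input.js";   // hypothetical input file
    var toplevel = UglifyJS.parse(fs.readFileSync(file, "utf8"), { filename: file });
    toplevel.figure_out_scope();

    UglifyJS.AST_Node.warn_function = function(msg) {
        console.error("WARN:", msg);      // e.g. "Eval is used [input.js:3,8]"
    };
    toplevel.scope_warnings({
        unreferenced   : true,
        func_arguments : true,
        nested_defuns  : true
    });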
@@ -59,13 +59,13 @@ TreeTransformer.prototype = new TreeWalker;
     node.DEFMETHOD("transform", function(tw, in_list){
         var x, y;
         tw.push(this);
-        x = tw.before(this, descend, in_list);
+        if (tw.before) x = tw.before(this, descend, in_list);
         if (x === undefined) {
             if (!tw.after) {
                 x = this;
                 descend(x, tw);
             } else {
-                x = this.clone();
+                tw.stack[tw.stack - 1] = x = this.clone();
                 descend(x, tw);
                 y = tw.after(x, in_list);
                 if (y !== undefined) x = y;
@@ -93,10 +93,6 @@ TreeTransformer.prototype = new TreeWalker;
     self.body = self.body.transform(tw);
 });
 
-_(AST_BlockStatement, function(self, tw){
-    self.body = do_list(self.body, tw);
-});
-
 _(AST_Block, function(self, tw){
     self.body = do_list(self.body, tw);
 });
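The first hunk makes the before hook optional, so a TreeTransformer can now be built with only an after callback; in that case transform() clones each node before descending, as the else branch shows. A hedged sketch of an after-only transformer (the rewrite itself is just an illustration; the constructor is assumed to take (before, after) with null allowed for before):

    // Hedged sketch: an "after"-only transformer that doubles every numeric literal.
    var UglifyJS = require("uglify-js2");

    var double_numbers = new UglifyJS.TreeTransformer(null, function(node) {
        if (node instanceof UglifyJS.AST_Number) {
            node.value *= 2;   // `node` is already a clone at this point
            return node;
        }
        // returning undefined keeps the node unchanged
    });

    var ast = UglifyJS.parse("x = 1 + 2;");
    ast = ast.transform(double_numbers);   // now equivalent to: x = 2 + 4;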
106 lib/utils.js

@@ -43,21 +43,8 @@
 
 "use strict";
 
-function curry(f) {
-    var args = slice(arguments, 1);
-    return function() { return f.apply(this, args.concat(slice(arguments))); };
-};
-
-function prog1(ret) {
-    if (ret instanceof Function)
-        ret = ret();
-    for (var i = 1, n = arguments.length; --n > 0; ++i)
-        arguments[i]();
-    return ret;
-};
-
 function array_to_hash(a) {
-    var ret = {};
+    var ret = Object.create(null);
     for (var i = 0; i < a.length; ++i)
         ret[a[i]] = true;
     return ret;
@@ -85,10 +72,6 @@ function find_if(func, array) {
     }
 };
 
-function HOP(obj, prop) {
-    return Object.prototype.hasOwnProperty.call(obj, prop);
-};
-
 function repeat_string(str, i) {
     if (i <= 0) return "";
     if (i == 1) return str;
@@ -107,16 +90,16 @@ function defaults(args, defs, croak) {
     if (args === true)
         args = {};
     var ret = args || {};
-    if (croak) for (var i in ret) if (HOP(ret, i) && !HOP(defs, i))
+    if (croak) for (var i in ret) if (ret.hasOwnProperty(i) && !defs.hasOwnProperty(i))
         throw new DefaultsError("`" + i + "` is not a supported option", defs);
-    for (var i in defs) if (HOP(defs, i)) {
-        ret[i] = (args && HOP(args, i)) ? args[i] : defs[i];
+    for (var i in defs) if (defs.hasOwnProperty(i)) {
+        ret[i] = (args && args.hasOwnProperty(i)) ? args[i] : defs[i];
     }
     return ret;
 };
 
 function merge(obj, ext) {
-    for (var i in ext) if (HOP(ext, i)) {
+    for (var i in ext) if (ext.hasOwnProperty(i)) {
         obj[i] = ext[i];
     }
     return obj;
@@ -158,7 +141,7 @@ var MAP = (function(){
             }
         }
         else {
-            for (i in a) if (HOP(a, i)) if (doit()) break;
+            for (i in a) if (a.hasOwnProperty(i)) if (doit()) break;
         }
         return top.concat(ret);
     };
@@ -183,6 +166,12 @@ function string_template(text, props) {
     });
 };
 
+function remove(array, el) {
+    for (var i = array.length; --i >= 0;) {
+        if (array[i] === el) array.splice(i, 1);
+    }
+};
+
 function mergeSort(array, cmp) {
     if (array.length < 2) return array.slice();
     function merge(a, b) {
@@ -218,3 +207,74 @@ function set_intersection(a, b) {
         return b.indexOf(el) >= 0;
     });
 };
+
+// this function is taken from Acorn [1], written by Marijn Haverbeke
+// [1] https://github.com/marijnh/acorn
+function makePredicate(words) {
+    if (!(words instanceof Array)) words = words.split(" ");
+    var f = "", cats = [];
+    out: for (var i = 0; i < words.length; ++i) {
+        for (var j = 0; j < cats.length; ++j)
+            if (cats[j][0].length == words[i].length) {
+                cats[j].push(words[i]);
+                continue out;
+            }
+        cats.push([words[i]]);
+    }
+    function compareTo(arr) {
+        if (arr.length == 1) return f += "return str === " + JSON.stringify(arr[0]) + ";";
+        f += "switch(str){";
+        for (var i = 0; i < arr.length; ++i) f += "case " + JSON.stringify(arr[i]) + ":";
+        f += "return true}return false;";
+    }
+    // When there are more than three length categories, an outer
+    // switch first dispatches on the lengths, to save on comparisons.
+    if (cats.length > 3) {
+        cats.sort(function(a, b) {return b.length - a.length;});
+        f += "switch(str.length){";
+        for (var i = 0; i < cats.length; ++i) {
+            var cat = cats[i];
+            f += "case " + cat[0].length + ":";
+            compareTo(cat);
+        }
+        f += "}";
+    // Otherwise, simply generate a flat `switch` statement.
+    } else {
+        compareTo(words);
+    }
+    return new Function("str", f);
+};
+
+function Dictionary() {
+    this._values = Object.create(null);
+    this._size = 0;
+};
+Dictionary.prototype = {
+    set: function(key, val) {
+        if (!this.has(key)) ++this._size;
+        this._values["$" + key] = val;
+        return this;
+    },
+    get: function(key) { return this._values["$" + key] },
+    del: function(key) {
+        if (this.has(key)) {
+            --this._size;
+            delete this._values["$" + key];
+        }
+        return this;
+    },
+    has: function(key) { return ("$" + key) in this._values },
+    each: function(f) {
+        for (var i in this._values)
+            f(this._values[i], i.substr(1));
+    },
+    size: function() {
+        return this._size;
+    },
+    map: function(f) {
+        var ret = [];
+        for (var i in this._values)
+            ret.push(f(this._values[i], i.substr(1)));
+        return ret;
+    }
+};
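Two helpers land in lib/utils.js here: makePredicate (borrowed from Acorn) compiles a word list into a fast membership test, and Dictionary wraps a null-prototype object, prefixing keys with "$" so that names such as "__proto__" or "hasOwnProperty" are safe to store. A short usage sketch based directly on the code above (values are illustrative):

    // Usage sketch for the helpers added above.
    var is_keyword = makePredicate("if else return typeof instanceof");
    is_keyword("typeof");     // true
    is_keyword("foo");        // false

    var defs = new Dictionary();
    defs.set("answer", 42).set("__proto__", "safe as a key here");
    defs.has("answer");       // true
    defs.get("__proto__");    // "safe as a key here"
    defs.each(function(value, key) {
        console.log(key + " = " + value);
    });
    defs.size();              // 2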
@@ -3,7 +3,7 @@
     "description": "JavaScript parser, mangler/compressor and beautifier toolkit",
     "homepage": "http://lisperator.net/uglifyjs",
     "main": "tools/node.js",
-    "version": "2.0.0",
+    "version": "2.1.11",
     "engines": { "node" : ">=0.4.0" },
     "maintainers": [{
         "name": "Mihai Bazon",
@@ -15,8 +15,8 @@
         "url": "https://github.com/mishoo/UglifyJS2.git"
     }],
     "dependencies": {
-        "source-map" : "*",
-        "optimist" : "*"
+        "source-map" : "~0.1.7",
+        "optimist" : "~0.3.5"
     },
     "bin": {
         "uglifyjs2" : "bin/uglifyjs2"
97 test/compress/drop-unused.js Normal file
@@ -0,0 +1,97 @@
unused_funarg_1: {
    options = { unused: true };
    input: {
        function f(a, b, c, d, e) {
            return a + b;
        }
    }
    expect: {
        function f(a, b) {
            return a + b;
        }
    }
}

unused_funarg_2: {
    options = { unused: true };
    input: {
        function f(a, b, c, d, e) {
            return a + c;
        }
    }
    expect: {
        function f(a, b, c) {
            return a + c;
        }
    }
}

unused_nested_function: {
    options = { unused: true };
    input: {
        function f(x, y) {
            function g() {
                something();
            }
            return x + y;
        }
    };
    expect: {
        function f(x, y) {
            return x + y;
        }
    }
}

unused_circular_references_1: {
    options = { unused: true };
    input: {
        function f(x, y) {
            // circular reference
            function g() {
                return h();
            }
            function h() {
                return g();
            }
            return x + y;
        }
    };
    expect: {
        function f(x, y) {
            return x + y;
        }
    }
}

unused_circular_references_2: {
    options = { unused: true };
    input: {
        function f(x, y) {
            var foo = 1, bar = baz, baz = foo + bar, qwe = moo();
            return x + y;
        }
    };
    expect: {
        function f(x, y) {
            moo(); // keeps side effect
            return x + y;
        }
    }
}

unused_circular_references_3: {
    options = { unused: true };
    input: {
        function f(x, y) {
            var g = function() { return h() };
            var h = function() { return g() };
            return x + y;
        }
    };
    expect: {
        function f(x, y) {
            return x + y;
        }
    }
}
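Each of these cases hands its options object to a Compressor and checks that compressing input yields expect; the unused option is what drops the unreferenced arguments and inner functions. A hedged sketch of exercising the same option through the node API (module name and the output step are assumed from elsewhere in this changeset):

    // Hedged sketch: apply only the `unused` compression to a snippet.
    var UglifyJS = require("uglify-js2");

    var ast = UglifyJS.parse("function f(a, b, c, d, e) { return a + b; }");
    ast.figure_out_scope();
    ast = ast.transform(UglifyJS.Compressor({ unused: true, warnings: false }));

    var out = UglifyJS.OutputStream({ beautify: true });
    ast.print(out);
    console.log(out + "");   // expected along the lines of: function f(a, b) { return a + b }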
11 test/compress/issue-12.js Normal file
@@ -0,0 +1,11 @@
keep_name_of_getter: {
    options = { unused: true };
    input: { a = { get foo () {} } }
    expect: { a = { get foo () {} } }
}

keep_name_of_setter: {
    options = { unused: true };
    input: { a = { set foo () {} } }
    expect: { a = { set foo () {} } }
}
17 test/compress/issue-22.js Normal file
@@ -0,0 +1,17 @@
return_with_no_value_in_if_body: {
    options = { conditionals: true };
    input: {
        function foo(bar) {
            if (bar) {
                return;
            } else {
                return 1;
            }
        }
    }
    expect: {
        function foo (bar) {
            return bar ? void 0 : 1;
        }
    }
}
31 test/compress/issue-44.js Normal file
@@ -0,0 +1,31 @@
issue_44_valid_ast_1: {
    options = { unused: true };
    input: {
        function a(b) {
            for (var i = 0, e = b.qoo(); ; i++) {}
        }
    }
    expect: {
        function a(b) {
            var i = 0;
            for (b.qoo(); ; i++);
        }
    }
}

issue_44_valid_ast_2: {
    options = { unused: true };
    input: {
        function a(b) {
            if (foo) for (var i = 0, e = b.qoo(); ; i++) {}
        }
    }
    expect: {
        function a(b) {
            if (foo) {
                var i = 0;
                for (b.qoo(); ; i++);
            }
        }
    }
}
163 test/compress/labels.js Normal file
@@ -0,0 +1,163 @@
labels_1: {
    options = { if_return: true, conditionals: true, dead_code: true };
    input: {
        out: {
            if (foo) break out;
            console.log("bar");
        }
    };
    expect: {
        foo || console.log("bar");
    }
}

labels_2: {
    options = { if_return: true, conditionals: true, dead_code: true };
    input: {
        out: {
            if (foo) print("stuff");
            else break out;
            console.log("here");
        }
    };
    expect: {
        if (foo) {
            print("stuff");
            console.log("here");
        }
    }
}

labels_3: {
    options = { if_return: true, conditionals: true, dead_code: true };
    input: {
        for (var i = 0; i < 5; ++i) {
            if (i < 3) continue;
            console.log(i);
        }
    };
    expect: {
        for (var i = 0; i < 5; ++i)
            i < 3 || console.log(i);
    }
}

labels_4: {
    options = { if_return: true, conditionals: true, dead_code: true };
    input: {
        out: for (var i = 0; i < 5; ++i) {
            if (i < 3) continue out;
            console.log(i);
        }
    };
    expect: {
        for (var i = 0; i < 5; ++i)
            i < 3 || console.log(i);
    }
}

labels_5: {
    options = { if_return: true, conditionals: true, dead_code: true };
    // should keep the break-s in the following
    input: {
        while (foo) {
            if (bar) break;
            console.log("foo");
        }
        out: while (foo) {
            if (bar) break out;
            console.log("foo");
        }
    };
    expect: {
        while (foo) {
            if (bar) break;
            console.log("foo");
        }
        out: while (foo) {
            if (bar) break out;
            console.log("foo");
        }
    }
}

labels_6: {
    input: {
        out: break out;
    };
    expect: {}
}

labels_7: {
    options = { if_return: true, conditionals: true, dead_code: true };
    input: {
        while (foo) {
            x();
            y();
            continue;
        }
    };
    expect: {
        while (foo) {
            x();
            y();
        }
    }
}

labels_8: {
    options = { if_return: true, conditionals: true, dead_code: true };
    input: {
        while (foo) {
            x();
            y();
            break;
        }
    };
    expect: {
        while (foo) {
            x();
            y();
            break;
        }
    }
}

labels_9: {
    options = { if_return: true, conditionals: true, dead_code: true };
    input: {
        out: while (foo) {
            x();
            y();
            continue out;
            z();
            k();
        }
    };
    expect: {
        while (foo) {
            x();
            y();
        }
    }
}

labels_10: {
    options = { if_return: true, conditionals: true, dead_code: true };
    input: {
        out: while (foo) {
            x();
            y();
            break out;
            z();
            k();
        }
    };
    expect: {
        out: while (foo) {
            x();
            y();
            break out;
        }
    }
}
123 test/compress/loops.js Normal file
@@ -0,0 +1,123 @@
while_becomes_for: {
    options = { loops: true };
    input: {
        while (foo()) bar();
    }
    expect: {
        for (; foo(); ) bar();
    }
}

drop_if_break_1: {
    options = { loops: true };
    input: {
        for (;;)
            if (foo()) break;
    }
    expect: {
        for (; !foo(););
    }
}

drop_if_break_2: {
    options = { loops: true };
    input: {
        for (;bar();)
            if (foo()) break;
    }
    expect: {
        for (; bar() && !foo(););
    }
}

drop_if_break_3: {
    options = { loops: true };
    input: {
        for (;bar();) {
            if (foo()) break;
            stuff1();
            stuff2();
        }
    }
    expect: {
        for (; bar() && !foo();) {
            stuff1();
            stuff2();
        }
    }
}

drop_if_break_4: {
    options = { loops: true, sequences: true };
    input: {
        for (;bar();) {
            x();
            y();
            if (foo()) break;
            z();
            k();
        }
    }
    expect: {
        for (; bar() && (x(), y(), !foo());) z(), k();
    }
}

drop_if_else_break_1: {
    options = { loops: true };
    input: {
        for (;;) if (foo()) bar(); else break;
    }
    expect: {
        for (; foo(); ) bar();
    }
}

drop_if_else_break_2: {
    options = { loops: true };
    input: {
        for (;bar();) {
            if (foo()) baz();
            else break;
        }
    }
    expect: {
        for (; bar() && foo();) baz();
    }
}

drop_if_else_break_3: {
    options = { loops: true };
    input: {
        for (;bar();) {
            if (foo()) baz();
            else break;
            stuff1();
            stuff2();
        }
    }
    expect: {
        for (; bar() && foo();) {
            baz();
            stuff1();
            stuff2();
        }
    }
}

drop_if_else_break_4: {
    options = { loops: true, sequences: true };
    input: {
        for (;bar();) {
            x();
            y();
            if (foo()) baz();
            else break;
            z();
            k();
        }
    }
    expect: {
        for (; bar() && (x(), y(), foo());) baz(), z(), k();
    }
}
@@ -87,3 +87,75 @@ make_sequences_4: {
         with (x = 5, obj);
     }
 }
+
+lift_sequences_1: {
+    options = { sequences: true };
+    input: {
+        foo = !(x(), y(), bar());
+    }
+    expect: {
+        x(), y(), foo = !bar();
+    }
+}
+
+lift_sequences_2: {
+    options = { sequences: true, evaluate: true };
+    input: {
+        q = 1 + (foo(), bar(), 5) + 7 * (5 / (3 - (a(), (QW=ER), c(), 2))) - (x(), y(), 5);
+    }
+    expect: {
+        foo(), bar(), a(), QW = ER, c(), x(), y(), q = 36
+    }
+}
+
+lift_sequences_3: {
+    options = { sequences: true, conditionals: true };
+    input: {
+        x = (foo(), bar(), baz()) ? 10 : 20;
+    }
+    expect: {
+        foo(), bar(), x = baz() ? 10 : 20;
+    }
+}
+
+lift_sequences_4: {
+    options = { side_effects: true };
+    input: {
+        x = (foo, bar, baz);
+    }
+    expect: {
+        x = baz;
+    }
+}
+
+for_sequences: {
+    options = { sequences: true };
+    input: {
+        // 1
+        foo();
+        bar();
+        for (; false;);
+        // 2
+        foo();
+        bar();
+        for (x = 5; false;);
+        // 3
+        x = (foo in bar);
+        for (; false;);
+        // 4
+        x = (foo in bar);
+        for (y = 5; false;);
+    }
+    expect: {
+        // 1
+        for (foo(), bar(); false;);
+        // 2
+        for (foo(), bar(), x = 5; false;);
+        // 3
+        x = (foo in bar);
+        for (; false;);
+        // 4
+        x = (foo in bar);
+        for (y = 5; false;);
+    }
+}
@@ -37,6 +37,12 @@ function find_test_files(dir) {
     var files = fs.readdirSync(dir).filter(function(name){
         return /\.js$/i.test(name);
     });
+    if (process.argv.length > 2) {
+        var x = process.argv.slice(2);
+        files = files.filter(function(f){
+            return x.indexOf(f) >= 0;
+        });
+    }
     return files;
 }
 
@@ -61,15 +67,19 @@ function run_compress_tests() {
         log_start_file(file);
         function test_case(test) {
             log_test(test.name);
-            var cmp = new U.Compressor(test.options || {}, true);
+            var options = U.defaults(test.options, {
+                warnings: false
+            });
+            var cmp = new U.Compressor(options, true);
             var expect = make_code(as_toplevel(test.expect), false);
             var input = as_toplevel(test.input);
+            var input_code = make_code(test.input);
             var output = input.transform(cmp);
             output.figure_out_scope();
             output = make_code(output, false);
             if (expect != output) {
                 log("!!! failed\n---INPUT---\n{input}\n---OUTPUT---\n{output}\n---EXPECTED---\n{expected}\n\n", {
-                    input: make_code(test.input),
+                    input: input_code,
                     output: output,
                     expected: expect
                 });
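With the argv filter added above, the compress-test runner can be limited to specific test files by naming them on the command line, along the lines of `node test/run-tests.js loops.js labels.js` (the runner's path is assumed here, not shown in this hunk); files not listed are simply skipped, and the new defaults call silences compressor warnings while the cases run.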
142 tools/node.js

@@ -1,27 +1,14 @@
-var save_stderr = process.stderr;
+var path = require("path");
 var fs = require("fs");
-
-// discard annoying NodeJS warning ("path.existsSync is now called `fs.existsSync`.")
-var devnull = fs.createWriteStream("/dev/null");
-process.__defineGetter__("stderr", function(){
-    return devnull;
-});
-
 var vm = require("vm");
 var sys = require("util");
-var path = require("path");
 
 var UglifyJS = vm.createContext({
     sys : sys,
     console : console,
-
     MOZ_SourceMap : require("source-map")
 });
 
-process.__defineGetter__("stderr", function(){
-    return save_stderr;
-});
-
 function load_global(file) {
     file = path.resolve(path.dirname(module.filename), file);
     try {
@@ -35,15 +22,21 @@ function load_global(file) {
     }
 };
 
-load_global("../lib/utils.js");
-load_global("../lib/ast.js");
-load_global("../lib/parse.js");
-load_global("../lib/transform.js");
-load_global("../lib/scope.js");
-load_global("../lib/output.js");
-load_global("../lib/compress.js");
-load_global("../lib/sourcemap.js");
-load_global("../lib/mozilla-ast.js");
+var FILES = exports.FILES = [
+    "../lib/utils.js",
+    "../lib/ast.js",
+    "../lib/parse.js",
+    "../lib/transform.js",
+    "../lib/scope.js",
+    "../lib/output.js",
+    "../lib/compress.js",
+    "../lib/sourcemap.js",
+    "../lib/mozilla-ast.js"
+].map(function(file){
+    return path.join(path.dirname(fs.realpathSync(__filename)), file);
+});
+
+FILES.forEach(load_global);
 
 UglifyJS.AST_Node.warn_function = function(txt) {
     sys.error("WARN: " + txt);
@@ -55,3 +48,106 @@ for (var i in UglifyJS) {
     exports[i] = UglifyJS[i];
     }
 }
+
+exports.minify = function(files, options) {
+    options = UglifyJS.defaults(options, {
+        outSourceMap : null,
+        sourceRoot : null,
+        inSourceMap : null,
+        fromString : false,
+        warnings : false,
+    });
+    if (typeof files == "string")
+        files = [ files ];
+
+    // 1. parse
+    var toplevel = null;
+    files.forEach(function(file){
+        var code = options.fromString
+            ? file
+            : fs.readFileSync(file, "utf8");
+        toplevel = UglifyJS.parse(code, {
+            filename: options.fromString ? "?" : file,
+            toplevel: toplevel
+        });
+    });
+
+    // 2. compress
+    toplevel.figure_out_scope();
+    var sq = UglifyJS.Compressor({
+        warnings: options.warnings,
+    });
+    toplevel = toplevel.transform(sq);
+
+    // 3. mangle
+    toplevel.figure_out_scope();
+    toplevel.compute_char_frequency();
+    toplevel.mangle_names();
+
+    // 4. output
+    var map = null;
+    var inMap = null;
+    if (options.inSourceMap) {
+        inMap = fs.readFileSync(options.inSourceMap, "utf8");
+    }
+    if (options.outSourceMap) map = UglifyJS.SourceMap({
+        file: options.outSourceMap,
+        orig: inMap,
+        root: options.sourceRoot
+    });
+    var stream = UglifyJS.OutputStream({ source_map: map });
+    toplevel.print(stream);
+    return {
+        code : stream + "",
+        map : map + ""
+    };
+};
+
+// exports.describe_ast = function() {
+//     function doitem(ctor) {
+//         var sub = {};
+//         ctor.SUBCLASSES.forEach(function(ctor){
+//             sub[ctor.TYPE] = doitem(ctor);
+//         });
+//         var ret = {};
+//         if (ctor.SELF_PROPS.length > 0) ret.props = ctor.SELF_PROPS;
+//         if (ctor.SUBCLASSES.length > 0) ret.sub = sub;
+//         return ret;
+//     }
+//     return doitem(UglifyJS.AST_Node).sub;
+// }
+
+exports.describe_ast = function() {
+    var out = UglifyJS.OutputStream({ beautify: true });
+    function doitem(ctor) {
+        out.print("AST_" + ctor.TYPE);
+        var props = ctor.SELF_PROPS.filter(function(prop){
+            return !/^\$/.test(prop);
+        });
+        if (props.length > 0) {
+            out.space();
+            out.with_parens(function(){
+                props.forEach(function(prop, i){
+                    if (i) out.space();
+                    out.print(prop);
+                });
+            });
+        }
+        if (ctor.documentation) {
+            out.space();
+            out.print_string(ctor.documentation);
+        }
+        if (ctor.SUBCLASSES.length > 0) {
+            out.space();
+            out.with_block(function(){
+                ctor.SUBCLASSES.forEach(function(ctor, i){
+                    out.indent();
+                    doitem(ctor);
+                    out.newline();
+                });
+            });
+        }
+    };
+    doitem(UglifyJS.AST_Node);
+    return out + "";
+};
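The new exports.minify wraps the whole parse, compress, mangle and output pipeline in one call and returns the generated code together with an optional source map. A short usage sketch based directly on that function (file names are placeholders; the module name follows the npm package):

    // Usage sketch for exports.minify as defined above.
    var UglifyJS = require("uglify-js2");

    // Minify files from disk and produce a source map:
    var result = UglifyJS.minify([ "file1.js", "file2.js" ], {
        outSourceMap: "out.js.map"
    });
    console.log(result.code);   // minified JavaScript
    console.log(result.map);    // source map JSON text

    // Or minify code given as a string:
    var small = UglifyJS.minify("function add(first, second) { return first + second; }", {
        fromString: true
    });
    console.log(small.code);    // e.g. function add(n,r){return n+r}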