better support for multiple input files:

- use a single AST_Toplevel node for all files
- keep original source filename in the tokens
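In effect, the command-line driver can now feed several files through one parse pipeline and get a single combined AST back. A minimal sketch of the call pattern, assembled from the CLI hunks below (illustrative, not the literal shipped code):

var TOPLEVEL = null;
files.forEach(function(file){
    var code = read_whole_file(file);
    // each parse() call appends this file's statements to the same
    // toplevel node; every token records the file it came from
    TOPLEVEL = UglifyJS.parse(code, {
        filename : file,
        toplevel : TOPLEVEL
    });
});
TOPLEVEL.figure_out_scope(); // one scope analysis over all files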
@@ -79,6 +79,7 @@ if (files.filter(function(el){ return el == "-" }).length > 1) {
 
 var STATS = {};
 var OUTPUT_FILE = ARGS.o;
+var TOPLEVEL = null;
 
 var SOURCE_MAP = ARGS.source_map ? UglifyJS.SourceMap({
     file: OUTPUT_FILE,
@@ -90,15 +91,48 @@ var output = UglifyJS.OutputStream({
     source_map: SOURCE_MAP
 });
 
-files = files.map(do_file_1);
-files = files.map(do_file_2);
-UglifyJS.base54.sort();
-files.forEach(do_file_3);
-if (ARGS.v) {
-    sys.error("BASE54 digits: " + UglifyJS.base54.get());
-    //sys.error("Frequency: " + sys.inspect(UglifyJS.base54.freq()));
+files.forEach(function(file) {
+    if (ARGS.v) {
+        sys.error("Parsing " + file);
+    }
+    var code = read_whole_file(file);
+    if (ARGS.p != null) {
+        file = file.replace(/^\/+/, "").split(/\/+/).slice(ARGS.p).join("/");
+    }
+    time_it("parse", function(){
+        TOPLEVEL = UglifyJS.parse(code, {
+            filename: file,
+            toplevel: TOPLEVEL
+        });
+    });
+});
+
+time_it("scope", function(){
+    TOPLEVEL.figure_out_scope();
+});
+
+if (ARGS.c !== true) {
+    time_it("squeeze", function(){
+        var compressor = UglifyJS.Compressor(COMPRESSOR_OPTIONS);
+        TOPLEVEL = TOPLEVEL.squeeze(compressor);
+    });
 }
 
+time_it("scope", function(){
+    TOPLEVEL.figure_out_scope();
+    if (!ARGS.m) {
+        TOPLEVEL.compute_char_frequency();
+        UglifyJS.base54.sort();
+    }
+});
+
+if (!ARGS.m) time_it("mangle", function(){
+    TOPLEVEL.mangle_names();
+});
+time_it("generate", function(){
+    TOPLEVEL.print(output);
+});
+
 output = output.get();
 
 if (SOURCE_MAP) {
@@ -127,57 +161,6 @@ if (ARGS.stats) {
 
 /* -----[ functions ]----- */
 
-function do_file_1(file) {
-    if (ARGS.v) {
-        sys.error("Compressing " + file);
-    }
-    var code = read_whole_file(file);
-    var ast;
-    time_it("parse", function(){
-        ast = UglifyJS.parse(code);
-    });
-    time_it("scope", function(){
-        ast.figure_out_scope();
-    });
-    if (ARGS.c !== true) {
-        time_it("squeeze", function(){
-            var compressor = UglifyJS.Compressor(COMPRESSOR_OPTIONS);
-            ast = ast.squeeze(compressor);
-        });
-    }
-    ast.filename = file;
-    return ast;
-}
-
-function do_file_2(ast) {
-    time_it("scope", function(){
-        ast.figure_out_scope();
-        if (!ARGS.m) {
-            ast.compute_char_frequency();
-        }
-    });
-    return ast;
-}
-
-function do_file_3(ast) {
-    var file = ast.filename;
-    // if (ARGS.v) {
-    //     sys.error("Mangling/generating " + file);
-    // }
-    if (!ARGS.m) time_it("mangle", function(){
-        ast.mangle_names();
-    });
-    time_it("generate", function(){
-        if (SOURCE_MAP) {
-            if (ARGS.p != null) {
-                file = file.replace(/^\/+/, "").split(/\/+/).slice(ARGS.p).join("/");
-            }
-            SOURCE_MAP.set_source(file);
-        }
-        ast.print(output);
-    });
-}
-
 function read_whole_file(filename) {
     if (filename == "-") {
         // XXX: this sucks. How does one read the whole STDIN
lib/ast.js | 10
@@ -86,7 +86,7 @@ function DEFNODE(type, props, methods, base) {
     return ctor;
 };
 
-var AST_Token = DEFNODE("Token", "type value line col pos endpos nlb comments_before", {
+var AST_Token = DEFNODE("Token", "type value line col pos endpos nlb comments_before file", {
 }, null);
 
 var AST_Node = DEFNODE("Node", "start end", {
@@ -146,10 +146,12 @@ var AST_BlockStatement = DEFNODE("BlockStatement", null, {
 }, AST_Statement);
 
 function walk_body(node, visitor) {
-    if (node.body instanceof Array) node.body.forEach(function(stat){
+    if (node.body instanceof AST_Statement) {
+        node.body._walk(visitor);
+    }
+    else node.body.forEach(function(stat){
         stat._walk(visitor);
     });
-    else if (node.body instanceof AST_Statement) node.body._walk(visitor);
 };
 
 var AST_Block = DEFNODE("Block", null, {
@@ -239,7 +241,7 @@ var AST_Scope = DEFNODE("Scope", "directives variables functions uses_with uses_
     $documentation: "Base class for all statements introducing a lexical scope",
 }, AST_Block);
 
-var AST_Toplevel = DEFNODE("Toplevel", null, {
+var AST_Toplevel = DEFNODE("Toplevel", "globals", {
     $documentation: "The toplevel scope"
 }, AST_Scope);
 
lib/output.js
@@ -54,7 +54,8 @@ function OutputStream(options) {
         max_line_len : 32000,
         ie_proof : true,
         beautify : true,
-        source_map : null
+        source_map : null,
+        in_source_map : null
     });
 
     var indentation = 0;
@@ -245,6 +246,7 @@ function OutputStream(options) {
 
     var add_mapping = options.source_map ? function(token, name) {
         options.source_map.add(
+            token.file,
             current_line, current_col,
             token.line, token.col,
             (!name && token.type == "name") ? token.value : name
lib/parse.js | 47
@@ -266,7 +266,7 @@ function is_token(token, type, val) {
 
 var EX_EOF = {};
 
-function tokenizer($TEXT) {
+function tokenizer($TEXT, filename) {
 
     var S = {
         text : $TEXT.replace(/\r\n?|[\n\u2028\u2029]/g, "\n").replace(/^\uFEFF/, ''),
@@ -324,7 +324,8 @@ function tokenizer($TEXT) {
             col : S.tokcol,
             pos : S.tokpos,
             endpos : S.pos,
-            nlb : S.newline_before
+            nlb : S.newline_before,
+            file : filename
         };
         if (!is_comment) {
             ret.comments_before = S.comments_before;
@@ -669,10 +670,16 @@ var ATOMIC_START_TOKEN = array_to_hash([ "atom", "num", "string", "regexp", "nam
 
 /* -----[ Parser ]----- */
 
-function parse($TEXT, exigent_mode) {
+function parse($TEXT, options) {
 
+    options = defaults(options, {
+        strict : false,
+        filename : null,
+        toplevel : null
+    });
+
     var S = {
-        input : typeof $TEXT == "string" ? tokenizer($TEXT, true) : $TEXT,
+        input : typeof $TEXT == "string" ? tokenizer($TEXT, options.filename) : $TEXT,
         token : null,
         prev : null,
         peeked : null,
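Note the public API change in the hunk above: the boolean second argument to parse() (exigent_mode) becomes an options object. A before/after sketch for callers (the filename value is an illustrative placeholder):

// before this commit: second argument was the exigent_mode boolean
var ast = UglifyJS.parse(code, true);

// after this commit: an options object, all keys optional
var ast = UglifyJS.parse(code, {
    strict   : true,        // replaces exigent_mode
    filename : "input.js",  // placeholder; stamped on every token as token.file
    toplevel : null         // or a previous AST_Toplevel to append to
});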
@@ -736,7 +743,7 @@ function parse($TEXT, exigent_mode) {
     function expect(punc) { return expect_token("punc", punc); };
 
     function can_insert_semicolon() {
-        return !exigent_mode && (
+        return !options.strict && (
             S.token.nlb || is("eof") || is("punc", "}")
         );
     };
@@ -1226,7 +1233,7 @@ function parse($TEXT, exigent_mode) {
     var array_ = embed_tokens(function() {
         expect("[");
         return new AST_Array({
-            elements: expr_list("]", !exigent_mode, true)
+            elements: expr_list("]", !options.strict, true)
         });
     });
 
@@ -1235,7 +1242,7 @@ function parse($TEXT, exigent_mode) {
         var first = true, a = [];
         while (!is("punc", "}")) {
             if (first) first = false; else expect(",");
-            if (!exigent_mode && is("punc", "}"))
+            if (!options.strict && is("punc", "}"))
                 // allow trailing comma
                 break;
             var start = S.token;
@@ -1415,7 +1422,7 @@ function parse($TEXT, exigent_mode) {
     };
 
     function is_assignable(expr) {
-        if (!exigent_mode) return true;
+        if (!options.strict) return true;
         switch (expr[0]+"") {
           case "dot":
           case "sub":
@@ -1470,15 +1477,21 @@ function parse($TEXT, exigent_mode) {
         return ret;
     };
 
-    return new AST_Toplevel({
-        start: S.token,
-        body: (function(a){
-            while (!is("eof"))
-                a.push(statement());
-            return a;
-        })([]),
-        end: prev()
-    });
+    return (function(){
+        var start = S.token;
+        var body = [];
+        while (!is("eof"))
+            body.push(statement());
+        var end = prev();
+        var toplevel = options.toplevel;
+        if (toplevel) {
+            toplevel.body = toplevel.body.concat(body);
+            toplevel.end = end;
+        } else {
+            toplevel = new AST_Toplevel({ start: start, body: body, end: end });
+        }
+        return toplevel;
+    })();
 
 };
 
lib/scope.js
@@ -188,6 +188,7 @@ AST_Toplevel.DEFMETHOD("figure_out_scope", function(){
             } else {
                 g = new SymbolDef(self, node);
                 g.undeclared = true;
+                globals[name] = g;
             }
             node.thedef = g;
             if (name == "eval") {
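Combined with the new "globals" slot on AST_Toplevel in lib/ast.js, figure_out_scope() can now hand back every undeclared name it saw. A hypothetical inspection, assuming figure_out_scope stores this map on the toplevel node (only the map insertion is visible in this hunk):

TOPLEVEL.figure_out_scope();
// assumption: the `globals` object built during figure_out_scope ends up
// as TOPLEVEL.globals, matching the property added to AST_Toplevel above
for (var name in TOPLEVEL.globals) {
    sys.error("undeclared global: " + name);
}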
lib/sourcemap.js
@@ -51,27 +51,29 @@ function SourceMap(options) {
         file : options.file,
         sourceRoot : options.root
     });
-    var current_source = null;
-    function add(gen_line, gen_col, orig_line, orig_col, name) {
-        // AST_Node.warn("Mapping in {file}: {orig_line},{orig_col} → {gen_line},{gen_col} ({name})", {
-        //     orig_line : orig_line,
-        //     orig_col  : orig_col,
-        //     gen_line  : gen_line,
-        //     gen_col   : gen_col,
-        //     file      : current_source,
-        //     name      : name
-        // });
+    function add(source, gen_line, gen_col, orig_line, orig_col, name) {
         generator.addMapping({
             generated : { line: gen_line + 1, column: gen_col },
             original  : { line: orig_line + 1, column: orig_col },
-            source    : current_source,
+            source    : source,
             name      : name
         });
     };
     return {
         add : add,
-        set_source : function(filename) { current_source = filename },
         get : function() { return generator },
         toString : function() { return generator.toString() }
-    }
+    };
+};
+
+function SourceMapInput(map) {
+    map = new MOZ_SourceMap.SourceMapConsumer(map);
+    return {
+        info: function(line, col) {
+            return map.originalPositionFor({
+                line: line,
+                column: col
+            });
+        }
+    };
 };
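Net effect of the SourceMap change: the stateful set_source() call before printing each file is gone, and the source filename travels with each individual mapping instead. This is exactly the call that OutputStream's add_mapping now makes (see the lib/output.js hunk above):

options.source_map.add(
    token.file,                 // filename recorded by the tokenizer
    current_line, current_col,  // position in the generated output
    token.line, token.col,      // position in the original source
    (!name && token.type == "name") ? token.value : name
);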