better support for multiple input files:

- use a single AST_Toplevel node for all files
- keep original source filename in the tokens
Mihai Bazon
2012-09-21 14:19:05 +03:00
parent c4f8c2103f
commit 5491e1d7b1
6 changed files with 96 additions and 93 deletions
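
Taken together, these changes let a caller feed several files into a single AST. A minimal sketch of the new usage (the require path and input file names are illustrative, not part of this commit):

    var UglifyJS = require("uglify-js"); // however this tree is exposed; illustrative
    var fs = require("fs");

    var toplevel = null;
    [ "a.js", "b.js" ].forEach(function(file){
        var code = fs.readFileSync(file, "utf8");
        // each call appends this file's statements to the same AST_Toplevel
        toplevel = UglifyJS.parse(code, {
            filename : file,     // recorded on every token as token.file
            toplevel : toplevel  // null on the first call, reused afterwards
        });
    });

    // every node remembers which file it came from
    console.log(toplevel.body[0].start.file); // "a.js"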

bin/uglifyjs

@@ -79,6 +79,7 @@ if (files.filter(function(el){ return el == "-" }).length > 1) {
 var STATS = {};
 var OUTPUT_FILE = ARGS.o;
+var TOPLEVEL = null;
 
 var SOURCE_MAP = ARGS.source_map ? UglifyJS.SourceMap({
     file: OUTPUT_FILE,
@@ -90,15 +91,48 @@ var output = UglifyJS.OutputStream({
     source_map: SOURCE_MAP
 });
 
-files = files.map(do_file_1);
-files = files.map(do_file_2);
-UglifyJS.base54.sort();
-files.forEach(do_file_3);
-
-if (ARGS.v) {
-    sys.error("BASE54 digits: " + UglifyJS.base54.get());
-    //sys.error("Frequency: " + sys.inspect(UglifyJS.base54.freq()));
-}
+files.forEach(function(file){
+    if (ARGS.v) {
+        sys.error("Parsing " + file);
+    }
+    var code = read_whole_file(file);
+    if (ARGS.p != null) {
+        file = file.replace(/^\/+/, "").split(/\/+/).slice(ARGS.p).join("/");
+    }
+    time_it("parse", function(){
+        TOPLEVEL = UglifyJS.parse(code, {
+            filename: file,
+            toplevel: TOPLEVEL
+        });
+    });
+});
+
+time_it("scope", function(){
+    TOPLEVEL.figure_out_scope();
+});
+
+if (ARGS.c !== true) {
+    time_it("squeeze", function(){
+        var compressor = UglifyJS.Compressor(COMPRESSOR_OPTIONS);
+        TOPLEVEL = TOPLEVEL.squeeze(compressor);
+    });
+}
+
+time_it("scope", function(){
+    TOPLEVEL.figure_out_scope();
+    if (!ARGS.m) {
+        TOPLEVEL.compute_char_frequency();
+        UglifyJS.base54.sort();
+    }
+});
+
+if (!ARGS.m) time_it("mangle", function(){
+    TOPLEVEL.mangle_names();
+});
+
+time_it("generate", function(){
+    TOPLEVEL.print(output);
+});
 
 output = output.get();
 
 if (SOURCE_MAP) {
@@ -127,57 +161,6 @@ if (ARGS.stats) {
 /* -----[ functions ]----- */
 
-function do_file_1(file) {
-    if (ARGS.v) {
-        sys.error("Compressing " + file);
-    }
-    var code = read_whole_file(file);
-    var ast;
-    time_it("parse", function(){
-        ast = UglifyJS.parse(code);
-    });
-    time_it("scope", function(){
-        ast.figure_out_scope();
-    });
-    if (ARGS.c !== true) {
-        time_it("squeeze", function(){
-            var compressor = UglifyJS.Compressor(COMPRESSOR_OPTIONS);
-            ast = ast.squeeze(compressor);
-        });
-    }
-    ast.filename = file;
-    return ast;
-}
-
-function do_file_2(ast) {
-    time_it("scope", function(){
-        ast.figure_out_scope();
-        if (!ARGS.m) {
-            ast.compute_char_frequency();
-        }
-    });
-    return ast;
-}
-
-function do_file_3(ast) {
-    var file = ast.filename;
-    // if (ARGS.v) {
-    //     sys.error("Mangling/generating " + file);
-    // }
-    if (!ARGS.m) time_it("mangle", function(){
-        ast.mangle_names();
-    });
-    time_it("generate", function(){
-        if (SOURCE_MAP) {
-            if (ARGS.p != null) {
-                file = file.replace(/^\/+/, "").split(/\/+/).slice(ARGS.p).join("/");
-            }
-            SOURCE_MAP.set_source(file);
-        }
-        ast.print(output);
-    });
-}
-
 function read_whole_file(filename) {
     if (filename == "-") {
         // XXX: this sucks. How does one read the whole STDIN
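
With the per-file passes gone, the whole pipeline runs once over the shared toplevel: scope, optional squeeze, scope again plus character frequency, mangle, then a single print. As library code, minus the ARGS conditionals, that amounts to this sketch:

    toplevel.figure_out_scope();              // required before compression

    var compressor = UglifyJS.Compressor({}); // compressor options are up to the caller
    toplevel = toplevel.squeeze(compressor);

    toplevel.figure_out_scope();              // re-resolve symbols after squeeze
    toplevel.compute_char_frequency();        // base the mangler's alphabet on real usage
    UglifyJS.base54.sort();
    toplevel.mangle_names();

    var output = UglifyJS.OutputStream({});   // defaults; see lib/output.js
    toplevel.print(output);
    console.log(output.get());                // the minified source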

lib/ast.js

@@ -86,7 +86,7 @@ function DEFNODE(type, props, methods, base) {
     return ctor;
 };
 
-var AST_Token = DEFNODE("Token", "type value line col pos endpos nlb comments_before", {
+var AST_Token = DEFNODE("Token", "type value line col pos endpos nlb comments_before file", {
 }, null);
 
 var AST_Node = DEFNODE("Node", "start end", {
@@ -146,10 +146,12 @@ var AST_BlockStatement = DEFNODE("BlockStatement", null, {
 }, AST_Statement);
 
 function walk_body(node, visitor) {
-    if (node.body instanceof Array) node.body.forEach(function(stat){
+    if (node.body instanceof AST_Statement) {
+        node.body._walk(visitor);
+    }
+    else node.body.forEach(function(stat){
         stat._walk(visitor);
     });
-    else if (node.body instanceof AST_Statement) node.body._walk(visitor);
 };
 
 var AST_Block = DEFNODE("Block", null, {
@@ -239,7 +241,7 @@ var AST_Scope = DEFNODE("Scope", "directives variables functions uses_with uses_
$documentation: "Base class for all statements introducing a lexical scope",
}, AST_Block);
var AST_Toplevel = DEFNODE("Toplevel", null, {
var AST_Toplevel = DEFNODE("Toplevel", "globals", {
$documentation: "The toplevel scope"
}, AST_Scope);
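
Since AST_Token now carries file, a tree walk can attribute every node to the source it came from. A sketch, assuming the TreeWalker visitor that lib/ast.js wraps around these _walk methods:

    var seen = {};
    toplevel.walk(new UglifyJS.TreeWalker(function(node){
        if (node.start && node.start.file != null)
            seen[node.start.file] = true; // which input files contributed nodes
    }));
    console.log(Object.keys(seen)); // e.g. [ "a.js", "b.js" ]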

lib/output.js

@@ -54,7 +54,8 @@ function OutputStream(options) {
         max_line_len : 32000,
         ie_proof : true,
         beautify : true,
-        source_map : null
+        source_map : null,
+        in_source_map : null
     });
 
     var indentation = 0;
@@ -245,6 +246,7 @@ function OutputStream(options) {
     var add_mapping = options.source_map ? function(token, name) {
         options.source_map.add(
+            token.file,
             current_line, current_col,
             token.line, token.col,
             (!name && token.type == "name") ? token.value : name

lib/parse.js

@@ -266,7 +266,7 @@ function is_token(token, type, val) {
 var EX_EOF = {};
 
-function tokenizer($TEXT) {
+function tokenizer($TEXT, filename) {
 
     var S = {
         text : $TEXT.replace(/\r\n?|[\n\u2028\u2029]/g, "\n").replace(/^\uFEFF/, ''),
@@ -324,7 +324,8 @@ function tokenizer($TEXT) {
             col : S.tokcol,
             pos : S.tokpos,
             endpos : S.pos,
-            nlb : S.newline_before
+            nlb : S.newline_before,
+            file : filename
         };
         if (!is_comment) {
             ret.comments_before = S.comments_before;
@@ -669,10 +670,16 @@ var ATOMIC_START_TOKEN = array_to_hash([ "atom", "num", "string", "regexp", "nam
 /* -----[ Parser ]----- */
 
-function parse($TEXT, exigent_mode) {
+function parse($TEXT, options) {
+
+    options = defaults(options, {
+        strict   : false,
+        filename : null,
+        toplevel : null
+    });
 
     var S = {
-        input   : typeof $TEXT == "string" ? tokenizer($TEXT, true) : $TEXT,
+        input   : typeof $TEXT == "string" ? tokenizer($TEXT, options.filename) : $TEXT,
         token   : null,
         prev    : null,
         peeked  : null,
@@ -736,7 +743,7 @@ function parse($TEXT, exigent_mode) {
     function expect(punc) { return expect_token("punc", punc); };
 
     function can_insert_semicolon() {
-        return !exigent_mode && (
+        return !options.strict && (
             S.token.nlb || is("eof") || is("punc", "}")
         );
     };
@@ -1226,7 +1233,7 @@ function parse($TEXT, exigent_mode) {
     var array_ = embed_tokens(function() {
         expect("[");
         return new AST_Array({
-            elements: expr_list("]", !exigent_mode, true)
+            elements: expr_list("]", !options.strict, true)
         });
     });
@@ -1235,7 +1242,7 @@ function parse($TEXT, exigent_mode) {
         var first = true, a = [];
         while (!is("punc", "}")) {
             if (first) first = false; else expect(",");
-            if (!exigent_mode && is("punc", "}"))
+            if (!options.strict && is("punc", "}"))
                 // allow trailing comma
                 break;
             var start = S.token;
@@ -1415,7 +1422,7 @@ function parse($TEXT, exigent_mode) {
     };
 
     function is_assignable(expr) {
-        if (!exigent_mode) return true;
+        if (!options.strict) return true;
         switch (expr[0]+"") {
           case "dot":
           case "sub":
@@ -1470,15 +1477,21 @@ function parse($TEXT, exigent_mode) {
         return ret;
     };
 
-    return new AST_Toplevel({
-        start: S.token,
-        body: (function(a){
-            while (!is("eof"))
-                a.push(statement());
-            return a;
-        })([]),
-        end: prev()
-    });
+    return (function(){
+        var start = S.token;
+        var body = [];
+        while (!is("eof"))
+            body.push(statement());
+        var end = prev();
+        var toplevel = options.toplevel;
+        if (toplevel) {
+            toplevel.body = toplevel.body.concat(body);
+            toplevel.end = end;
+        } else {
+            toplevel = new AST_Toplevel({ start: start, body: body, end: end });
+        }
+        return toplevel;
+    })();
 
 };
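
The positional exigent_mode flag becomes the strict option, alongside filename and toplevel. Old and new call styles, for comparison:

    // before this commit: a second positional argument
    // var ast = UglifyJS.parse(code, true);

    // after: a named option
    var ast = UglifyJS.parse(code, { strict: true }); // no ASI, no trailing commas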

lib/scope.js

@@ -188,6 +188,7 @@ AST_Toplevel.DEFMETHOD("figure_out_scope", function(){
             } else {
                 g = new SymbolDef(self, node);
                 g.undeclared = true;
+                globals[name] = g;
             }
             node.thedef = g;
             if (name == "eval") {
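
With globals now a property on AST_Toplevel (see the lib/ast.js change above) and populated here, undeclared names survive the scope pass. A sketch, assuming figure_out_scope() stores this globals object on the toplevel:

    var toplevel = UglifyJS.parse("foo(bar); var baz;");
    toplevel.figure_out_scope();
    for (var name in toplevel.globals) {
        if (toplevel.globals[name].undeclared)
            console.log("undeclared global:", name); // foo, bar
    }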

lib/sourcemap.js

@@ -51,27 +51,29 @@ function SourceMap(options) {
         file : options.file,
         sourceRoot : options.root
     });
-    var current_source = null;
-    function add(gen_line, gen_col, orig_line, orig_col, name) {
-        // AST_Node.warn("Mapping in {file}: {orig_line},{orig_col} → {gen_line},{gen_col} ({name})", {
-        //     orig_line : orig_line,
-        //     orig_col  : orig_col,
-        //     gen_line  : gen_line,
-        //     gen_col   : gen_col,
-        //     file      : current_source,
-        //     name      : name
-        // });
+    function add(source, gen_line, gen_col, orig_line, orig_col, name) {
         generator.addMapping({
             generated : { line: gen_line + 1, column: gen_col },
             original : { line: orig_line + 1, column: orig_col },
-            source : current_source,
+            source : source,
             name : name
         });
     };
     return {
         add : add,
-        set_source : function(filename) { current_source = filename },
         get : function() { return generator },
         toString : function() { return generator.toString() }
     }
 };
+
+function SourceMapInput(map) {
+    map = new MOZ_SourceMap.SourceMapConsumer(map);
+    return {
+        info: function(line, col) {
+            return map.originalPositionFor({
+                line: line,
+                column: col
+            });
+        }
+    };
+};
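
The new SourceMapInput, together with the in_source_map output option added above, wraps Mozilla's SourceMapConsumer so positions can be traced back through a map produced by an earlier pass. A usage sketch (the input variable is illustrative):

    var input = SourceMapInput(existing_map_json); // a map from a previous minification
    var pos = input.info(1, 42);                   // 1-based line, 0-based column
    console.log(pos.source + ":" + pos.line + ":" + pos.column);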