Mihai Bazon
2013-09-06 09:54:30 +03:00
parent cb9d16fbe4
commit 7c10b25346
2 changed files with 45 additions and 21 deletions

lib/output.js

@@ -399,7 +399,7 @@ function OutputStream(options) {
             });
         }
         comments.forEach(function(c){
-            if (c.type == "comment1") {
+            if (/comment[134]/.test(c.type)) {
                 output.print("//" + c.value + "\n");
                 output.indent();
             }

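The output change above makes the printer treat the new HTML5-style comment tokens ("comment3" for "<!--", "comment4" for "-->") exactly like ordinary "//" line comments. A minimal sketch of the effect, assuming the uglify-js 2.x API (UglifyJS.parse plus print_to_string with the "comments" output option); the invocation itself is illustrative, not part of this commit:

var UglifyJS = require("uglify-js");

var src = "<!-- hide from ancient browsers\n" +
          "alert(1);\n" +
          "--> done hiding\n" +
          "alert(2);\n";

// html5_comments defaults to true (see the parse() hunk below), so both
// HTML-style delimiters are tokenized as comment3/comment4 tokens.
var ast = UglifyJS.parse(src);

// With comment output enabled, comment1/3/4 are all re-emitted as "//" lines,
// roughly:
//   // hide from ancient browsers
//   alert(1);
//   // done hiding
//   alert(2);
console.log(ast.print_to_string({ comments: true, beautify: true }));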
lib/parse.js

@@ -210,7 +210,7 @@ function is_token(token, type, val) {
 var EX_EOF = {};
-function tokenizer($TEXT, filename) {
+function tokenizer($TEXT, filename, html5_comments) {
     var S = {
         text            : $TEXT.replace(/\r\n?|[\n\u2028\u2029]/g, "\n").replace(/\uFEFF/g, ''),
@@ -242,6 +242,14 @@ function tokenizer($TEXT, filename) {
         return ch;
     };
+    function forward(i) {
+        while (i-- > 0) next();
+    };
+    function looking_at(str) {
+        return S.text.substr(S.pos, str.length) == str;
+    };
     function find(what, signal_eof) {
         var pos = S.text.indexOf(what, S.pos);
         if (signal_eof && pos == -1) throw EX_EOF;
@@ -381,8 +389,8 @@ function tokenizer($TEXT, filename) {
         return token("string", ret);
     });
-    function read_line_comment() {
-        next();
+    function skip_line_comment(type) {
+        var regex_allowed = S.regex_allowed;
         var i = find("\n"), ret;
         if (i == -1) {
             ret = S.text.substr(S.pos);
@@ -391,11 +399,13 @@ function tokenizer($TEXT, filename) {
             ret = S.text.substring(S.pos, i);
             S.pos = i;
         }
-        return token("comment1", ret, true);
+        S.comments_before.push(token(type, ret, true));
+        S.regex_allowed = regex_allowed;
+        return next_token();
     };
-    var read_multiline_comment = with_eof_error("Unterminated multiline comment", function(){
-        next();
+    var skip_multiline_comment = with_eof_error("Unterminated multiline comment", function(){
+        var regex_allowed = S.regex_allowed;
         var i = find("*/", true);
         var text = S.text.substring(S.pos, i);
         var a = text.split("\n"), n = a.length;
@@ -405,8 +415,11 @@ function tokenizer($TEXT, filename) {
         if (n > 1) S.col = a[n - 1].length;
         else S.col += a[n - 1].length;
         S.col += 2;
-        S.newline_before = S.newline_before || text.indexOf("\n") >= 0;
-        return token("comment2", text, true);
+        var nlb = S.newline_before = S.newline_before || text.indexOf("\n") >= 0;
+        S.comments_before.push(token("comment2", text, true));
+        S.regex_allowed = regex_allowed;
+        S.newline_before = nlb;
+        return next_token();
     });
     function read_name() {
@@ -470,16 +483,13 @@ function tokenizer($TEXT, filename) {
     function handle_slash() {
         next();
-        var regex_allowed = S.regex_allowed;
         switch (peek()) {
           case "/":
-            S.comments_before.push(read_line_comment());
-            S.regex_allowed = regex_allowed;
-            return next_token();
+            next();
+            return skip_line_comment("comment1");
           case "*":
-            S.comments_before.push(read_multiline_comment());
-            S.regex_allowed = regex_allowed;
-            return next_token();
+            next();
+            return skip_multiline_comment();
         }
         return S.regex_allowed ? read_regexp("") : read_operator("/");
     };
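With the refactor above, handle_slash no longer hands comment tokens back to next_token: skip_line_comment and skip_multiline_comment push the comment onto S.comments_before, restore regex_allowed, and tail-call next_token(), so comments never appear in the token stream and instead ride along on the next real token. A sketch of the resulting behavior, assuming it runs where the tokenizer() function above is in scope (whether a given uglify-js build exports it is not guaranteed):

// Token reader for a snippet that starts with a block comment and a line comment.
var next_token = tokenizer("/* a */ // b\nfoo", "sketch.js", true);

var tok = next_token();
// Both comments are skipped, so the first token returned is the identifier:
//   tok.type == "name", tok.value == "foo"
// ...and the skipped comments were attached to it by token():
//   tok.comments_before[0].type == "comment2"   // value " a "
//   tok.comments_before[1].type == "comment1"   // value " b"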
@@ -516,6 +526,16 @@ function tokenizer($TEXT, filename) {
             return read_regexp(force_regexp);
         skip_whitespace();
         start_token();
+        if (html5_comments) {
+            if (looking_at("<!--")) {
+                forward(4);
+                return skip_line_comment("comment3");
+            }
+            if (looking_at("-->") && S.newline_before) {
+                forward(3);
+                return skip_line_comment("comment4");
+            }
+        }
         var ch = peek();
         if (!ch) return token("eof");
         var code = ch.charCodeAt(0);
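The check above fires in two distinct ways: "<!--" opens a "comment3" that runs to the end of the line wherever a token may start, while "-->" is only skipped as a "comment4" when S.newline_before is set, i.e. when it is the first thing the tokenizer sees on its line; a mid-line "-->" still tokenizes as the "--" and ">" operators. An illustrative input exercising both branches, assuming UglifyJS.parse from uglify-js 2.x with the new html5_comments default:

var UglifyJS = require("uglify-js");

var src = [
    '<!-- skipped up to the newline as a "comment3" token',
    'var n = 3;',
    'while (n --> 0) ;  // mid-line "-->" is not a comment: it reads as (n--) > 0',
    '--> first thing on its line, so it is skipped as a "comment4" token'
].join("\n");

UglifyJS.parse(src);  // parses cleanly; the HTML-style comments never become tokens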
@@ -594,11 +614,15 @@ function parse($TEXT, options) {
         strict         : false,
         filename       : null,
         toplevel       : null,
-        expression     : false
+        expression     : false,
+        html5_comments : true,
     });
     var S = {
-        input       : typeof $TEXT == "string" ? tokenizer($TEXT, options.filename) : $TEXT,
+        input       : (typeof $TEXT == "string"
+                       ? tokenizer($TEXT, options.filename,
+                                   options.html5_comments)
+                       : $TEXT),
         token       : null,
         prev        : null,
         peeked      : null,
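Finally, parse() gains an html5_comments option, enabled by default and passed straight through to the tokenizer. A sketch of opting out, assuming the uglify-js 2.x UglifyJS.parse entry point; the exact error text is up to the parser:

var UglifyJS = require("uglify-js");

// Default (html5_comments: true): the HTML-style delimiters are skipped as comments.
UglifyJS.parse("<!-- legacy comment\nvar x = 1;\n");

// Disabled: "<!--" now tokenizes as the "<" operator at statement start,
// so the same input raises a parse error instead.
try {
    UglifyJS.parse("<!-- legacy comment\nvar x = 1;\n", { html5_comments: false });
} catch (e) {
    console.log("rejected as expected:", e.message);
}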