},
Object,
{
-
+ collapseWhite : false, // only reduces white space...
/**
* tokenize a stream
}
var n = found.split('.');
var p = false;
+ var _this = this;
n.forEach(function(nm) {
if (p) {
- tokens.push(new Token('.', "PUNC", "DOT", this.line));
+ tokens.push(new Token('.', "PUNC", "DOT", _this.line));
}
p=true;
- tokens.push(new Token(nm, "NAME", "NAME", this.line));
+ tokens.push(new Token(nm, "NAME", "NAME", _this.line));
});
return true;
read_space : function(/**JSDOC.TokenStream*/stream, tokens) {
var found = "";
- while (!stream.look().eof && Lang.isSpace(stream.look())) {
+ while (!stream.look().eof && Lang.isSpace(stream.look()) && !Lang.isNewline(stream.look())) {
found += stream.next();
}
if (found === "") {
return false;
}
- else {
- if (this.collapseWhite) found = " ";
- if (this.keepWhite) tokens.push(new Token(found, "WHIT", "SPACE", this.line));
- return true;
- }
+ //print("WHITE = " + JSON.stringify(found));
+ if (this.collapseWhite) found = " ";
+ if (this.keepWhite) tokens.push(new Token(found, "WHIT", "SPACE", this.line));
+ return true;
+
},
/**
if (found === "") {
return false;
}
- else {
- if (this.collapseWhite) found = "\n";
- if (this.keepWhite) tokens.push(new Token(found, "WHIT", "NEWLINE", this.line));
- return true;
+ //this.line++;
+ if (this.collapseWhite) {
+ found = "\n";
+ }
+ if (this.keepWhite) {
+ var last = tokens.pop();
+ if (last && last.name != "WHIT") {
+ tokens.push(last);
+ }
+
+ tokens.push(new Token(found, "WHIT", "NEWLINE", this.line));
}
+ return true;
},
/**