// test code
// NOTE(review): the '-' prefixed lines below are an unresolved diff deletion
// of an ad-hoc test harness: it configured a JSDOC.TokenReader (keeping docs,
// whitespace and comments, separating identifiers, not collapsing whitespace),
// read a hard-coded local copy of Walker2.js, tokenized it and dumped the
// tokens. Safe to drop once the diff is resolved; do not resurrect the
// absolute /home/alan/... path.
-
-void main() {
- var tr = new JSDOC.TokenReader();
- tr.keepDocs =true;
- tr.keepWhite = true;
- tr.keepComments = true;
- tr.sepIdents = true;
- tr.collapseWhite = false;
- tr.filename = "test";
- string str;
- FileUtils.get_contents("/home/alan/gitlive/gnome.introspection-doc-generator/JSDOC/Walker2.js", out str);
-
- var toks = tr.tokenize(new JSDOC.TextStream(str)); // dont merge xxx + . + yyyy etc.
- toks.dump();
-}
-
+
//const Token = imports.Token.Token;
//const Lang = imports.Lang.Lang;
/**
@class Search a {@link JSDOC.TextStream} for language tokens.
*/
-
-
-
-
+
namespace JSDOC {
public class TokenArray: Object {
/**
 * Prints every collected token to stdout, one token per line.
 *
 * NOTE(review): the -/+ pair below is an unresolved diff; the '+' variant
 * (which appends "\n" after each token's string form) is the intended
 * current implementation — resolve the merge before compiling.
 */
public void dump()
{
foreach(var token in this.tokens) {
- print(token.asString());
+ print(token.asString() +"\n");
}
}
// NOTE(review): fragment of a larger tokenizing method — the enclosing
// method signature is outside this view, and the -/+ lines are unresolved
// diff markers. The loop greedily consumes the longest run of characters
// that Lang.punc still recognizes as a punctuation token. The '+' variant
// treats a null return from Lang.punc as "not a known punctuation token"
// (the '-' variant tested .length > 0 instead) — TODO confirm Lang.punc's
// actual return contract before resolving the merge.
{
string found = "";
- while (!stream.lookEOF() && Lang.punc(found + stream.look().to_string()).length > 0) {
+ while (!stream.lookEOF()) {
+ var ns = stream.look().to_string();
+
+ if (null == Lang.punc(found + ns )) {
+ break;
+ }
found += stream.next();
}
// NOTE(review): fragment — the condition below is truncated: it is the tail
// of an "if (" that sits outside this view, testing for an HTML-style "<!--"
// comment opener (and consuming those 4 characters into `found`). The -/+
// lines are unresolved diff markers.
(stream.look() == '<' && stream.look(1) == '!' && stream.look(2) == '-' && stream.look(3) == '-' && (""!=(found=stream.next(4))))
) {
var line = this.line;
// Accumulate characters up to (but not including) the end-of-line; the '+'
// variant breaks out on a newline instead of testing it in the loop guard.
- while (!stream.lookEOF() && !Lang.isNewline(stream.look().to_string())) {
+ while (!stream.lookEOF()) {
+ //print(stream.look().to_string());
+ if ( Lang.isNewline(stream.look().to_string())) {
+ break;
+ }
found += stream.next();
}
// NOTE(review): when not at EOF the '+' branch DOES consume the line
// terminator into `found`; the inline comment below appears to describe the
// opposite — verify the intended behavior before resolving the merge.
- //if (!stream.lookEOF()) { // what? << eat the EOL?
+ if (!stream.lookEOF()) { // looking for end of line... if we got it, then do not eat the character..
found += stream.next();
- //}
+ }
// Record the single-line comment token only when comment retention is on;
// `line` was captured before the scan so the token reports its start line.
if (this.keepComments) {
tokens.push(new Token(found, "COMM", "SINGLE_LINE_COMM", line));
}