Re-arrange files, add support for classic JS Documentor, and packer
[gnome.introspection-doc-generator] / JSDOC / TokenReader.js
index 6f5d959..e367849 100644 (file)
@@ -1,34 +1,44 @@
 //<script type="text/javascript">
 
-imports['Object.js'].load(Object);
-console = imports['console.js'].console;
+XObject = imports.XObject.XObject;
+console = imports.console.console;
 
-JSDOC   = imports['JSDOC.js'].JSDOC;
-Token   = imports['JSDOC/Token.js'].Token;
-Lang    = imports['JSDOC/Token.js'].Lang;
+
+Token   = imports.Token.Token;
+Lang    = imports.Lang.Lang;
 
 /**
        @class Search a {@link JSDOC.TextStream} for language tokens.
 */
-TokenReader = Object.define(
+TokenReader = XObject.define(
     function(o) {
         
         this.keepDocs = true;
         this.keepWhite = false;
         this.keepComments = false;
-        Roo.apply(this, o || {});
+        this.sepIdents = false; // separate '.' in identifiers..
+        XObject.extend(this, o || {});
         
     },
     Object,
     {
-            
+        collapseWhite : false, // only reduces white space...
 
         /**
-            @type {JSDOC.Token[]}
+         * tokenize a stream
+         * @return {Array} of tokens
+         * 
+         * ts = new TextStream(File.read(str));
+         * tr = TokenReader({ keepComments : true, keepWhite : true });
+         * tr.tokenize(ts)
+         * 
          */
+            
 
 
         tokenize : function(/**JSDOC.TextStream*/stream) {
+            this.line =1;
             var tokens = [];
             /**@ignore*/ tokens.last    = function() { return tokens[tokens.length-1]; }
             /**@ignore*/ tokens.lastSym = function() {
@@ -50,7 +60,7 @@ TokenReader = Object.define(
                 if (this.read_word(stream, tokens))      continue;
                 
                 // if execution reaches here then an error has happened
-                tokens.push(new Token(stream.next(), "TOKN", "UNKNOWN_TOKEN"));
+                tokens.push(new Token(stream.next(), "TOKN", "UNKNOWN_TOKEN", this.line));
             }
             
             
@@ -72,9 +82,26 @@ TokenReader = Object.define(
             }
             else {
                 var name;
-                if ((name = Lang.keyword(found))) tokens.push(new Token(found, "KEYW", name));
-                else tokens.push(new Token(found, "NAME", "NAME"));
+                if ((name = Lang.keyword(found))) {
+                    tokens.push(new Token(found, "KEYW", name, this.line));
+                    return true;
+                }
+                if (!this.sepIdents || found.indexOf('.') < 0 ) {
+                    tokens.push(new Token(found, "NAME", "NAME", this.line));
+                    return true;
+                }
+                var n = found.split('.');
+                var p = false;
+                var _this = this;
+                n.forEach(function(nm) {
+                    if (p) {
+                        tokens.push(new Token('.', "PUNC", "DOT", _this.line));
+                    }
+                    p=true;
+                    tokens.push(new Token(nm, "NAME", "NAME", _this.line));
+                });
                 return true;
+                
             }
         },
 
@@ -92,7 +119,7 @@ TokenReader = Object.define(
                 return false;
             }
             else {
-                tokens.push(new Token(found, "PUNC", Lang.punc(found)));
+                tokens.push(new Token(found, "PUNC", Lang.punc(found), this.line));
                 return true;
             }
         },
@@ -103,18 +130,18 @@ TokenReader = Object.define(
         read_space : function(/**JSDOC.TokenStream*/stream, tokens) {
             var found = "";
             
-            while (!stream.look().eof && Lang.isSpace(stream.look())) {
+            while (!stream.look().eof && Lang.isSpace(stream.look()) && !Lang.isNewline(stream.look())) {
                 found += stream.next();
             }
             
             if (found === "") {
                 return false;
             }
-            else {
-                if (this.collapseWhite) found = " ";
-                if (this.keepWhite) tokens.push(new Token(found, "WHIT", "SPACE"));
-                return true;
-            }
+            //print("WHITE = " + JSON.stringify(found)); 
+            if (this.collapseWhite) found = " ";
+            if (this.keepWhite) tokens.push(new Token(found, "WHIT", "SPACE", this.line));
+            return true;
+        
         },
 
         /**
@@ -122,19 +149,28 @@ TokenReader = Object.define(
          */
         read_newline : function(/**JSDOC.TokenStream*/stream, tokens) {
             var found = "";
-            
+            var line = this.line;
             while (!stream.look().eof && Lang.isNewline(stream.look())) {
+                this.line++;
                 found += stream.next();
             }
             
             if (found === "") {
                 return false;
             }
-            else {
-                if (this.collapseWhite) found = "\n";
-                if (this.keepWhite) tokens.push(new Token(found, "WHIT", "NEWLINE"));
-                return true;
+            //this.line++;
+            if (this.collapseWhite) {
+                found = "\n";
+            }
+            if (this.keepWhite) {
+                var last = tokens.pop();
+                if (last && last.name != "WHIT") {
+                    tokens.push(last);
+                }
+                
+                tokens.push(new Token(found, "WHIT", "NEWLINE", line));
             }
+            return true;
         },
 
         /**
@@ -143,14 +179,17 @@ TokenReader = Object.define(
         read_mlcomment : function(/**JSDOC.TokenStream*/stream, tokens) {
             if (stream.look() == "/" && stream.look(1) == "*") {
                 var found = stream.next(2);
-                
+                var c = '';
+                var line = this.line;
                 while (!stream.look().eof && !(stream.look(-1) == "/" && stream.look(-2) == "*")) {
-                    found += stream.next();
+                    c = stream.next();
+                    if (c == "\n") this.line++;
+                    found += c;
                 }
                 
                 // to start doclet we allow /** or /*** but not /**/ or /****
-                if (/^\/\*\*([^\/]|\*[^*])/.test(found) && this.keepDocs) tokens.push(new Token(found, "COMM", "JSDOC"));
-                else if (this.keepComments) tokens.push(new Token(found, "COMM", "MULTI_LINE_COMM"));
+                if (/^\/\*\*([^\/]|\*[^*])/.test(found) && this.keepDocs) tokens.push(new Token(found, "COMM", "JSDOC", this.line));
+                else if (this.keepComments) tokens.push(new Token(found, "COMM", "MULTI_LINE_COMM", line));
                 return true;
             }
             return false;
@@ -166,14 +205,17 @@ TokenReader = Object.define(
                 || 
                 (stream.look() == "<" && stream.look(1) == "!" && stream.look(2) == "-" && stream.look(3) == "-" && (found=stream.next(4)))
             ) {
-                
+                var line = this.line;
                 while (!stream.look().eof && !Lang.isNewline(stream.look())) {
                     found += stream.next();
                 }
-                
+                if (!stream.look().eof) {
+                    found += stream.next();
+                }
                 if (this.keepComments) {
-                    tokens.push(new Token(found, "COMM", "SINGLE_LINE_COMM"));
+                    tokens.push(new Token(found, "COMM", "SINGLE_LINE_COMM", line));
                 }
+                this.line++;
                 return true;
             }
             return false;
@@ -201,7 +243,7 @@ TokenReader = Object.define(
                     }
                     else if (stream.look() == "\"") {
                         string += stream.next();
-                        tokens.push(new Token(string, "STRN", "DOUBLE_QUOTE"));
+                        tokens.push(new Token(string, "STRN", "DOUBLE_QUOTE", this.line));
                         return true;
                     }
                     else {
@@ -226,7 +268,7 @@ TokenReader = Object.define(
                     }
                     else if (stream.look() == "'") {
                         string += stream.next();
-                        tokens.push(new Token(string, "STRN", "SINGLE_QUOTE"));
+                        tokens.push(new Token(string, "STRN", "SINGLE_QUOTE", this.line));
                         return true;
                     }
                     else {
@@ -255,8 +297,8 @@ TokenReader = Object.define(
                 return false;
             }
             else {
-                if (/^0[0-7]/.test(found)) tokens.push(new Token(found, "NUMB", "OCTAL"));
-                else tokens.push(new Token(found, "NUMB", "DECIMAL"));
+                if (/^0[0-7]/.test(found)) tokens.push(new Token(found, "NUMB", "OCTAL", this.line));
+                else tokens.push(new Token(found, "NUMB", "DECIMAL", this.line));
                 return true;
             }
         },
@@ -293,7 +335,7 @@ TokenReader = Object.define(
             
             while (!stream.look().eof) {
                 if (Lang.isHexDec(found) && !Lang.isHexDec(found+stream.look())) { // done
-                    tokens.push(new Token(found, "NUMB", "HEX_DEC"));
+                    tokens.push(new Token(found, "NUMB", "HEX_DEC", this.line));
                     return true;
                 }
                 else {
@@ -338,7 +380,7 @@ TokenReader = Object.define(
                             regex += stream.next();
                         }
                         
-                        tokens.push(new Token(regex, "REGX", "REGX"));
+                        tokens.push(new Token(regex, "REGX", "REGX", this.line));
                         return true;
                     }
                     else {