// <script type="text/javascript">
-XObject = imports.XObject.XObject;
-File = imports.File.File;
+const XObject = imports.XObject.XObject;
+const File = imports.File.File;
-TokenReader = imports['JSDOC/TokenReader.js'].TokenReader;
-ScopeParser = imports['JSDOC/ScopeParser.js'].ScopeParser;
-TokenStream = imports['JSDOC/TokenStream.js'].TokenStream;
+const TextStream = imports.JSDOC.TextStream.TextStream ;
+const TokenReader = imports.TokenReader.TokenReader;
+const ScopeParser = imports.ScopeParser.ScopeParser;
+const TokenStream = imports.TokenStream.TokenStream;
+const CompressWhite = imports.CompressWhite.CompressWhite;
+const Collapse = imports.Collapse.Collapse;
+
+const GLib = imports.gi.GLib;
+const Gio = imports.gi.Gio;
/**
- * @class JSDOC.Packer
+ * @namespace JSDOC
+ * @class Packer
* Create a new packer
*
+ * Use with pack.js
+ *
+ *
* Usage:
* <code>
*
-var x = new JSDOC.Packer(
- [ "/location/of/file1.js", "/location/of/file2.js", ... ],
- "/location/of"
+Packer = imports['JSDOC/Packer.js'].Packer;
+var x = new Packer({
+
+ files : [ "/location/of/file1.js", "/location/of/file2.js", ... ],
+ target : "/tmp/output.js",
+ debugTarget : "/tmp/output.debug.js", // merged file without compression.
+ debugTranslateTarget : "/tmp/output.translate.js", // merged with translation
+ // and no compression
+ translateJSON: "/tmp/translate.json",
+
+
);
x.packFiles(
"/location/of/temp_batch_dir",
* directly before an eval statement, it will compress all the code around the eval,
* and not rename the variables 'avarname'
*
- * Dont try running this on a merged uncompressed large file - it's horrifically slow.
+ * Don't try running this on a merged uncompressed large file - it used to be horrifically slow. not sure about now..
* Best to use lot's of small classes, and use it to merge, as it will cache the compaction
*
*
*
- * @param {Array} files List of Files - MUST BE WITH ABSOLUTE PATH eg. [ '/usr/src/xyz/abc.js', .... ]
- * @param {String} source_path top level directory of source (used to work out the relative names for the minimized temp files)
+ * Notes for translation
+ * - translation relies on you using double quotes for strings if they need translating
+ * - single quoted strings are ignored.
+ *
+ * Generation of indexFiles
+ * - translateIndex = the indexfile
+ *
+ *
+ *
+ *
+
*/
-Packer = function(files, spath)
+const Packer = function(cfg)
{
- this.files = files;
- this.spath = spath; // source path
- this.aliasList = { }; // list of maps Roo.asdfasdfasf => Roo.A1
+
+ XObject.extend(this, cfg);
+ var _this = this;
+ if (this.srcfiles && this.srcfiles.length) {
+ this.srcfiles.forEach(function(f) {
+ _this.loadSourceFile(f);
+ });
+
+ }
+
+ if (!this.files) {
+ throw "No Files";
+ }
+
+ var link = false;
+ if (cfg.autoBuild) {
+
+ function dateString(d){
+ function pad(n){return n<10 ? '0'+n : n}
+ return d.getFullYear() +
+ pad(d.getMonth()+1)+
+ pad(d.getDate())+'_'+
+ pad(d.getHours())+
+ pad(d.getMinutes())+
+ pad(d.getSeconds());
+ }
+
+
+
+ var version = 0;
+ this.files.forEach(function(f) {
+ version = Math.max(File.mtime(f), version);
+ });
+ var version = dateString(new Date(version));
+
+ var dirname = GLib.path_get_dirname(this.files[0]);
+ var outname = this.module ? this.module : GLib.path_get_basename(dirname);
+ this.target = dirname + '/compiled/' + outname + '-' + version + '.js';
+ if (File.exists(this.target)) {
+ print("Target file already exists: " + this.target);
+ Seed.quit();
+ }
+ this.prefix = dirname +'/';
+ this.translateJSON = dirname + '/compiled/_translation_.js';
+
+ }
+
+ print(this.translateJSON);
this.timer = new Date() * 1;
- this.translate = true;
+ this.packAll();
+
+
+
+
}
Packer.prototype = {
+ /**
+     * @cfg {Array} srcfiles files containing a list of files/or classes to use.
+ */
+    srcfiles : false,
+
+ /**
+ * @cfg {Array} files list of files to compress (must be full path)
+ */
+ files : false,
+ /**
+ * @cfg {String} target to write files to - must be full path.
+ */
+ target : '',
+ /**
+ * @cfg {Boolean} autoBuild - turn on autobuild feature (puts files in compiled directory,
+ * and enables translation toolkit.
+ */
+ autoBuild : false,
+ /**
+ * @cfg {String} module used with autoBuild to force a file name
+ */
+ module: false,
+ /**
+ * @cfg {String} debugTarget target to write files debug version to (uncompacted)- must be full path.
+ */
+ debugTarget : '', // merged file without compression.
+ /**
+ * @cfg {String} debugTranslateTarget target to write files debug version
+ * to (uncompacted) but with translation- must be full path.
+ */
- bpath : '',
+ debugTranslateTarget : '',
- // set to false to stop translation happening..
+ /**
+ * @cfg {String} tmpDir (optional) where to put the temporary files.
+ * if you set this, then files will not be cleaned up
+ */
+ tmpDir : '/tmp',
+ translateJSON : '', // json based list of strings in all files.
+
/**
- * Pack the files.
- *
- * @param {String} batch_path location of batched temporary min files.
- * @param {String} compressed_file eg. roo-all.js
- * @param {String} debug_file eg. roo-debug.js
+ * @cfg {Boolean} cleanup (optional) clean up temp files after done -
+ * Defaults to false if you set tmpDir, otherwise true.
+ */
+ cleanup : true,
+ /**
+ * @cfg {Boolean} keepWhite (optional) do not remove white space in output.
+     * useful for debugging compressed files.
+ */
+
+ keepWhite: true,
+
+ /**
+ * @cfg {String} prefix (optional) prefix of directory to be stripped of when
+ * Calculating md5 of filename
+ */
+ prefix : '',
+ out : '', // if no target is specified - then this will contain the result
+
+ /**
+     * load a dependency list -f option
+ * @param {String} srcfile sourcefile to parse
*
*/
- packFiles : function(bpath, allfile, debugfile) {
- var str;
- var spath = this.spath;
- var files = this.files;
- this.bpath = bpath;
- // old var names - fixme..
- var dout = debugfile;
- //File.write(dout, "");
-
- var outpath = allfile;
-
- var transfile = bpath + '/_translation_.js';
+ loadSourceFile : function(srcfile)
+ {
+ var lines = File.read(srcfile).split("\n");
+ var _this = this;
+ lines.forEach(function(f) {
+
+ if (/^\s*\//.test(f) || !/[a-z]+/i.test(f)) { // skip comments..
+ return;
+ }
+ if (/\.js$/.test(f)) {
+ _this.files.push( f);
+ // js file..
+ return;
+ }
+
+ //println("ADD"+ f.replace(/\./g, '/'));
+ var add = f.replace(/\./g, '/').replace(/\s+/g,'')+'.js';
+ if (_this.files.indexOf(f) > -1) {
+ return;
+ }
+ _this.files.push( add );
+
+ })
+ },
+
+
+ packAll : function() // do the packing (run from constructor)
+ {
+
//this.transOrigFile= bpath + '/../lang.en.js'; // needs better naming...
//File.write(this.transfile, "");
- File.write(dout, "");
- File.write(allfile, "");
- for(var i=0; i < files.length; i++) {
+ if (this.target) {
+ File.write(this.target, "");
+ }
+
+ if (this.debugTarget) {
+ File.write(this.debugTarget, "");
+ }
+ if (this.debugTranslateTarget) {
+            File.write(this.debugTranslateTarget, "");
+ }
+
+ for(var i=0; i < this.files.length; i++) {
+ var file = this.files[i];
- print("reading " +files[i] );
- if (!File.exists(files[i])) {
- print("SKIP (does not exist) " + files[i]);
+ print("reading " +file );
+ if (!File.isFile(file)) {
+ print("SKIP (is not a file) " + file);
continue;
}
+ // debug Target
- File.append(dout, File.read(files[i]));
+ if (this.debugTarget) {
+ File.append(this.debugTarget, File.read(file));
+ }
// it's a good idea to check with 0 compression to see if the code can parse!!
// debug file..
- var minfile = bpath + '/' +files[i].substr(spath.length+1).replace(/\//g, '.');
- var transfile = bpath + '/' +files[i].substr(spath.length+1).replace(/\//g, '.') +'.lang';
+ var minfile = this.tmpDir + '/' +file.replace(/\//g, '.');
+
+
// let's see if we have a min file already?
+ // this might happen if tmpDir is set ..
if (true && File.exists(minfile)) {
var mt = File.mtime(minfile);
- var ot = File.mtime(files[i]);
+ var ot = File.mtime(file);
print("compare : " + mt + "=>" + ot);
if (mt >= ot) {
continue;
- /*
- // then the min'files time is > than original..
- var str = File.read(minfile);
- print("using MIN FILE "+ minfile);
- if (str.length) {
- File.append(outpath, str + "\n");
- }
- continue;
- */
}
}
-
+
print("COMPRESSING ");
//var codeComp = pack(str, 10, 0, 0);
- var str = File.read(files[i]);
- var str = this.pack(str, files[i], minfile);
- if (str.length) {
- File.write(minfile, str);
+ if (File.exists(minfile)) {
+ File.remove(minfile);
}
-
+ var str = File.read(file);
+ var str = this.pack(str, file, minfile);
-
- //var str = File.read(minfile);
- //print("using MIN FILE "+ minfile);
- //File.append(outpath, str + "\n");
- //this.timerPrint("Wrote Files");
- /*
- if (codeComp.length) {
- //print(codeComp);
-
- File.append(outpath, codeComp+"\n");
- File.write(minfile, codeComp);
- }
- */
- //print(codeComp);
- // if (i > 10) return;
- }
- if (this.translate) {
+
+ }
+
+
+
+ // if we are translating, write the translations strings at the top
+ // of the file..
+
+ if (this.translateJSON) {
print("MERGING LANGUAGE");
- File.write(outpath, "if (typeof(_T) == 'undefined') { _T={};}\n");
-
- var transfileAll = bpath + '/_translation_.js';
- File.write(transfileAll, "");
- for(var i=0; i < files.length; i++) {
- var transfile= bpath + '/' +files[i].substr(spath.length+1).replace(/\//g, '.') +'.lang.trans';
- var transmd5 = bpath + '/' +files[i].substr(spath.length+1).replace(/\//g, '.') +'.lang';
+ var out = "if (typeof(_T) == 'undefined') { _T={};}\n"
+ if (this.target) {
+ File.write(this.target, out);
+ } else {
+ this.out += out;
+ }
+
+ File.write(this.translateJSON, "");
+ for(var i=0; i < this.files.length; i++) {
+ var file = this.files[i];
+ var transfile= this.tmpDir + '/' +file.replace(/\//g, '.') +'.lang.trans';
+ var transmd5 = this.tmpDir + '/' +file.replace(/\//g, '.') +'.lang';
if (File.exists(transmd5)) {
var str = File.read(transmd5);
if (str.length) {
- File.append(outpath, str + "\n");
+ if (this.target) {
+ File.append(this.target, str + "\n");
+ } else {
+ this.out += str + "\n";
+ }
+
+ }
+ if (this.cleanup) {
+ File.remove(transmd5);
}
}
if (File.exists(transfile)) {
var str = File.read(transfile);
if (str.length) {
- File.append(transfileAll, str);
+ File.append(this.translateJSON, str);
+ }
+ if (this.cleanup) {
+ File.remove(transfile);
}
}
}
}
+
print("MERGING SOURCE");
- for(var i=0; i < files.length; i++) {
-
- var minfile = bpath + '/' +files[i].substr(spath.length+1).replace(/\//g, '.');
+ for(var i=0; i < this.files.length; i++) {
+ var file = this.files[i];
+ var minfile = this.tmpDir + '/' + file.replace(/\//g, '.');
+
+
if (!File.exists(minfile)) {
continue;
}
var str = File.read(minfile);
print("using MIN FILE "+ minfile);
if (str.length) {
- File.append(outpath, str + "\n");
+ if (this.target) {
+ File.append(this.target, '//' + file + "\n");
+ File.append(this.target, str + "\n");
+ } else {
+ this.out += '//' + file + "\n";
+ this.out += str + "\n";
+ }
+
+ }
+ if (this.cleanup) {
+ File.remove(minfile);
}
+
}
+ print("Output file: " + this.target);
+ if (this.debugTarget) print("Output debug file: " + this.debugTarget);
-
- //File.append(dout, "\n");// end the function
-
+
},
pack : function (str,fn,minfile)
{
- var tr = new TokenReader();
+ var tr = new TokenReader( {
+ keepDocs :true,
+ keepWhite : true,
+ keepComments : true,
+ sepIdents : true,
+ collapseWhite : false,
+ filename : fn
+ });
this.timerPrint("START" + fn);
// we can load translation map here...
- var toks = tr.tokenize(str,false); // dont merge xxx + . + yyyy etc.
+ var toks = tr.tokenize(new TextStream(str)); // dont merge xxx + . + yyyy etc.
// at this point we can write a language file...
- if (this.translate) {
- this.writeTranslateFile(fn, minfile, tr.translateMap);
+ if (this.translateJSON) {
+
+ this.writeTranslateFile(fn, minfile, toks);
}
this.activeFile = fn;
// and replace if we are generating a different language..
-
-
-
this.timerPrint("Tokenized");
+ //var ts = new TokenStream(toks);
+ //print(JSON.stringify(toks, null,4 )); Seed.quit();
+ var ts = new Collapse(toks);
+ // print(JSON.stringify(ts.tokens, null,4 )); Seed.quit();
//return;//
- var sp = new ScopeParser(new TokenStream(toks, str.length));
+ var sp = new ScopeParser(ts);
this.timerPrint("Converted to Parser");
sp.packer = this;
sp.buildSymbolTree();
sp.mungeSymboltree();
this.timerPrint("Munged Sym tree");
print(sp.warnings.join("\n"));
- var out = JSDOC.CompressWhite(sp.ts, this);
this.timerPrint("Compressed");
+
+ var out = CompressWhite(new TokenStream(toks), this, this.keepWhite); // do not kill whitespace..
+
+
+ this.timerPrint("Compressed");
+
+ if (out.length) {
+ File.write(minfile, out);
+ this.timerPrint("Write (" + out.length + "bytes) " + minfile);
+ }
+
return out;
+
+
},
timerPrint: function (str) {
* -> this file will need inserting at the start of the application....
* -> we need to generate 2 files,
* -> a reference used to do the translation, and the _T file..
+ *
+ *
+ * We store the trsum on the token...
*
*/
- writeTranslateFile : function(fn, minfile, map)
+ writeTranslateFile : function(fn, minfile, toks)
{
+
+ var map = {}; // 'string=> md5sum'
+ var _this = this;
+ var t, last, next;
+
+
+ var tokfind = function (j,dir) {
+ while (1) {
+ if ((dir < 0) && (j < 0)) {
+ return false;
+ }
+ if ((dir > 0) && (j >= toks.length)) {
+ return false;
+ }
+ j += dir;
+ if (toks[j].type != 'WHIT') {
+ return toks[j];
+ }
+ }
+ return false;
+
+ }
+
+
+ for (var i=0;i<toks.length;i++) {
+
+ t = toks[i];
+ if (t.type != 'STRN') {
+ continue;
+ }
+ if (t.name != 'DOUBLE_QUOTE') {
+ continue;
+ }
+
+ last = tokfind(i,-1);
+ next = tokfind(i,+1);
+
+ // we have to ignore key values on objects
+
+ // defined by
+ // last == '{' or ',' and
+ // next == ':'
+
+ if (next &&
+ next.type == 'PUNC' &&
+ next.data == ':' &&
+ last &&
+ last.type == 'PUNC' &&
+ (last.data == ',' || last.data == '{')
+ ){
+ continue; // found object key... - we can not translate these
+ }
+
+ var sval = t.data.substring(1,t.data.length-1);
+ var ffn = fn.substring(_this.prefix.length);
+
+ t.trsum = _this.md5(ffn + '-' + sval);
+ map[sval] = t.trsum;
+
+
+
+ }
+
+
var transfile = minfile + '.lang.trans';
var transmd5 = minfile + '.lang';
+ print("writeTranslateFile " + transfile);
var i = 0;
var v = '';
if (File.exists(transfile)) {
if (!i ) {
return; // no strings in file...
}
- var ff = fn.split('/');
- var ffn = ff[ff.length-1];
+ var ffn = fn.substring(this.prefix.length);
- File.write(transfile, "\n" + ffn.toSource() + " : {");
+ File.write(transfile, "\n'" + ffn + "' : {");
var l = '';
var _tout = {}
File.write(transmd5, '');
for(v in map) {
- File.append(transfile, l + "\n\t \"" + v + '" : "' + v + '"');
+ if (!v.length) {
+ continue;
+ }
+ File.append(transfile, l + "\n\t\"" + v + "\" : \"" + v +"\"");
l = ',';
// strings are raw... - as the where encoded to start with!!!
- File.append(transmd5, '_T[' + (ffn + '-' + v).md5().toSource() + ']="'+v+"\";\n");
+ // so we should not need to encode them again.. - just wrap with "
+ File.append(transmd5, '_T["' + this.md5(ffn + '-' + v) + '"]="'+v+"\";\n");
}
File.append(transfile, "\n},"); // always one trailing..
return GLib.compute_checksum_for_string(GLib.ChecksumType.MD5, string, string.length);
- }
+ },
stringHandler : function(tok)
{
//print("STRING HANDLER");
// callback when outputing compressed file,
- if (!this.translate) {
+ var data = tok.data;
+ if (!this.translateJSON) {
// print("TURNED OFF");
- return tok.outData;
+ return data;
}
- if (tok.qc != '"') {
- return tok.outData;
+ if (tok.name == 'SINGLE_QUOTE') {
+ return data;
+ }
+
+ if (typeof(tok.trsum) == 'undefined') {
+ return data;
}
- var sval = tok.data.substring(1,tok.data.length-1);
+
+ return '_T["' + tok.trsum + '"]';
+
+ var sval = data.substring(1,data.length-1);
+ // we do not clean up... quoting here!??!!?!?!?!?
+
+
// blank with tabs or spaces..
//if (!sval.replace(new RegExp("(\\\\n|\\\\t| )+",'g'), '').length) {
// return tok.outData;
// }
+ var sval = tok.data.substring(1,data.length-1);
+ var fn = this.activeFile.substring(this.prefix.length);
-
- var ff = this.activeFile.split('/');
- var ffn = ff[ff.length-1];
- return '_T[' + (ffn + '-' + sval).md5().toSource() + ']';
+ return '_T["' + this.md5(fn + '-' + sval) + '"]';
}
-});
+};