XObject = imports.XObject.XObject;
File = imports.File.File;
-TokenReader = imports['JSDOC/TokenReader.js'].TokenReader;
-ScopeParser = imports['JSDOC/ScopeParser.js'].ScopeParser;
-TokenStream = imports['JSDOC/TokenStream.js'].TokenStream;
-CompressWhite = imports['JSDOC/CompressWhite.js'].CompressWhite;
+TextStream = imports.TextStream.TextStream;
+TokenReader = imports.TokenReader.TokenReader;
+ScopeParser = imports.ScopeParser.ScopeParser;
+TokenStream = imports.TokenStream.TokenStream;
+CompressWhite = imports.CompressWhite.CompressWhite;
+Collapse = imports.Collapse.Collapse;
GLib = imports.gi.GLib;
/**
* @class Packer
* Create a new packer
*
+ * Use with pack.js
+ *
+ *
* Usage:
* <code>
*
Packer = imports['JSDOC/Packer.js'].Packer;
-var x = new Packer(
- [ "/location/of/file1.js", "/location/of/file2.js", ... ],
- "/location/of"
+var x = new Packer({
+
+ files : [ "/location/of/file1.js", "/location/of/file2.js", ... ],
+ target : "/tmp/output.js",
+ debugTarget : "/tmp/output.debug.js", // merged file without compression.
+ translateJSON: "/tmp/translate.json",
+
+
);
x.packFiles(
"/location/of/temp_batch_dir",
* directly before an eval statement, it will compress all the code around the eval,
* and not rename the variables 'avarname'
*
- * Dont try running this on a merged uncompressed large file - it's horrifically slow.
+ * Don't try running this on a merged uncompressed large file - it used to be horrifically slow; not sure about now.
 * Best to use lots of small classes, and use it to merge, as it will cache the compaction
*
*
*
- * @param {Array} files List of Files - MUST BE WITH ABSOLUTE PATH eg. [ '/usr/src/xyz/abc.js', .... ]
- * @param {String} source_path top level directory of source (used to work out the relative names for the minimized temp files)
+ * Notes for translation
+ * - translation relies on you using double quotes for strings if they need translating
+ * - single quoted strings are ignored.
+ *
+ * Generation of indexFiles
+ * - translateIndex = the indexfile
+ *
+ *
+ *
+ *
+
*/
/**
 * Create a packer and immediately run it.
 *
 * @param {Object} cfg configuration - see the prototype properties below
 *   (files, target, debugTarget, translateJSON, tmpDir, cleanup, prefix, ...).
 *   All keys are copied straight onto the instance.
 * @throws {String} "No Files" when no file list could be determined.
 */
Packer = function(cfg)
{
    // copy all configuration keys onto the instance
    XObject.extend(this, cfg);

    // NOTE(review): the prototype documents this property as `srcfiles`
    // (plural) but the constructor and loadSourceFile read `srcfile` —
    // confirm which spelling callers actually use before renaming either.
    if (this.srcfile) {
        this.loadSourceFile();
    }

    if (!this.files) {
        // NOTE(review): kept as a bare string throw for compatibility with
        // existing callers; `throw new Error("No Files")` would be idiomatic.
        throw "No Files";
    }

    this.timer = new Date() * 1; // ms timestamp consumed by timerPrint()
    this.packAll();              // the constructor does all the work
}
Packer.prototype = {
+ /**
+ * @prop srcfiles {String} file containing a list of files/or classes to use.
+ */
+ srcfiles : false,
- bpath : '',
+ /**
+ * @prop files {Array} list of files to compress (must be full path)
+ */
+ files : false,
+ /**
+ * @prop target {String} target to write files to - must be full path.
+ */
+ target : '',
+ /**
+ * @prop debugTarget {String} target to write files debug version to (uncompacted)- must be full path.
+ */
+ debugTarget : '', // merged file without compression.
+ /**
+ * @prop tmpDir {String} (optional) where to put the temporary files.
+ * if you set this, then files will not be cleaned up
+ */
+ tmpDir : '/tmp',
- // set to false to stop translation happening..
+ translateJSON : '', // json based list of strings in all files.
+
+ /**
+ * @prop cleanup {Boolean} (optional) clean up temp files after done -
+ * Defaults to false if you set tmpDir, otherwise true.
+ */
+ cleanup : true,
/**
- * Pack the files.
- *
- * @param {String} batch_path location of batched temporary min files.
- * @param {String} compressed_file eg. roo-all.js
- * @param {String} debug_file eg. roo-debug.js
- *
+ * @prop prefix {String} (optional) prefix of directory to be stripped of when
+ * Calculating md5 of filename
*/
+ prefix : '',
+ out : '', // if no target is specified - then this will contain the result
+
- packFiles : function(bpath, allfile, debugfile) {
- var str;
- var spath = this.spath;
- var files = this.files;
- this.bpath = bpath;
- // old var names - fixme..
- var dout = debugfile;
- //File.write(dout, "");
+ loadSourceFile : function()
+ {
+ var lines = File.read(this.srcfile).split("\n");
+ var _this = this;
+ lines.forEach(function(f) {
+
+ if (/^\s*\//.test(f) || !/[a-z]+/i.test(f)) { // skip comments..
+ return;
+ }
+ if (/\.js$/.test(f)) {
+ _this.files.push( f);
+ // js file..
+ return;
+ }
+
+ //println("ADD"+ f.replace(/\./g, '/'));
+ var add = f.replace(/\./g, '/').replace(/\s+/g,'')+'.js';
+ if (_this.files.indexOf(f) > -1) {
+ return;
+ }
+ _this.files.push( add );
+
+ })
+ },
+
+
+ packAll : function() // do the packing (run from constructor)
+ {
- var outpath = allfile;
-
- var transfile = bpath + '/_translation_.js';
//this.transOrigFile= bpath + '/../lang.en.js'; // needs better naming...
//File.write(this.transfile, "");
- File.write(dout, "");
- File.write(allfile, "");
- for(var i=0; i < files.length; i++) {
+ if (this.target) {
+ File.write(this.target, "");
+ }
+
+ if (this.debugTarget) {
+ File.write(this.debugTarget, "");
+ }
+
+ for(var i=0; i < this.files.length; i++) {
+ var file = this.files[i];
- print("reading " +files[i] );
- if (!File.exists(files[i])) {
- print("SKIP (does not exist) " + files[i]);
+ print("reading " +file );
+ if (!File.isFile(file)) {
+ print("SKIP (is not a file) " + file);
continue;
}
-
- File.append(dout, File.read(files[i]));
+ if (this.debugTarget) {
+ File.append(this.debugTarget, File.read(file));
+ }
// it's a good idea to check with 0 compression to see if the code can parse!!
// debug file..
- var minfile = bpath + '/' +files[i].substr(spath.length+1).replace(/\//g, '.');
- var transfile = bpath + '/' +files[i].substr(spath.length+1).replace(/\//g, '.') +'.lang';
+ var minfile = this.tmpDir + '/' +file.replace(/\//g, '.');
+
+
// let's see if we have a min file already?
+ // this might happen if tmpDir is set ..
if (true && File.exists(minfile)) {
var mt = File.mtime(minfile);
- var ot = File.mtime(files[i]);
+ var ot = File.mtime(file);
print("compare : " + mt + "=>" + ot);
if (mt >= ot) {
continue;
print("COMPRESSING ");
//var codeComp = pack(str, 10, 0, 0);
- var str = File.read(files[i]);
- var str = this.pack(str, files[i], minfile);
+ if (File.exists(minfile)) {
+ File.remove(minfile);
+ }
+ var str = File.read(file);
+ var str = this.pack(str, file, minfile);
if (str.length) {
- File.write(minfile, str);
+ File.write(minfile, str);
}
-
- //var str = File.read(minfile);
- //print("using MIN FILE "+ minfile);
- //File.append(outpath, str + "\n");
- //this.timerPrint("Wrote Files");
- /*
- if (codeComp.length) {
- //print(codeComp);
-
- File.append(outpath, codeComp+"\n");
- File.write(minfile, codeComp);
- }
- */
- //print(codeComp);
- // if (i > 10) return;
+
}
- if (this.translate) {
+ if (this.translateJSON) {
print("MERGING LANGUAGE");
- File.write(outpath, "if (typeof(_T) == 'undefined') { _T={};}\n");
+ var out = "if (typeof(_T) == 'undefined') { _T={};}\n"
+ if (this.target) {
+ File.write(this.target, out);
+ } else {
+ this.out += out;
+ }
+
- var transfileAll = bpath + '/_translation_.js';
- File.write(transfileAll, "");
- for(var i=0; i < files.length; i++) {
- var transfile= bpath + '/' +files[i].substr(spath.length+1).replace(/\//g, '.') +'.lang.trans';
- var transmd5 = bpath + '/' +files[i].substr(spath.length+1).replace(/\//g, '.') +'.lang';
+
+ File.write(this.translateJSON, "");
+ for(var i=0; i < this.files.length; i++) {
+ var file = this.files[i];
+ var transfile= this.tmpDir + '/' +file.replace(/\//g, '.') +'.lang.trans';
+ var transmd5 = this.tmpDir + '/' +file.replace(/\//g, '.') +'.lang';
if (File.exists(transmd5)) {
var str = File.read(transmd5);
if (str.length) {
- File.append(outpath, str + "\n");
+ if (this.target) {
+ File.append(this.target, str + "\n");
+ } else {
+ this.out += str + "\n";
+ }
+
+ }
+ if (this.cleanup) {
+ File.remove(transmd5);
}
}
if (File.exists(transfile)) {
var str = File.read(transfile);
if (str.length) {
- File.append(transfileAll, str);
+ File.append(this.translateJSON, str);
+ }
+ if (this.cleanup) {
+ File.remove(transfile);
}
}
}
}
+
print("MERGING SOURCE");
- for(var i=0; i < files.length; i++) {
-
- var minfile = bpath + '/' +files[i].substr(spath.length+1).replace(/\//g, '.');
+ for(var i=0; i < this.files.length; i++) {
+ var file = this.files[i];
+ var minfile = this.tmpDir + '/' + file.replace(/\//g, '.');
+
+
if (!File.exists(minfile)) {
continue;
}
var str = File.read(minfile);
print("using MIN FILE "+ minfile);
if (str.length) {
- File.append(outpath, str + "\n");
+ if (this.target) {
+ File.append(this.target, str + "\n");
+ } else {
+ this.out += str + "\n";
+ }
+
+ }
+ if (this.cleanup) {
+ File.remove(minfile);
}
+
}
-
- //File.append(dout, "\n");// end the function
-
+
},
pack : function (str,fn,minfile)
{
- var tr = new TokenReader();
+ var tr = new TokenReader( {
+ keepDocs :true,
+ keepWhite : true,
+ keepComments : true,
+ sepIdents : true,
+ collapseWhite : false
+ });
this.timerPrint("START" + fn);
// we can load translation map here...
- var toks = tr.tokenize(str,false); // dont merge xxx + . + yyyy etc.
+ var toks = tr.tokenize(new TextStream(str)); // dont merge xxx + . + yyyy etc.
// at this point we can write a language file...
- if (this.translate) {
- this.writeTranslateFile(fn, minfile, tr.translateMap);
+ if (this.translateJSON) {
+
+ this.writeTranslateFile(fn, minfile, toks);
}
this.activeFile = fn;
// and replace if we are generating a different language..
-
-
-
this.timerPrint("Tokenized");
+ //var ts = new TokenStream(toks);
+ //print(JSON.stringify(toks, null,4 )); Seed.quit();
+ var ts = new Collapse(toks);
+ // print(JSON.stringify(ts.tokens, null,4 )); Seed.quit();
//return;//
- var sp = new ScopeParser(new TokenStream(toks, str.length));
+ var sp = new ScopeParser(ts);
this.timerPrint("Converted to Parser");
sp.packer = this;
sp.buildSymbolTree();
sp.mungeSymboltree();
this.timerPrint("Munged Sym tree");
print(sp.warnings.join("\n"));
- var out = JSDOC.CompressWhite(sp.ts, this);
+
+
+ var out = CompressWhite(new TokenStream(toks), this, true);
+ //var out = CompressWhite(new TokenStream(toks), this, false);
this.timerPrint("Compressed");
return out;
+
+
},
    // NOTE(review): this hunk is truncated — timerPrint's body and the opening
    // of the following doc comment were elided as unchanged patch context; the
    // lines below cannot be interpreted on their own.
    timerPrint: function (str) {
 *
 */
    // NOTE(review): patch fragment — context lines are missing between
    // `if (File.exists(transfile)) {` and `if (!i ) {`, so the code that reads
    // the cached .lang.trans file and sets `i` is not visible here. The built
    // `map` is also not used in the visible lines — presumably it is consumed
    // in the elided section; verify against the upstream file.
-    writeTranslateFile : function(fn, minfile, map)
+    writeTranslateFile : function(fn, minfile, toks)
    {
+
+        // map: double-quoted string value -> md5(relative-filename + '-' + value)
+        var map = {};
+        var _this = this;
+        toks.forEach(function (t) {
+            if (t.type == 'STRN' && t.name == 'DOUBLE_QUOTE') {
+                var sval = t.data.substring(1,t.data.length-1);
+                var ffn = fn.substring(_this.prefix.length);
+                map[sval] = _this.md5(ffn + '-' + sval);
+            }
+        })
+
        var transfile = minfile + '.lang.trans';
        var transmd5 = minfile + '.lang';
+        print("writeTranslateFile " + transfile);
        var i = 0;
        var v = '';
        if (File.exists(transfile)) {
            // [elided context] — presumably `i` is set here to the number of
            // cached strings read back from transfile; confirm upstream.
            if (!i ) {
                return; // no strings in file...
            }
-            var ff = fn.split('/');
-            var ffn = ff[ff.length-1];
+            var ffn = fn.substring(this.prefix.length);
-            File.write(transfile, "\n" + ffn.toSource() + " : {");
+            File.write(transfile, "\n'" + ffn + "' : {");
            var l = '';
            var _tout = {}
    // NOTE(review): patch fragment — the function header for this block was
    // elided as unchanged context. Judging by the body it is the string-handler
    // callback invoked by the scope parser for each string literal token `tok`,
    // returning the replacement text for the compressed output — confirm
    // against the upstream file.
    {
        //print("STRING HANDLER");
        // callback when outputing compressed file,
-        if (!this.translate) {
+        var data = tok.data;
+        if (!this.translateJSON) {
            // print("TURNED OFF");
-            return tok.outData;
+            return data;
        }
-        if (tok.qc != '"') {
-            return tok.outData;
+        if (tok.name == 'SINGLE_QUOTE') {
+            // single-quoted strings are never translated
+            return data;
        }
-        var sval = tok.data.substring(1,tok.data.length-1);
+
+        var sval = data.substring(1,data.length-1);
+        // we do not clean up... quoting here!??!!?!?!?!?
+
+
        // blank with tabs or spaces..
        //if (!sval.replace(new RegExp("(\\\\n|\\\\t| )+",'g'), '').length) {
        //    return tok.outData;
        //  }
        // NOTE(review): this redeclaration shadows the `sval` above with the
        // same value — one of the two is redundant.
+        var sval = tok.data.substring(1,data.length-1);
+        var fn = this.activeFile.substring(this.prefix.length);
-
-        var ff = this.activeFile.split('/');
-        var ffn = ff[ff.length-1];
-        return '_T["' + this.md5(ffn + '-' + sval) + '"]';
+        return '_T["' + this.md5(fn + '-' + sval) + '"]';
    }
-});
+};