*/
//public string tmpDir = "/tmp"; // FIXME??? in ctor?
-
-
- /**
- * @cfg {Boolean} cleanup (optional) clean up temp files after done -
- * Defaults to false if you set tmpDir, otherwise true.
- */
- public bool cleanup = false;
-
-
- /**
- * @cfg {Boolean} skipScope (optional) skip Scope parsing and replacement.
- * usefull for debugging...
- */
-
- public bool skipScope = false;
-
-
- /**
- * @cfg {Boolean} dumpTokens (optional) read the first file and dump the tokens.
- * usefull for debugging...
- */
-
- public bool dumpTokens = false;
-
+
+
// list of files to compile...
- Gee.ArrayList<string> files;
+ public Gee.ArrayList<string> files;
/**
* @cfg activeFile ??? used???
public string activeFile = "";
- /**
- * @cfg baseDir -- prefix the files listed in indexfiles with this.
- */
-
- public string baseDir = "";
-
-
+
public string outstr = ""; // if no target is specified - then this will contain the result
+ public PackerRun config;
-
- public Packer()
+	public Packer(PackerRun config)
 	{
+		this.config = PackerRun; // BUG(review): assigns the type name, not the parameter — should be `this.config = config;`
#if HAVE_JSON_GLIB
this.result = new Json.Object();
#else
var srcfile = in_srcfile;
if (srcfile[0] != '/') {
- srcfile = this.baseDir + in_srcfile;
+ // srcfile = PackerRun.opt_real_basedir + in_srcfile;
}
string str;
FileUtils.get_contents(srcfile,out str);
var add = f.replace(".", "/") + ".js";
if (add[0] != '/') {
- add = this.baseDir + add;
+ // add = PackerRun.opt_real_basedir + add;
}
if (this.files.contains(add)) {
print("COMPRESSING to %s\n", minfile);
//var codeComp = pack(str, 10, 0, 0);
- if (this.cleanup && FileUtils.test (minfile, FileTest.EXISTS)) {
- FileUtils.remove(minfile);
- }
+ // if (PackerRun.opt_clean_cache && FileUtils.test (minfile, FileTest.EXISTS)) {
+ // FileUtils.remove(minfile);
+ // }
if (!loaded_string) {
FileUtils.get_contents(file,out file_contents);
}
- if (this.dumpTokens || this.hasErrors("")) {
+		// if (PackerRun.opt_dump_tokens || this.hasErrors("")) {
- GLib.Process.exit(0);
- }
+ // GLib.Process.exit(0);
+ // }
print("MERGING SOURCE\n");
for(var i=0; i < this.files.size; i++) {
print("using MIN FILE %s\n", minfile);
if (str.length > 0) {
if (this.targetStream != null) {
- this.targetStream.write(("// " +
- ( (file.length > this.baseDir.length) ? file.substring(this.baseDir.length) : file ) +
- "\n").data);
- this.targetStream.write((str + "\n").data);
+// this.targetStream.write(("// " +
+// ( (file.length > PackerRun.opt_real_basedir.length) ? file.substring(PackerRun.opt_real_basedir.length) : file ) +
+// "\n").data);
+ this.targetStream.write(("// " + file + "\n").data);
+ this.targetStream.write((str + "\n").data);
} else {
this.outstr += "//" +
- ( (file.length > this.baseDir.length) ? file.substring(this.baseDir.length) : file ) + "\n";
- this.outstr += str + "\n";
+							file + "\n";
+
+ this.outstr += str + "\n";
}
}
- if (this.cleanup) {
- FileUtils.remove(minfile);
- }
+// if (PackerRun.opt_clean_cache) {
+// FileUtils.remove(minfile);
+// }
}
- if (this.cleanup) {
- FileUtils.remove(tmpDir);
- }
+// if (PackerRun.opt_clean_cache) {
+// FileUtils.remove(tmpDir);
+// }
if (this.target.length > 0 ) {
print("Output file: " + this.target);
TokenArray toks = tr.tokenize(new TextStream(str)); // dont merge xxx + . + yyyy etc.
- if (this.dumpTokens) {
- toks.dump();
- return "";
- //GLib.Process.exit(0);
- }
+// if (PackerRun.opt_dump_tokens) {
+// toks.dump();
+// return "";
+// //GLib.Process.exit(0);
+// }
this.activeFile = fn;
// print(JSON.stringify(ts.tokens, null,4 )); Seed.quit();
//return;//
- if (!this.skipScope) {
+// if (!PackerRun.opt_skip_scope) {
var sp = new ScopeParser(ts, this, fn);
//sp.packer = this;
//print(sp.warnings.join("\n"));
//(new TokenStream(toks.tokens)).dumpAll(""); GLib.Process.exit(1);
// compress works on the original array - in theory the replacements have already been done by now
- var outf = CompressWhite(new TokenStream(toks.tokens), this); // do not kill whitespace..
+// var outf = CompressWhite(new TokenStream(toks.tokens), this, PackerRun.opt_keep_whitespace); // do not kill whitespace..
- debug("RESULT: \n %s\n", outf);
+ // debug("RESULT: \n %s\n", outf);