x.target = "output.pathname.js"
x.debugTarget = "output.pathname.debug.js"
-
+
x.pack(); // writes files etc..
// List of source files queued for compilation.
- Gee.ArrayList<string> files;
+ public Gee.ArrayList<string> files;
/**
 * @cfg activeFile -- FIXME: appears unused; confirm before removing.
 */
public string activeFile = "";
-
-
- /**
- * @cfg baseDir -- prefix the files listed in indexfiles with this.
- */
-
- public string baseDir = "";
-
-
+
public string outstr = ""; // if no target is specified - then this will contain the result
+ public PackerRun config;
-
-
- public Packer()
+ public Packer(PackerRun config)
{
+ this.config = config;
#if HAVE_JSON_GLIB
this.result = new Json.Object();
#else
var srcfile = in_srcfile;
if (srcfile[0] != '/') {
- srcfile = this.baseDir + in_srcfile;
+ srcfile = config.opt_real_basedir + in_srcfile;
}
string str;
FileUtils.get_contents(srcfile,out str);
var add = f.replace(".", "/") + ".js";
if (add[0] != '/') {
- add = this.baseDir + add;
+ add = config.opt_real_basedir + add;
}
if (this.files.contains(add)) {
print("COMPRESSING to %s\n", minfile);
//var codeComp = pack(str, 10, 0, 0);
- if (PackerRun.opt_clean_cache && FileUtils.test (minfile, FileTest.EXISTS)) {
+ if (config.opt_clean_cache && FileUtils.test (minfile, FileTest.EXISTS)) {
FileUtils.remove(minfile);
}
if (!loaded_string) {
- if (PackerRun.opt_dump_tokens || this.hasErrors("")) {
+ if (config.opt_dump_tokens || this.hasErrors("")) {
GLib.Process.exit(0);
}
if (str.length > 0) {
if (this.targetStream != null) {
this.targetStream.write(("// " +
- ( (file.length > this.baseDir.length) ? file.substring(this.baseDir.length) : file ) +
+ ( (file.length > config.opt_real_basedir.length) ? file.substring(config.opt_real_basedir.length) : file ) +
"\n").data);
- this.targetStream.write((str + "\n").data);
+
+ this.targetStream.write((str + "\n").data);
} else {
this.outstr += "//" +
- ( (file.length > this.baseDir.length) ? file.substring(this.baseDir.length) : file ) + "\n";
- this.outstr += str + "\n";
+ ( (file.length > config.opt_real_basedir.length) ? file.substring(config.opt_real_basedir.length) : file ) + "\n";
+ this.outstr += "//" + file +"\n";
+
+ this.outstr += str + "\n";
}
}
- if (PackerRun.opt_clean_cache) {
+ if (config.opt_clean_cache) {
FileUtils.remove(minfile);
}
}
- if (PackerRun.opt_clean_cache) {
+ if (config.opt_clean_cache) {
FileUtils.remove(tmpDir);
}
TokenArray toks = tr.tokenize(new TextStream(str)); // dont merge xxx + . + yyyy etc.
- if (PackerRun.opt_dump_tokens) {
+ if (config.opt_dump_tokens) {
toks.dump();
return "";
//GLib.Process.exit(0);
// print(JSON.stringify(ts.tokens, null,4 )); Seed.quit();
//return;//
- if (!PackerRun.opt_skip_scope) {
+ if (!config.opt_skip_scope) {
var sp = new ScopeParser(ts, this, fn);
//sp.packer = this;
//print(sp.warnings.join("\n"));
//(new TokenStream(toks.tokens)).dumpAll(""); GLib.Process.exit(1);
// compress works on the original array - in theory the replacements have already been done by now
- var outf = CompressWhite(new TokenStream(toks.tokens), this, PackerRun.opt_keep_whitespace); // do not kill whitespace..
+ var outf = CompressWhite(new TokenStream(toks.tokens), this, config.opt_keep_whitespace); // do not kill whitespace..
- debug("RESULT: \n %s\n", outf);
+ // debug("RESULT: \n %s\n", outf);