[med-svn] [sprai] 01/02: Imported Upstream version 0.9.9.12+dfsg
Afif Elghraoui
afif at moszumanska.debian.org
Wed Apr 13 07:08:56 UTC 2016
This is an automated email from the git hooks/post-receive script.
afif pushed a commit to branch master
in repository sprai.
commit 297aabe048e62c5abffcab111698f39e398eae17
Author: Afif Elghraoui <afif at ghraoui.name>
Date: Tue Apr 12 22:02:26 2016 -0700
Imported Upstream version 0.9.9.12+dfsg
---
LICENSE.txt | 21 +
bfmt72s.c | 1015 ++++++++++++++++++++++
bfmtx2m4.pl | 84 ++
ca_ikki_v5.pl | 228 +++++
check_circularity.pl | 168 ++++
check_redundancy.pl | 74 ++
col2fqcell.h | 359 ++++++++
configure | 119 +++
dfq2fq_v2.pl | 329 ++++++++
doc/_build/html/.buildinfo | 4 +
doc/_build/html/_sources/Contact.txt | 12 +
doc/_build/html/_sources/Download.txt | 38 +
doc/_build/html/_sources/Example.txt | 128 +++
doc/_build/html/_sources/FAQ.txt | 31 +
doc/_build/html/_sources/README.txt | 353 ++++++++
doc/_build/html/_sources/index.txt | 61 ++
dumbbell_filter.pl | 165 ++++
ec.spec | 52 ++
extract_fq.pl | 141 ++++
ezez4qsub_vx1.pl | 1496 +++++++++++++++++++++++++++++++++
ezez_vx1.pl | 727 ++++++++++++++++
fa2fq.pl | 214 +++++
fq2fa.pl | 59 ++
fq2idfq.pl | 161 ++++
fqfilt.pl | 58 ++
get_target_fasta_records.pl | 98 +++
get_top_20x_fa.pl | 381 +++++++++
m52bfmt7.c | 151 ++++
makefile | 56 ++
myrealigner.c | 1332 +++++++++++++++++++++++++++++
nss2v_v3.c | 1098 ++++++++++++++++++++++++
partition_fa.pl | 143 ++++
pbasm.spec | 86 ++
waf | 164 ++++
waflib/Build.py | 769 +++++++++++++++++
waflib/ConfigSet.py | 152 ++++
waflib/Configure.py | 317 +++++++
waflib/Context.py | 319 +++++++
waflib/Errors.py | 37 +
waflib/Logs.py | 176 ++++
waflib/Node.py | 466 ++++++++++
waflib/Options.py | 135 +++
waflib/Runner.py | 197 +++++
waflib/Scripting.py | 373 ++++++++
waflib/Task.py | 677 +++++++++++++++
waflib/TaskGen.py | 400 +++++++++
waflib/Tools/__init__.py | 4 +
waflib/Tools/ar.py | 11 +
waflib/Tools/asm.py | 25 +
waflib/Tools/bison.py | 28 +
waflib/Tools/c.py | 24 +
waflib/Tools/c_aliases.py | 55 ++
waflib/Tools/c_config.py | 728 ++++++++++++++++
waflib/Tools/c_osx.py | 120 +++
waflib/Tools/c_preproc.py | 604 +++++++++++++
waflib/Tools/c_tests.py | 153 ++++
waflib/Tools/ccroot.py | 391 +++++++++
waflib/Tools/compiler_c.py | 39 +
waflib/Tools/compiler_cxx.py | 39 +
waflib/Tools/compiler_d.py | 29 +
waflib/Tools/compiler_fc.py | 43 +
waflib/Tools/cs.py | 132 +++
waflib/Tools/cxx.py | 26 +
waflib/Tools/d.py | 54 ++
waflib/Tools/d_config.py | 52 ++
waflib/Tools/d_scan.py | 133 +++
waflib/Tools/dbus.py | 29 +
waflib/Tools/dmd.py | 51 ++
waflib/Tools/errcheck.py | 161 ++++
waflib/Tools/fc.py | 116 +++
waflib/Tools/fc_config.py | 285 +++++++
waflib/Tools/fc_scan.py | 68 ++
waflib/Tools/flex.py | 32 +
waflib/Tools/g95.py | 55 ++
waflib/Tools/gas.py | 12 +
waflib/Tools/gcc.py | 97 +++
waflib/Tools/gdc.py | 36 +
waflib/Tools/gfortran.py | 69 ++
waflib/Tools/glib2.py | 173 ++++
waflib/Tools/gnu_dirs.py | 65 ++
waflib/Tools/gxx.py | 97 +++
waflib/Tools/icc.py | 30 +
waflib/Tools/icpc.py | 29 +
waflib/Tools/ifort.py | 49 ++
waflib/Tools/intltool.py | 77 ++
waflib/Tools/irixcc.py | 48 ++
waflib/Tools/javaw.py | 311 +++++++
waflib/Tools/kde4.py | 48 ++
waflib/Tools/ldc2.py | 37 +
waflib/Tools/lua.py | 18 +
waflib/Tools/msvc.py | 726 ++++++++++++++++
waflib/Tools/nasm.py | 14 +
waflib/Tools/perl.py | 80 ++
waflib/Tools/python.py | 340 ++++++++
waflib/Tools/qt4.py | 437 ++++++++++
waflib/Tools/ruby.py | 103 +++
waflib/Tools/suncc.py | 53 ++
waflib/Tools/suncxx.py | 54 ++
waflib/Tools/tex.py | 250 ++++++
waflib/Tools/vala.py | 201 +++++
waflib/Tools/waf_unit_test.py | 95 +++
waflib/Tools/winres.py | 85 ++
waflib/Tools/xlc.py | 45 +
waflib/Tools/xlcxx.py | 45 +
waflib/Utils.py | 412 +++++++++
waflib/__init__.py | 4 +
waflib/ansiterm.py | 177 ++++
waflib/extras/__init__.py | 4 +
waflib/extras/compat15.py | 220 +++++
waflib/fixpy2.py | 53 ++
wscript | 148 ++++
111 files changed, 21853 insertions(+)
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..334ca10
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2013- Takamasa Imai, Tomoaki Nishiyama and Masahiro Kasahara
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/bfmt72s.c b/bfmt72s.c
new file mode 100644
index 0000000..b14c3b9
--- /dev/null
+++ b/bfmt72s.c
@@ -0,0 +1,1015 @@
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+
+#define LAUX 32
+
+int LSEQ = 4096;
+int LBUF;/* = (3*LSEQ); */
+int NUM_ALIGNMENTS = 512;
+
+int maxeval = 1.0;
+
+typedef struct cigar_t{
+ int num;
+ char sym;
+}cigar_t;
+
+typedef struct cmaux_t{
+ char sym;
+ int num;
+}cmaux_t;
+
+typedef struct sam_t{
+ char * qname;
+ int flag;
+ char * rname;
+ int pos;
+ int mapq;
+ struct cigar_t * cigar;
+ int cigar_length;
+ int cigar_capacity;
+ char * rnext;
+ int pnext;
+ int tlen;
+ char * seq;
+ char * qual;
+ double as;/* bit score */
+ double ev;/* e-value */
+ double pident;/* percentage of identical matches */
+}sam_t;
+
+typedef struct region{
+ int left;
+ int right;
+}region_t;
+
+typedef struct blast{
+ int sstart,send,qstart,qend;
+ double bitscore,evalue,pident;
+}blast_t;
+
+int opt_invsam=0;
+int opt_unique_ref=0;
+int opt_unique_que=0;
+double valid_evalue=-1.0;
+int opt_chimeric_filter=0;
+int opt_dummy_qv=0;
+int opt_unique_bfmt7=0;
+int opt_m5=0;
+int opt_graph=0;
+int opt_top_hit_only=0;
+int opt_pctidy=0;
+int opt_repeat=0;
+
+void print_sam(sam_t * s){
+ printf("%s",s->qname);
+ printf("\t");
+ printf("%d",s->flag);
+ printf("\t");
+ printf("%s",s->rname);
+ printf("\t");
+ printf("%d",s->pos);
+ printf("\t");
+ printf("%d",s->mapq);
+ printf("\t");
+ if(opt_graph){
+ printf("*");
+ }
+ else{
+ int i;
+ for(i=0; i<s->cigar_length; ++i){
+ printf("%d%c",s->cigar[i].num,s->cigar[i].sym);
+ }
+ }
+ printf("\t");
+ printf("%s",s->rnext);
+ printf("\t");
+ printf("%d",s->pnext);
+ printf("\t");
+ printf("%d",s->tlen);
+ printf("\t");
+ if(opt_graph){
+ printf("*");
+ }
+ else{
+ printf("%s",s->seq);
+ }
+ printf("\t");
+ if(opt_graph){
+ printf("*");
+ }
+ else{
+ printf("%s",s->qual);
+ }
+ printf("\t");
+ printf("AS:i:%d",(int)(s->as+0.499));/* already rounded? */
+ printf("\t");
+ if(s->ev > 0.0){
+ printf("EV:Z:%1.0e",s->ev);
+ }
+ else{
+ printf("EV:Z:%1.1f",s->ev);
+ }
+ printf("\t");
+ printf("PI:Z:%2.2f",s->pident);
+ printf("\n");
+ return;
+}
+
+void init_sam(sam_t * s, size_t LBUF, int LSEQ){
+	/* Because the string elements are read with sscanf from a string
+	 * of at most LBUF long, each string should have a capacity of LBUF.
+	 * This structure is not allocated for many instances.
+ * LSEQ is used to determine the size of cigar, which is not
+ * just a string.
+ */
+ s->qname = (char*)malloc(LBUF);
+ if(s->qname == NULL){
+ fprintf(stderr,"cannot allocate memory: qname\n");
+ abort();
+ }
+ s->rname = (char*)malloc(LBUF);
+ if(s->rname == NULL){
+ fprintf(stderr,"cannot allocate memory: rname\n");
+ abort();
+ }
+ s->cigar = (cigar_t*)malloc(sizeof(cigar_t)*LSEQ);
+ if(s->cigar == NULL){
+ fprintf(stderr,"cannot allocate memory: cigar\n");
+ abort();
+ }
+ s->cigar_capacity = LSEQ;
+ s->rnext = (char*)malloc(LBUF);
+ if(s->rnext == NULL){
+ fprintf(stderr,"cannot allocate memory: rnext\n");
+ abort();
+ }
+ s->seq = (char*)malloc(LBUF);
+ if(s->seq == NULL){
+ fprintf(stderr,"cannot allocate memory: seq\n");
+ abort();
+ }
+ s->qual = (char*)malloc(LBUF);
+ if(s->qual == NULL){
+ fprintf(stderr,"cannot allocate memory: qual\n");
+ abort();
+ }
+ return;
+}
+
+sam_t * realloc_sam(sam_t * s, size_t LBUF, int LSEQ){
+	/* Because the string elements are read with sscanf from a string
+	 * of at most LBUF long, each string should have a capacity of LBUF.
+	 * This structure is not allocated for many instances.
+ * LSEQ is used to determine the size of cigar, which is not
+ * just a string.
+ */
+ void *tmp_p;
+ tmp_p = realloc(s->qname, LBUF);
+ if(tmp_p == NULL){
+ fprintf(stderr,"cannot reallocate memory: qname\n");
+ fprintf(stderr,"LBUF: %lu, LSEQ: %d\n", (unsigned long) LBUF, LSEQ);
+ exit(EXIT_FAILURE);
+ }
+ s->qname = (char*)tmp_p;
+
+ tmp_p = realloc(s->rname, LBUF);
+ if(tmp_p == NULL){
+ fprintf(stderr,"cannot reallocate memory: rname\n");
+ fprintf(stderr,"LBUF: %lu, LSEQ: %d\n", (unsigned long) LBUF, LSEQ);
+ exit(EXIT_FAILURE);
+ }
+ s->rname = (char*)tmp_p;
+
+ tmp_p = realloc(s->cigar, sizeof(cigar_t)*LSEQ);
+ if(tmp_p == NULL){
+ fprintf(stderr,"cannot reallocate memory: cigar\n");
+ fprintf(stderr,"LBUF: %lu, LSEQ: %d\n", (unsigned long) LBUF, LSEQ);
+ exit(EXIT_FAILURE);
+ }
+ s->cigar = (cigar_t*)tmp_p;
+ s->cigar_capacity = LSEQ;
+
+ tmp_p = realloc(s->rnext, LBUF);
+ if(tmp_p == NULL){
+ fprintf(stderr,"cannot reallocate memory: rnext\n");
+ fprintf(stderr,"LBUF: %lu, LSEQ: %d\n", (unsigned long) LBUF, LSEQ);
+ exit(EXIT_FAILURE);
+ }
+ s->rnext = (char*) tmp_p;
+
+ tmp_p = realloc(s->seq, LBUF);
+ if(tmp_p == NULL){
+ fprintf(stderr,"cannot reallocate memory: seq\n");
+ fprintf(stderr,"LBUF: %lu, LSEQ: %d\n", (unsigned long) LBUF, LSEQ);
+ exit(EXIT_FAILURE);
+ }
+ s->seq = (char*) tmp_p;
+
+ tmp_p = realloc(s->qual, LBUF);
+ if(tmp_p == NULL){
+ fprintf(stderr,"cannot reallocate memory: qual\n");
+ fprintf(stderr,"LBUF: %lu, LSEQ: %d\n", (unsigned long) LBUF, LSEQ);
+ exit(EXIT_FAILURE);
+ }
+ s->qual = (char*) tmp_p;
+
+ return s;
+}
+
+void reset_cigar(sam_t * s){
+ s->cigar_length=0;
+}
+
+void reset_sam(sam_t * s){
+ s->qname[0] = '\0';
+ s->rname[0] = '\0';
+ s->rnext[0] = '\0';
+ s->cigar_length=0;
+ s->seq[0] = '\0';
+ s->qual[0] = '\0';
+ return;
+}
+
+void free_sam(sam_t * s){
+ free(s->qname);
+ free(s->rname);
+ free(s->cigar);
+ free(s->rnext);
+ free(s->seq);
+ free(s->qual);
+ return;
+}
+
+void blast_print_sam(sam_t * sam, cmaux_t * cmaux, blast_t * blast, char** quals);
+void aln2cm(sam_t * sam, char * q, char * s, cmaux_t * cmaux);
+
+char fqname[1024];
+
+int header=0;
+
+int main(int argc, char ** argv)
+{
+ int hitnum=0;
+ char * in_blastn;
+ char * prev_ref_name;
+ char * prev_que_name;
+ sam_t sam;
+ char * query_name;
+ FILE * fp;
+ char * buf;
+ char * bufq;
+ region_t * regions;
+ int region_index=0;
+ int scanfret;
+ blast_t blast;
+ char strand[255];
+ char ** quals = NULL;
+ int n_units=0;
+ LBUF = (3*LSEQ);
+
+ {
+ int result;
+ while((result=getopt(argc,argv,"iuUe:c:dbxgtprl:")) != -1){
+ switch(result){
+ case 'l':
+ LSEQ=atoi(optarg);
+ if(LSEQ < 1){
+ fprintf(stderr,"LSEQ>=1: %d is given.",LSEQ);
+ exit(1);
+ }
+ hitnum+=2;
+ break;
+ case 'r':
+ opt_repeat=1;
+ header=1;
+ ++hitnum;
+ break;
+ case 'p':
+ opt_pctidy=1;
+ opt_top_hit_only=1;
+ header=1;
+ ++hitnum;
+ break;
+ case 't':
+ opt_top_hit_only=1;
+ ++hitnum;
+ break;
+ case 'g':
+ opt_graph=1;
+ ++hitnum;
+ break;
+ case 'i':
+ opt_invsam=1;
+ ++hitnum;
+ break;
+ case 'u':
+ opt_unique_ref=1;
+ ++hitnum;
+ break;
+ case 'U':
+ opt_unique_que=1;
+ ++hitnum;
+ break;
+ case 'e':
+ valid_evalue=atof(optarg);
+ if(valid_evalue < 0.0){
+ fprintf(stderr,"e>=0.0: %f is given.",valid_evalue);
+ abort();
+ }
+ hitnum+=2;
+ break;
+ case 'c':
+ opt_chimeric_filter=atoi(optarg);
+ if(opt_chimeric_filter < 0){
+ fprintf(stderr,"c: %d is given.",opt_chimeric_filter);
+ fprintf(stderr,"\tmust be >= 0");
+ abort();
+ }
+ hitnum+=2;
+ break;
+ case 'd':
+ opt_dummy_qv=1;
+ ++hitnum;
+ break;
+ case 'b':
+ opt_unique_bfmt7=1;
+ ++hitnum;
+ break;
+ case 'x':
+ opt_m5=1;
+ opt_unique_bfmt7=1;
+ ++hitnum;
+ break;
+ case '?':
+ printf("humei\n");
+ break;
+ default:
+ break;
+ }
+ }
+ }
+ LBUF = (3*LSEQ);
+
+ if(argc != 2+hitnum){
+ char msg[] = "off";
+ fprintf(stderr, "USAGE: <this> <in.blastn | - >\n");
+ if(opt_invsam){
+ strcpy(msg,"on");
+ }
+ fprintf(stderr, "\t-i: regards blasted queries as references in the output sam (default: %s)\n",msg);
+ fprintf(stderr, "\t-U: avoids double voting\n");
+ fprintf(stderr, "\t-e <valid_evalue_threshold> : discards input records with more than the threshold\n");
+ fprintf(stderr, "\t-d: give dummy qvs to save memory\n");
+ fprintf(stderr, "\t-c <trim length> : trim both <trim length> bases of alignment against chimeric reads (default: 0)\n");
+ return 1;
+ }
+ if(opt_unique_ref + opt_unique_que > 1){
+ fprintf(stderr, "-u and -U are incompatible\n");
+ return 1;
+ }
+ if(opt_chimeric_filter && opt_graph){
+ opt_chimeric_filter = 0;
+ fprintf(stderr, "WARNING: because opt_graph was on, opt_chimeric_fileter was forced to be 0.\n");
+ }
+
+ in_blastn = argv[1+hitnum];
+ prev_ref_name = (char*)malloc(LBUF);
+ if(prev_ref_name == NULL){
+ fprintf(stderr,"cannot allocate memory: prev_ref_name\n");
+ abort();
+ }
+ prev_ref_name[0] = '\0';
+ prev_que_name = (char*)malloc(LBUF);
+ if(prev_que_name == NULL){
+ fprintf(stderr,"cannot allocate memory: prev_que_name\n");
+ abort();
+ }
+ prev_que_name[0] = '\0';
+ init_sam(&sam, LBUF, LSEQ);
+ reset_sam(&sam);
+ sam.qname[0]='\0';
+ sam.mapq = 255;
+ sam.rnext[0]='*';
+ sam.rnext[1]='\0';
+ sam.pnext=0;
+ sam.tlen=0;
+ query_name = (char*)malloc(LBUF);
+ if(query_name == NULL){
+ fprintf(stderr,"cannot allocate memory: query_name\n");
+ abort();
+ }
+ if(in_blastn[0] == '-'){
+ fp = stdin;
+ }
+ else{
+ fp = fopen(in_blastn,"r");
+ }
+ if(fp == NULL){
+ fprintf(stderr,"cannot open the file %s\n", in_blastn);
+ abort();
+ }
+ buf = (char*)malloc(LBUF);
+ if(buf == NULL){
+ fprintf(stderr, "cannot allocate memory: buf\n");
+ abort();
+ }
+ bufq = (char*)malloc(LBUF); /* to avoid overflow, the buffer should be as long as the line */
+ if(bufq == NULL){
+ fprintf(stderr, "cannot allocate memory: bufq\n");
+ abort();
+ }
+ regions = (region_t*)malloc(sizeof(region_t)*NUM_ALIGNMENTS);
+ if(regions == NULL){
+ fprintf(stderr, "cannot allocate memory: regions\n");
+ abort();
+ }
+ while(fgets(buf,LBUF,fp) != NULL){
+ cmaux_t cmaux;
+ int line_length;
+ line_length = strlen(buf);
+ if(line_length >= LBUF - 1){
+ /* The line is incompletely read if the buffer
+			 * is fully occupied and not ending with newline */
+ while(line_length >= LBUF - 1 && buf[line_length - 1] != '\n'){
+ char*newbuf;
+ newbuf = (char*)realloc(buf, LBUF*2);
+ if(!newbuf){
+ fputs("realloc for buf failed!\n", stderr);
+ exit(EXIT_FAILURE);
+ }
+ buf = newbuf;
+ fgets(buf+line_length, LBUF + 1, fp);
+ line_length = strlen(buf);
+ LBUF *= 2;
+ }
+ {
+ char *newbufq = (char*)realloc(bufq, LBUF);
+ if(newbufq == NULL){
+ fprintf(stderr, "cannot reallocate memory: bufq\n");
+ exit(EXIT_FAILURE);
+ }
+ bufq = newbufq;
+ LSEQ = LBUF / 2;
+/* now that the length of the buffer was measured,
+ * the line should contain qseq and sseq of the same length.
+ * Thus the sequence should be less than half of the line length */
+ realloc_sam(&sam, LBUF, LSEQ);
+ }
+ }
+ if(buf[0] == '#'){
+ prev_ref_name[0] = '\0';
+ prev_que_name[0] = '\0';
+ region_index=0;
+ n_units=0;
+ continue;
+ }
+ if(opt_top_hit_only){
+ if(n_units){
+ continue;
+ }
+ }
+ if(!opt_m5){
+ /* -outfmt '7 qseqid sstart send sacc qstart qend bitscore evalue pident qseq sseq' */
+ scanfret = sscanf(buf,"%s %d %d %s %d %d %lf %lf %lf %s %s", sam.qname, &blast.sstart, &blast.send, sam.rname, &blast.qstart, &blast.qend, &blast.bitscore, &blast.evalue, &blast.pident, sam.seq, sam.qual);/* sam.qual <- sseq */
+ if(opt_graph){
+ if(scanfret == 9){
+ /* set a dummy base */
+ sam.seq[0] = 'A';
+ sam.seq[1] = '\0';
+ sam.qual[0] = 'A';
+ sam.qual[1] = '\0';
+ }
+ else if(scanfret != 11){
+ fprintf(stderr, "sth strange: scanfret %d\n",scanfret);
+ fprintf(stderr, "buf: %s\n", buf);
+ exit(1);
+ }
+ }
+ else if(scanfret != 11){
+ fprintf(stderr, "sth strange: scanfret %d\n",scanfret);
+ fprintf(stderr, "buf: %s\n", buf);
+ exit(2);
+ }
+ }
+ else{
+ /* -outfmt '7 qseqid sacc bitscore pident qstart qend sstrand sstart send' */
+ scanfret = sscanf(buf,"%s %s %lf %lf %d %d %s %d %d", sam.qname, sam.rname, &blast.bitscore, &blast.pident, &blast.qstart, &blast.qend, strand, &blast.sstart, &blast.send);
+ if(scanfret != 9){
+ fprintf(stderr, "sth strange: scanfret %d\n",scanfret);
+ fprintf(stderr, "buf: %s\n", buf);
+ abort();
+ }
+ }
+ if(strcmp(prev_que_name,sam.qname) == 0){
+ ++n_units;
+ if(n_units>NUM_ALIGNMENTS){
+ /* discard this record */
+ continue;
+ }
+ }
+ else{
+ n_units = 1;
+ }
+ if(blast.qend-blast.qstart < 0){
+ fprintf(stderr, "unexpected blast qstt qend %d %d\n",blast.qstart,blast.qend);
+ exit(1);
+ }
+ if(opt_repeat){
+ int sstt = blast.sstart;
+ int send = blast.send;
+ int qstt = blast.qstart;
+ int qend = blast.qend;
+ int len = strlen(sam.seq);
+ /* qstt <= qend */
+ if(strcmp(sam.qname,sam.rname) == 0){
+ if((qstt <= sstt && sstt <= qend) || (qstt <= send && send <= qend)){
+ /* diagonal */
+ continue;
+ }
+ }
+ if(header){
+ header = 0;
+ printf("%s\n","len");
+ }
+ printf("%d\n",len);
+ continue;
+ }
+ if(strcmp(prev_que_name,sam.qname) == 0){
+ ++n_units;
+ if(n_units>NUM_ALIGNMENTS){
+ /* discard this record */
+ continue;
+ }
+ }
+ else{
+ n_units = 1;
+ }
+ if(valid_evalue >= 0.0){
+ if(blast.evalue > valid_evalue){
+ continue;
+ }
+ }
+ if(opt_pctidy){
+ int len = strlen(sam.seq);
+ int i;
+ int match=0;
+ int mm=0;
+ int indel=0;
+ double pctidy;
+ for(i=0; i<len; ++i){
+ if(sam.seq[i] == sam.qual[i]){
+ ++match;
+ }
+ else if(sam.seq[i] == '-' || sam.qual[i] == '-'){
+ ++indel;
+ }
+ else{
+ ++mm;
+ }
+ }
+ pctidy = (double)match/(double)len;
+ if(header){
+ header = 0;
+ printf("%s\t%s\t%s\t%s\t%s\n","pctidy","match","len","indel","mm");
+ }
+ printf("%.9f\t%d\t%d\t%d\t%d\n",pctidy,match,len,indel,mm);
+ continue;
+ }
+ if(opt_unique_ref){
+ if(strcmp(prev_ref_name, sam.rname)==0 && strcmp(prev_que_name,sam.qname)==0){
+ /* check overlap */
+ int i=0;
+ int cf=0;
+ for(i=0; i<region_index; ++i){
+ if(regions[i].left <= blast.sstart && blast.sstart <= regions[i].right){
+ cf=1;
+ break;
+ }
+ if(regions[i].left <= blast.send && blast.send <= regions[i].right){
+ cf=1;
+ break;
+ }
+ }
+ if(cf==1){
+ continue;
+ }
+ }
+ else{
+ /* another ref */
+ region_index=0;
+ strcpy(prev_ref_name, sam.rname);
+ strcpy(prev_que_name, sam.qname);
+ }
+ }
+ else if(opt_unique_que){
+ if(strcmp(prev_que_name, sam.qname)==0 && strcmp(prev_ref_name,sam.rname)==0){
+ /* check overlap */
+ int i=0;
+ int cf=0;
+ for(i=0; i<region_index; ++i){
+ if(regions[i].left <= blast.qstart && blast.qstart <= regions[i].right){
+ cf=1;
+ break;
+ }
+ if(regions[i].left <= blast.qend && blast.qend <= regions[i].right){
+ cf=1;
+ break;
+ }
+ }
+ if(cf==1){
+ continue;
+ }
+ }
+ else{
+ /* another que */
+ region_index=0;
+ strcpy(prev_que_name, sam.qname);
+ strcpy(prev_ref_name, sam.rname);
+ }
+ }
+ else{
+ region_index=0;
+ }
+
+ if(opt_unique_ref){/* blast.qend - blast.qstart >= 0 is always true */
+ if(blast.send - blast.sstart < 0){
+ /* reverse */
+ sam.flag = 0x10;
+ sam.pos = blast.send;
+ regions[region_index].left=blast.send;
+ regions[region_index].right=blast.sstart;
+ ++region_index;
+ }
+ else{
+ /* forward */
+ sam.flag = 0x0;
+ sam.pos = blast.sstart;
+ regions[region_index].left=blast.sstart;
+ regions[region_index].right=blast.send;
+ ++region_index;
+ }
+ }
+ else if(opt_unique_que){/* blast.qend - blast.qstart >= 0 is always true */
+ regions[region_index].left=blast.qstart;
+ regions[region_index].right=blast.qend;
+ ++region_index;
+ if(blast.send - blast.sstart < 0){
+ /* reverse */
+ sam.flag = 0x10;
+ sam.pos = blast.send;
+ }
+ else{
+ /* forward */
+ sam.flag = 0x0;
+ sam.pos = blast.sstart;
+ }
+ }
+ else{/* blast.qend - blast.qstart >= 0 is always true */
+ if(blast.send - blast.sstart < 0){
+ /* reverse */
+ sam.flag = 0x10;
+ sam.pos = blast.send;
+ }
+ else{
+ /* forward */
+ sam.flag = 0x0;
+ sam.pos = blast.sstart;
+ }
+ }
+ if(region_index >= NUM_ALIGNMENTS){
+ region_t * tmp;
+ NUM_ALIGNMENTS = NUM_ALIGNMENTS * 2;
+ tmp = (region_t*)realloc(regions,sizeof(region_t)*NUM_ALIGNMENTS);
+ if(tmp == NULL){
+ fprintf(stderr, "cannot reallocate memory: regions. NUM_ALIGNMENTS %d\n",NUM_ALIGNMENTS);
+ abort();
+ }
+ else{
+ regions = tmp;
+ }
+ }
+
+ if(opt_unique_bfmt7){
+ if(!opt_m5){
+ /* -outfmt '7 qseqid sstart send sacc qstart qend bitscore evalue pident qseq sseq' */
+ printf("%s\t%d\t%d\t%s\t%d\t%d\t%e\t%e\t%f\t%s\t%s\n", sam.qname, blast.sstart, blast.send, sam.rname, blast.qstart, blast.qend, blast.bitscore, blast.evalue, blast.pident, sam.seq, sam.qual);
+ }
+ else{
+ /* -outfmt '7 qseqid sacc bitscore pident qstart qend sstrand sstart send' */
+ printf("%s\t%s\t%e\t%f\t%d\t%d\t%s\t%d\t%d\n", sam.qname, sam.rname, blast.bitscore, blast.pident, blast.qstart, blast.qend, strand, blast.sstart, blast.send);
+ }
+ continue;
+ }
+
+ if(abs(blast.bitscore) > 255.0){
+ sam.mapq = 255;
+ }
+ else{
+ sam.mapq = (int)(abs(blast.bitscore)+0.499);
+ }
+ cmaux.sym='\0'; cmaux.num=0;
+ sam.cigar_length=0;
+
+	/* trim terminals against chimeras */
+ if(opt_chimeric_filter > 0){
+ if(strcmp(sam.qname,sam.rname) != 0){
+ int trim = opt_chimeric_filter;
+ int len_sseq = strlen(sam.qual);
+ int h_trim = trim;
+ int t_trim = trim;
+ if(len_sseq != strlen(sam.seq)){
+ fprintf(stderr, "lengths of que and sbj differ\n");
+ abort();
+ }
+ if(len_sseq <= trim*2){/* sam.qual <- sseq */
+ continue;
+ }
+
+ while(sam.seq[h_trim] == '-' || sam.qual[h_trim] == '-'){
+ ++h_trim;
+ }
+ while(sam.seq[len_sseq-1-t_trim] == '-' || sam.qual[len_sseq-1-t_trim] == '-'){
+ ++t_trim;
+ }
+ if(h_trim > len_sseq-t_trim){
+ continue;
+ }
+
+ /* adapt blast.qstart */
+ {
+ int nhyphen=0;
+ int i;
+ for(i=0; i<h_trim; ++i){
+ if(sam.seq[i] == '-')
+ ++nhyphen;
+ }
+ blast.qstart += (h_trim-nhyphen);
+ }
+
+ /* adapt blast.qend */
+ {
+ int nhyphen=0;
+ int i;
+ int endpos=strlen(sam.seq)-1;
+ for(i=0; i<t_trim; ++i){
+ if(sam.seq[endpos-i] == '-')
+ ++nhyphen;
+ }
+ blast.qend -= (t_trim-nhyphen);
+ }
+
+ /* adapt blast.sstart */
+ {
+ int nhyphen=0;
+ int i;
+ for(i=0; i<h_trim; ++i){
+ if(sam.qual[i] == '-')
+ ++nhyphen;
+ }
+ if(sam.flag == 0x0){
+ blast.sstart += (h_trim-nhyphen);
+ }
+ else if(sam.flag == 0x10){
+ blast.sstart -= (h_trim-nhyphen);
+ }
+ }
+
+ /* adapt blast.send */
+ {
+ int nhyphen=0;
+ int i;
+ int endpos=strlen(sam.qual)-1;
+ for(i=0; i<t_trim; ++i){
+ if(sam.qual[endpos-i] == '-')
+ ++nhyphen;
+ }
+ if(sam.flag == 0x0){
+ blast.send -= (t_trim-nhyphen);
+ }
+ else if(sam.flag == 0x10){
+ blast.send += (t_trim-nhyphen);
+ }
+ }
+
+ /* adapt sam.pos */
+ if(sam.flag == 0x0){
+ int nhyphen=0;
+ int i;
+ for(i=0; i<h_trim; ++i){
+ if(sam.qual[i] == '-')
+ ++nhyphen;
+ }
+ sam.pos += (h_trim-nhyphen);
+ }
+ else if(sam.flag == 0x10){
+ int nhyphen=0;
+ int i;
+ int endpos=strlen(sam.qual)-1;
+ for(i=0; i<t_trim; ++i){
+ if(sam.qual[endpos-i] == '-')
+ ++nhyphen;
+ }
+ sam.pos += (t_trim-nhyphen);
+ }
+ else{
+ fprintf(stderr, "souteigai flag\n");
+ abort();
+ }
+
+ strcpy(bufq, &sam.qual[h_trim]);
+ bufq[strlen(bufq)-t_trim]='\0';
+ strcpy(sam.qual, bufq);
+ strcpy(bufq, &sam.seq[h_trim]);
+ bufq[strlen(bufq)-t_trim]='\0';
+ strcpy(sam.seq, bufq);
+ }
+ else{
+ }
+ }
+
+ aln2cm(&sam, sam.seq, sam.qual, &cmaux);
+
+ sam.rnext[0] = '*';
+ sam.rnext[1] = '\0';
+ sam.pnext=0;
+ sam.tlen=0;
+ sam.as=abs(blast.bitscore);
+ sam.ev=blast.evalue;
+ sam.pident=blast.pident;
+ blast_print_sam(&sam, &cmaux, &blast, quals);
+ }
+
+ if(in_blastn[0] != '-'){
+ fclose(fp);
+ }
+ free_sam(&sam);
+ free(query_name);
+ free(buf);
+ free(bufq);
+ free(prev_ref_name);
+ free(prev_que_name);
+ free(regions);
+ return 0;
+}
+
+void blast_print_sam(sam_t * sam, cmaux_t * cmaux, blast_t * blast, char** quals){
+ if(cmaux->num > 0){
+ if(sam->cigar_length >= sam->cigar_capacity){
+ fputs("cigar capacity exceeded in blast_print_sam()!\n", stderr);
+ exit(EXIT_FAILURE);
+ }
+ sam->cigar[sam->cigar_length].sym = cmaux->sym;
+ sam->cigar[sam->cigar_length].num = cmaux->num;
+ ++sam->cigar_length;
+ }
+
+ if(opt_invsam == 1){
+ char * tmp = sam->rname;
+ sam->rname = sam->qname;
+ sam->qname = tmp;
+ sam->pos = blast->qstart;
+ {
+ int i;
+ int loop;
+ for(i=0,loop=sam->cigar_length; i<loop; ++i){
+ if(sam->cigar[i].sym == 'D'){
+ sam->cigar[i].sym = 'I';
+ }
+ else if(sam->cigar[i].sym == 'I'){
+ sam->cigar[i].sym = 'D';
+ }
+ else if(sam->cigar[i].sym == 'M'){
+ /* do nothing */
+ }
+ else{
+ fprintf(stderr, "souteigai cigar\n");
+ abort();
+ }
+ }
+ }
+ /* swap query for subject */
+ tmp = sam->seq;
+ sam->seq = sam->qual;
+ sam->qual = tmp;
+ }
+
+ {
+ int loop=strlen(sam->seq);
+ int i,j;
+ char * tmp;
+ for(i=0,j=0; i<loop; ++i){
+ if(sam->seq[i] != '-'){
+ sam->qual[j++] = sam->seq[i];
+ }
+ }
+ sam->qual[j]='\0';
+ tmp = sam->seq;
+ sam->seq = sam->qual;
+ sam->qual = tmp;
+ }
+
+ /* append quality values */
+ sam->qual[0] = '*';
+ sam->qual[1] = '\0';
+ if(opt_invsam != 1 && sam->flag & 0x10){
+ {
+ int i;
+ int loop;
+ int len;
+ for(i=0,len=sam->cigar_length,loop=len/2; i<loop; ++i){
+ char tmp = sam->cigar[i].sym;
+ int tmp2 = sam->cigar[i].num;
+ sam->cigar[i].sym = sam->cigar[len-1-i].sym;
+ sam->cigar[i].num = sam->cigar[len-1-i].num;
+ sam->cigar[len-1-i].sym = tmp;
+ sam->cigar[len-1-i].num = tmp2;
+ }
+ for(i=0,len=strlen(sam->seq),loop=len/2; i<loop; ++i){
+ char tmp = sam->seq[i];
+ sam->seq[i] = sam->seq[len-1-i];
+ sam->seq[len-1-i] = tmp;
+ }
+ for(i=0,loop=strlen(sam->seq); i<loop; ++i){
+ /* complementary nucleotide */
+ char t = sam->seq[i];
+ switch(t){
+ case 'a':
+ case 'A':
+ t = 'T';
+ break;
+ case 'c':
+ case 'C':
+ t = 'G';
+ break;
+ case 'g':
+ case 'G':
+ t = 'C';
+ break;
+ case 't':
+ case 'T':
+ t = 'A';
+ break;
+ case 'N':
+ case 'n':
+ t = 'N';
+ break;
+ default:
+ fprintf(stderr, "souteigai : %c\n",t);
+ abort();
+ break;
+ }
+ sam->seq[i] = t;
+ }
+ }
+ }
+
+ /* RNEXT is used for qstt,qend,sstt,send in this program. */
+ /* these are trimmed values. */
+ /* 1-origin */
+ if(opt_invsam){
+ sprintf(sam->rnext,"%d,%d,%d,%d",blast->sstart,blast->send,blast->qstart,blast->qend);
+ }
+ else{
+ sprintf(sam->rnext,"%d,%d,%d,%d",blast->qstart,blast->qend,blast->sstart,blast->send);
+ }
+
+ print_sam(sam);
+ return;
+}
+
+void aln2cm(sam_t * sam, char * q, char * s, cmaux_t * cmaux){
+ int len = strlen(q);
+ int i;
+ for(i=0; i<len; ++i){
+ int op;
+ if(q[i] == '-'){
+ op = 'D';
+ }
+ else if(s[i] == '-'){
+ op = 'I';
+ }
+ else{
+ op = 'M';
+ }
+ if(cmaux->sym == op){
+ ++cmaux->num;
+ }
+ else{
+ if(cmaux->num > 0){
+ if(sam->cigar_length >= sam->cigar_capacity){
+ fputs("cigar capacity exceeded in aln2cm()!\n", stderr);
+ exit(EXIT_FAILURE);
+ }
+ sam->cigar[sam->cigar_length].sym = cmaux->sym;
+ sam->cigar[sam->cigar_length].num = cmaux->num;
+ ++sam->cigar_length;
+ }
+ cmaux->sym = op;
+ cmaux->num = 1;
+ }
+ }
+ return;
+}
diff --git a/bfmtx2m4.pl b/bfmtx2m4.pl
new file mode 100755
index 0000000..3e3bb3a
--- /dev/null
+++ b/bfmtx2m4.pl
@@ -0,0 +1,84 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+use Getopt::Long;
+
+my $opt_shortname;
+#GetOptions('n'=>\$opt_shortname);
+
+if(@ARGV != 2){
+ die "USAGE: <this> <m4.pre> <all_norm.fa>\n";
+}
+
+my $m4pre = $ARGV[0];
+my $all_norm = $ARGV[1];
+
+#print $m4pre,"\n";
+#print $all_norm,"\n";
+#exit;
+
+my %name2len=();
+
+open my $fa_fh, "<", $all_norm or die "cannot open $all_norm: $!\n";
+
+my $counter=0;
+
+my $name = <$fa_fh>;
+chomp $name;
+$name =~ s/^>//;
+
+my $bases = "";
+my $qval = "";
+while(1){
+ while(my $buf=<$fa_fh>){
+ chomp $buf;
+ if($buf =~ /^>/){
+ $name2len{$name} = length($bases);
+
+ $name = $buf;
+ $bases= "";
+ $qval = "";
+ $name =~ s/^>//;
+ last;
+ }
+ else{
+ $bases .= $buf;
+ }
+ }
+ if(eof){
+ last;
+ }
+}
+
+$name2len{$name} = length($bases);
+close $fa_fh;
+
+open my $pre_fh, "<", $m4pre or die "cannot open $m4pre: $!\n";
+while(my $line=<$pre_fh>){
+ chomp $line;
+ if($line =~ /^#/){
+ next;
+ }
+ # qseqid sacc bitscore pident qstart qend sstrand sstart send
+ my @tmp = split /\s+/,$line;
+ if(@tmp != 9){
+ die "strange format: $line\n";
+ }
+ my ($qseqid,$sacc,$bitscore,$pident,$qstart,$qend,$sstrand,$sstart,$send) = @tmp;
+ $bitscore = (-1.0)*$bitscore*5.0/2.0;
+ # qname tname score pctsimilarity qstrand qstart qend qseqlength tstrand tstart tend tseqlength mapqv ncells clusterScore probscore numSigClusters
+ # qname tname score pctsimilarity qstrand qstart qend qseqlength tstrand tstart tend tseqlength mapqv
+ if($sstrand eq "plus"){
+ $sstrand = 0;
+ }
+ elsif($sstrand eq "minus"){
+ $sstrand = 1;
+ }
+ else{
+ die "souteigai: $sstrand\n";
+ }
+ printf("%s %s %d %f %s %d %d %d %s %d %d %d %d\n",$qseqid, $sacc, $bitscore, $pident, "0", $qstart-1, $qend, $name2len{$qseqid}, $sstrand, $sstart-1, $send, $name2len{$sacc}, 254);
+}
+
+close $pre_fh;
+
diff --git a/ca_ikki_v5.pl b/ca_ikki_v5.pl
new file mode 100755
index 0000000..b382c09
--- /dev/null
+++ b/ca_ikki_v5.pl
@@ -0,0 +1,228 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+use Getopt::Long;
+
+my $DEVEL; # set by -devel; only adds one extra log line below
+
+my $from=0; # iteration range (from, to]; with the defaults exactly one run
+my $to=1;
+my $fastqdir="./";
+my $ca_path=""; # directory holding the Celera Assembler binaries
+my $out_dir="CA";
+#my $tmp_dir=$out_dir;
+my $sprai_path="";
+my $coverage=20; # keep ~this many x of the longest reads for assembly
+my $raw_fastq="";
+
+my @msg=(
+"USAGE: <this> <asm.spec> estimated_genome_size",
+#"[-from integer]",
+#"[-to integer ]",
+"[-d directory in which fin.fq.gzs exist (default: $fastqdir)]",
+"[-ca_path /path/to/your/wgs/Linux-amd64/bin (default: $ca_path)]",
+#"[-tmp_dir temporary directory (default: $tmp_dir)]",
+"[-out_dir output directory (default: $out_dir)]",
+"[-sprai_path the path to sprai installed (default: $sprai_path)]",
+"[-coverage int : use longer than N(coverage) reads for assembly (default: $coverage)]",
+"",
+"[-raw_fastq in.fq : use all reads in in.fq (default: off)]",
+);
+
+GetOptions(
+ 'from=i' => \$from,
+ 'to=i' => \$to,
+ 'd=s' => \$fastqdir,
+ 'devel' => \$DEVEL,
+ 'ca_path=s'=>\$ca_path,
+# 'tmp_dir=s'=>\$tmp_dir,
+ 'out_dir=s'=>\$out_dir,
+ 'coverage=i'=>\$coverage,
+ 'raw_fastq=s'=>\$raw_fastq,
+ 'sprai_path=s'=>\$sprai_path
+ );
+
+if(@ARGV != 2){
+ my $tmp = join "\n\t",@msg; # fixed: mail-archive munging had turned ',@msg' into ', at msg' (a syntax error under strict)
+ die "$tmp\n";
+ #die "USAGE: <this> <asm.spec> estimated_genome_size [-from integer -to integer]\n\t[-d directory in which fin.fq.gzs exist]\n\t[-ca_path /path/to/your/wgs/Linux-amd64/bin]\n";
+}
+
+my $spec = $ARGV[0];
+printf STDERR ("%s is given\n",$spec);
+my $estimated_genome_size = $ARGV[1];
+if($estimated_genome_size <= 0){
+ die "estimated_genome_size must be > 0\n";
+}
+
+printf STDERR ("#>- params -<#\n");
+printf STDERR ("spec\t%s\n",$spec);
+printf STDERR ("estimated_genome_size\t%s\n",$estimated_genome_size);
+#printf STDERR ("from\t%s\n",$from);
+#printf STDERR ("to\t%s\n",$to);
+printf STDERR ("fastq_dir\t%s\n",$fastqdir);
+#printf STDERR ("tmp_dir\t%s\n",$tmp_dir);
+printf STDERR ("out_dir\t%s\n",$out_dir);
+printf STDERR ("sprai_path\t%s\n",$sprai_path);
+
+if($DEVEL){
+ printf STDERR ("development mode\t%s\n","true");
+}
+if($ca_path){
+ printf STDERR ("ca_path\t%s\n",$ca_path);
+}
+if($coverage){
+ printf STDERR ("coverage\t%s\n",$coverage);
+}
+if($raw_fastq){
+ printf STDERR ("raw_fastq\t%s\n",$raw_fastq);
+}
+printf STDERR ("#>- params -<#\n");
+#exit;
+
+my $PWD=`pwd`; # absolute working directory, used to absolutize relative paths
+chomp $PWD;
+
+my $now = `date +%Y%m%d_%H%M%S`; # timestamp used to de-collide output dir names
+chomp $now;
+
+if($out_dir !~ /^\//){
+ $out_dir = "$PWD/$out_dir"; # make out_dir absolute
+}
+
+if($out_dir =~ /\/$/){
+ chop $out_dir; # drop a trailing slash
+}
+my @out_dirs=(); # per-iteration output directories (indexed from $from+1)
+my @p2=(); # per-iteration assembly name prefixes (derived from dir basename)
+for(my $i=$from+1; $i<=$to; ++$i){
+ my $tmp;
+ my $now_used=0;
+ if($to-$from > 1){
+ $tmp = sprintf("%s_%02d_%s",$out_dir,$i,$now); # multiple runs: suffix with index + timestamp
+ }
+ else{
+ $tmp = sprintf("%s",$out_dir);
+ }
+ if(-d $tmp){ # name collision: retry with a fresh timestamp
+ my $now = `date +%Y%m%d_%H%M%S`;
+ chomp $now;
+ $tmp = sprintf("%s_%02d_%s",$out_dir,$i,$now);
+ if($to-$from>1){
+ redo; # re-test the freshly generated name
+ }
+ $now_used=1;
+ }
+ mkdir "$tmp" or die "cannot mkdir $tmp: $!\n";
+ $out_dirs[$i] = $tmp;
+ my @foo = split /\//,$out_dirs[$i];
+ if($to-$from>1 || $now_used){
+ $p2[$i] = $foo[$#foo]; # prefix = directory basename
+ }
+ else{
+ my $now = `date +%Y%m%d_%H%M%S`;
+ chomp $now;
+ $p2[$i] = sprintf("%s_%02d_%s",$foo[$#foo],$from+1,$now);
+ }
+}
+
+=pod
+if($tmp_dir !~ /^\//){
+ $tmp_dir = "$PWD/$tmp_dir";
+}
+if(-d "$tmp_dir"){
+ my $tmp = "${tmp_dir}_$now";
+ mkdir "$tmp" or die "cannot mkdir $tmp: $!\n";
+}
+else{
+ my $tmp = "${tmp_dir}";
+ mkdir "$tmp" or die "cannot mkdir $tmp: $!\n";
+}
+=cut
+
+#$tmp_dir = $out_dir;
+
+my $bashcommand="";
+
+my $suffix = "top20x"; # marks fastqs reduced to the longest ~coverage x reads
+
+
+if(!$raw_fastq){
+ #printf STDERR ("start idfq2fq\n");
+ for(my $i=$from+1; $i<=$to; ++$i){
+ my $prefix=sprintf("c%02d.fin",$i);
+ my $PG1 = "get_top_20x_fa.pl";
+ if($sprai_path){
+ $PG1 = "$sprai_path/$PG1";
+ }
+ my $uuid = $now;
+ #my $uuid = `uuidgen`;
+ chomp $uuid;
+ $bashcommand .= "gzip -d -c $fastqdir/$prefix.idfq.gz > $PWD/$uuid.tmp && $PG1 $PWD/$uuid.tmp -l -c $coverage -g $estimated_genome_size -q > $out_dirs[$i]/$prefix.$suffix.fq && rm $PWD/$uuid.tmp &\n"; # decompress, select top-coverage reads, run each iteration in the background
+ }
+
+ `
+ $bashcommand
+ wait
+ `;
+ #printf STDERR ("done idfq2fq\n");
+}
+else{
+ if($to-$from != 1){
+ printf STDERR ("strange 'from' and 'to'.\n");
+ printf STDERR ("from - to must be one if you specify -raw_fastq.\n");
+ exit(1);
+ }
+ if($raw_fastq !~ /^\//){
+ $raw_fastq = "$PWD/$raw_fastq"; # absolutize before symlinking
+ }
+ my $prefix=sprintf("c%02d.fin",$from+1);
+ `ln -s $raw_fastq $out_dirs[$from+1]/$prefix.$suffix.fq`;
+}
+
+#printf STDERR ("start fastqToCA\n");
+for(my $i=$from+1; $i<=$to; ++$i){
+ my $prefix=sprintf("c%02d.fin",$i);
+ my $fastqToCA = "fastqToCA";
+ if($ca_path){
+ $fastqToCA = "$ca_path/$fastqToCA";
+ }
+ `$fastqToCA -libraryname foo -technology pacbio-corrected -reads $out_dirs[$i]/$prefix.$suffix.fq > $out_dirs[$i]/$prefix.$suffix.frg`;
+}
+#printf STDERR ("done fastqToCA\n");
+
+#printf STDERR ("start CA (stopAfter=unitigger)\n");
+
+my $now_used=0;
+
+my $runCA="runCA";
+if($ca_path){
+ $runCA = "$ca_path/$runCA";
+}
+
+=pod
+for(my $i=$from+1; $i<=$to; ++$i){
+ my $prefix=sprintf("c%02d.fin",$i);
+ `$runCA stopAfter=unitigger -dir $out_dirs[$i] -p asm_$p2[$i] -s $spec $out_dirs[$i]/$prefix.$suffix.frg 2>> $out_dirs[$i]/do_$prefix.$suffix.$now.log`;
+}
+#printf STDERR ("done CA (stopAfter=unitigger)\n");
+
+#printf STDERR ("start CA (the rest)\n");
+$bashcommand="";
+for(my $i=$from+1; $i<=$to; ++$i){
+ my $prefix=sprintf("c%02d.fin",$i);
+ $bashcommand .= "($runCA -dir $out_dirs[$i] -p asm_$p2[$i] -s $spec $out_dirs[$i]/$prefix.$suffix.frg 2>> $out_dirs[$i]/do_$prefix.$suffix.$now.log) &\n";
+}
+`
+ $bashcommand
+`;
+=cut
+$bashcommand="";
+for(my $i=$from+1; $i<=$to; ++$i){
+ my $prefix=sprintf("c%02d.fin",$i);
+ $bashcommand .= "$runCA -dir $out_dirs[$i] -p asm_$p2[$i] -s $spec $out_dirs[$i]/$prefix.$suffix.frg 2>> $out_dirs[$i]/do_$prefix.$suffix.$now.log &\n"; # launch all runCA jobs in parallel; waited on below
+}
+`
+ $bashcommand
+ wait
+`;
diff --git a/check_circularity.pl b/check_circularity.pl
new file mode 100755
index 0000000..5f98c96
--- /dev/null
+++ b/check_circularity.pl
@@ -0,0 +1,168 @@
+#!/usr/bin/env perl
+
+use strict;
+use Getopt::Long;
+# use Data::Dump qw(dump);
+
+my $debug = 0;
+my $flag_force = 0; # --force: remove an existing temporary directory and start over
+# Maximum Read Length. We assume that no reads are longer than this (bp).
+my $param_max_read_len = 30000;
+# We assume that a circular chromosome must yield a contig whose ends
+# overlap longer than this length (bp) and more identical than this match ratio.
+my $param_min_overlap_len = 1500;
+my $param_min_overlap_match_ratio = 0.97;
+# When the match ratio of the overlap is lower than this, show a warning. (This aims at not missing potential overlaps)
+my $param_alert_overlap_match_ratio = 0.90;
+# Circular chromosomes (a contig whose ends overlap) may have a region of
+# bad sequence quality at the both ends up to this length.
+my $param_max_overlap_hang_len = 100;
+
+GetOptions(
+ "force" => \$flag_force,
+ "max_read_len=i" => \$param_max_read_len,
+ "min_overlap_len=i" => \$param_min_overlap_len,
+ "min_overlap_ratio=f" => \$param_min_overlap_match_ratio,
+ "alt_overlap_ratio=f" => \$param_alert_overlap_match_ratio,
+ "max_hang_len=i" => \$param_max_overlap_hang_len,
+ "debug" => \$debug
+);
+
+my $input_fasta_file_name = shift;
+my $temporary_directory_name = shift;
+
+unless(defined $input_fasta_file_name && defined $temporary_directory_name) {
+ print STDERR "Usage: check_circurarity.pl <input FASTA (assembly)> <temporary dir>\n";
+ exit 0;
+}
+
+if(-e $temporary_directory_name) {
+ if($flag_force) { # user explicitly allowed wiping the directory
+ print STDERR "WARNING: '$temporary_directory_name' already exists, but you gave --force option.\n";
+ print STDERR " We remove the entire directory and start from scratch.\n";
+ `rm -rf "$temporary_directory_name"`;
+ if($?) { # non-zero exit status from rm
+ print STDERR "ERROR: $temporary_directory_name could not be removed.\n";
+ exit 1;
+ }
+ } else {
+ print STDERR "ERROR: '$temporary_directory_name' already exists. We abort for safety.\n";
+ print STDERR " If you wish to remove the directory (and the things in it), and start from scratch,\n";
+ print STDERR " you can give '--force' to do that, but use it carefully.\n";
+ exit 1;
+ }
+}
+
+mkdir $temporary_directory_name or die "ERROR: We could not create '$temporary_directory_name'.";
+
+sub split_fasta_into_separate_fasta
+{
+ my ($fasta, $dir) = @_; # input FASTA path, output directory
+ my @array_of_sequence_name_and_its_length = (); # one {seq_name, len, file_name} per record
+ open my $fh, "<", $fasta or die "ERROR: We could not open '$fasta' for input"; # fixed: previously opened the global $input_fasta_file_name, ignoring the parameter
+ my $ofh;
+ my $current_sequence_name = undef;
+ while(<$fh>) {
+ chomp; chop if(/\r$/);
+ if(m|^>(\S+)|) {
+ my $sequence_name = $1;
+ if(defined $current_sequence_name) { close $ofh; } # finish the previous per-sequence file
+ $current_sequence_name = $sequence_name;
+ $current_sequence_name =~ s|[^\w\d_ -]||g; # sanitize for use as a file name; NOTE(review): distinct names could collide after this
+ my $output_file_name = "$dir/$current_sequence_name.fa";
+ push(@array_of_sequence_name_and_its_length, {seq_name => $current_sequence_name, len => 0, file_name => $output_file_name});
+ open $ofh, ">", $output_file_name or die "ERROR: Could not open '$output_file_name' for output";
+ print $ofh ">$current_sequence_name\n";
+ } else {
+ s|[^ACGT]||ig; # keep only unambiguous bases
+ my $l = length($_);
+ if(0 < $l) {
+ $array_of_sequence_name_and_its_length[-1]->{len} += $l;
+ print $ofh "$_\n";
+ }
+ }
+ }
+ if(defined $current_sequence_name) { close $ofh; }
+ close $fh;
+ return @array_of_sequence_name_and_its_length;
+}
+
+sub output_with_folding
+{
+ # Print $seq to $fh folded at 70 columns per line.
+ # substr() past the end of the string simply returns the shorter tail,
+ # so the final (possibly short) line needs no special handling.
+ my ($fh, $seq) = @_;
+ my $width = 70;
+ for(my $offset = 0; $offset < length($seq); $offset += $width) {
+ print $fh substr($seq, $offset, $width), "\n";
+ }
+}
+
+my @sequence_objects = split_fasta_into_separate_fasta($input_fasta_file_name, $temporary_directory_name);
+for my $seq_obj (@sequence_objects) {
+ my $command_line = "blastn -task megablast -subject $seq_obj->{file_name} -query $seq_obj->{file_name} -evalue 1e-100 -outfmt 7"; # align the contig against itself
+ my @results;
+ unless($seq_obj->{len} < $param_max_read_len * 2) { # long contig: only compare the two <max_read_len ends
+ my $end_query_pos = $param_max_read_len - 1;
+ my $start_subj_pos = $seq_obj->{len} - $param_max_read_len + 1;
+ $command_line .= " -subject_loc $start_subj_pos-$seq_obj->{len} -query_loc 1-$end_query_pos";
+ }
+ print STDERR "\$ $command_line\n" if($debug > 0);
+ @results = `$command_line`;
+ print @results if($debug > 0);
+ my $is_circular = 0;
+ my $preserve_pos_from_here_0origin; # set only when $is_circular becomes 1
+ my $preserve_pos_to_here_0origin;
+ my $comment = ""; # fixed: was left undef for linear contigs and interpolated into the report line below
+ for(@results) {
+ chomp; chop if(/\r$/);
+ next if(m/^#/);
+ # Fields: query id, subject id, % identity, alignment length, mismatches, gap opens, q. start, q. end, s. start, s. end, evalue, bit score
+ my ($query_id, $subj_id, $ident_percent, $align_len, $mismatches, $gap_opens, $qstart, $qend, $sstart, $send, $evalue, $bit_score) = split(/\t/);
+ next if($qstart == $sstart && $qend == $send); # skip the trivial self-identical hit
+ next if($align_len < $param_min_overlap_len);
+ next if($ident_percent < $param_alert_overlap_match_ratio * 100.0);
+ print STDERR " len=$seq_obj->{len} [$qstart-$qend] => [$sstart-$send]\n" if($debug);
+ if($ident_percent < $param_min_overlap_match_ratio * 100.0) { # alert-level match only: report, do not cut
+ unless($is_circular) {
+ $comment = "Possibly circular. [$qstart-$qend] matches [$sstart-$send] with $ident_percent\% identity.";
+ }
+ } else {
+ if($qstart <= $param_max_overlap_hang_len && $seq_obj->{len} - $param_max_overlap_hang_len <= $send) { # overlap must touch both contig ends (within hang length)
+ $comment = "[$qstart-$qend] matches [$sstart-$send] with $ident_percent\% identity.";
+ $is_circular = 1;
+ $preserve_pos_from_here_0origin = $qstart - 1;
+ $preserve_pos_to_here_0origin = $sstart - 1;
+ }
+ }
+ }
+ print "$seq_obj->{seq_name}\t" . ($is_circular ? "circular" : "linear") . "\t$comment\n";
+ if($is_circular) { # write the deduplicated sequence plus a half-rotated copy
+ my $fname = $seq_obj->{file_name} . ".cut.fa";
+ my $fname2 = $seq_obj->{file_name} . ".cut_halfrot.fa";
+ open my $ifh, "<", $seq_obj->{file_name} or die "ERROR: Could not open '$seq_obj->{file_name}'.";
+ open my $ofh, ">", $fname or die "ERROR: Could not open '$fname'";
+ open my $ofh2, ">", $fname2 or die "ERROR: Could not open '$fname2'";
+ my $header = <$ifh>;
+ print $ofh $header;
+ print $ofh2 $header;
+ my @lines;
+ while(<$ifh>) {
+ chomp; chop if(/\r$/);
+ push(@lines, $_);
+ }
+ my $seq = join('', @lines);
+ my $cut_seq = substr($seq, $preserve_pos_from_here_0origin, $preserve_pos_to_here_0origin - $preserve_pos_from_here_0origin + 1); # drop one copy of the duplicated end overlap
+ output_with_folding($ofh, $cut_seq);
+ {
+ my $l = length($cut_seq);
+ my $first_half_len = int($l / 2);
+ output_with_folding($ofh2, substr($cut_seq, $first_half_len) . substr($cut_seq, 0, $first_half_len)); # rotate by half so the former join point sits mid-sequence
+ }
+ close $ifh;
+ close $ofh;
+ close $ofh2;
+ }
+}
+
diff --git a/check_redundancy.pl b/check_redundancy.pl
new file mode 100755
index 0000000..0503f56
--- /dev/null
+++ b/check_redundancy.pl
@@ -0,0 +1,74 @@
+#!/usr/bin/env perl
+
+use strict;
+use Getopt::Long;
+# use Data::Dump qw(dump);
+
+my $debug = 0;
+my $flag_force = 0; # accepted for interface compatibility; not used below
+# Minimum match ratio to consider
+my $param_min_ratio = 0.94;
+my $param_min_len_ratio = 0.98;
+# This length may not have a good match to a larger contig.
+my $param_max_hang_len = 100;
+
+GetOptions(
+ "force" => \$flag_force,
+ "max_hang_len=i" => \$param_max_hang_len,
+ "min_align_ratio=f" => \$param_min_ratio,
+ "debug" => \$debug
+);
+
+my $input_fasta_file_name = shift;
+
+unless(defined $input_fasta_file_name) {
+ print STDERR "Usage: check_redundancy.pl <input FASTA (assembly)>\n";
+ exit 0;
+}
+
+my $command_line = "blastn -task blastn -subject $input_fasta_file_name -query $input_fasta_file_name -evalue 1e-100 -outfmt 7"; # all-vs-all self alignment of the assembly
+my @results;
+print STDERR "\$ $command_line\n" if($debug > 0);
+@results = `$command_line`; # fixed: mail-archive munging had corrupted '@results' into ' at results'
+print @results if($debug > 0);
+
+my %id_2_len; # contig id -> length of its first self-hit (used as contig length)
+my $current_seq = ""; # initialized so the first 'ne' comparison below never sees undef
+my @ids; # contig ids in first-seen order
+for(@results) {
+ chomp; chop if(/\r$/);
+ next if(m/^#/);
+ my ($query_id, $subj_id, $ident_percent, $align_len, $mismatches, $gap_opens, $qstart, $qend, $sstart, $send, $evalue, $bit_score) = split(/\t/);
+ if($query_id eq $subj_id && $current_seq ne $query_id) { # first self-hit of each contig
+ $current_seq = $query_id;
+ $id_2_len{$current_seq} = $align_len; # assumes the first self-hit is full-length -- TODO confirm
+ push(@ids, $current_seq);
+ }
+}
+
+my %id_2_redundant_arr; # contig id -> descriptions of near-full-length matches to other contigs
+for(@results) {
+ chomp; chop if(/\r$/);
+ next if(m/^#/);
+ # Fields: query id, subject id, % identity, alignment length, mismatches, gap opens, q. start, q. end, s. start, s. end, evalue, bit score
+ my ($query_id, $subj_id, $ident_percent, $align_len, $mismatches, $gap_opens, $qstart, $qend, $sstart, $send, $evalue, $bit_score) = split(/\t/);
+ next if($query_id eq $subj_id); # ignore self-hits here
+ next if($ident_percent < $param_min_ratio * 100.0);
+ next if($align_len < $id_2_len{$query_id} * $param_min_len_ratio); # must cover nearly the whole query contig
+ push(@{$id_2_redundant_arr{$query_id}}, "\[$qstart-$qend\]($id_2_len{$query_id})=($ident_percent\%)=>$subj_id\[$sstart-$send\]($id_2_len{$subj_id})");
+}
+
+print "#ID\tunique\tcomment\n";
+for my $id (@ids) {
+ print "$id\t";
+ if(exists $id_2_redundant_arr{$id}) {
+ print "redundant\t";
+ print join(' ', @{$id_2_redundant_arr{$id}});
+ } else {
+ print "unique\t";
+ }
+ print "\n";
+}
+
+# dump(%id_2_redundant_arr);
+
diff --git a/col2fqcell.h b/col2fqcell.h
new file mode 100644
index 0000000..60d87c4
--- /dev/null
+++ b/col2fqcell.h
@@ -0,0 +1,359 @@
+
+typedef struct base_t{
+ char base; /* aligned base: ACGTN/acgtn, '-' (gap) or ' ' (column not covered) */
+ char qv;   /* quality value as a printable char, '!'-offset (Phred+33 style) */
+}base_t;
+
+void set_vals(int col_index, int coded_base, int * ballot, char * max_qvs, base_t * col){ /* cast one vote for coded_base and remember its best QV */
+ ++ballot[coded_base]; /* one more read supports this symbol */
+ max_qvs[coded_base] = (max_qvs[coded_base] < (col[col_index].qv-'!')) ? (col[col_index].qv-'!') : max_qvs[coded_base]; /* keep the maximum ('!'-offset) QV seen for this symbol */
+}
+
+// Consensus-call one alignment column (vertical scroll): writes the elected base to *seq, its QV to *qual and the column depth to *depth.
+void col2fqcell(base_t * col, char * seq, char * depth, char * qual, int maximum_ballots, int minimum_ballots, double distinguishable){
+ if(maximum_ballots<2){
+ fprintf(stderr, "maximum_ballots must be >= 2. %d was given.\n",maximum_ballots);
+ abort();
+ }
+ if(minimum_ballots<0){
+ fprintf(stderr, "minimum_ballots must be >= 0. %d was given.\n",minimum_ballots);
+ abort();
+ }
+
+ int i;
+ int ballot[6]; /* votes per symbol: 0:'-' 1:A 2:C 3:G 4:T 5:N */
+ char max_qvs[6]; /* best ('!'-offset) QV seen per symbol */
+ for(i=0; i<6; ++i){
+ ballot[i]=0;
+ max_qvs[i]=0;
+ }
+
+ int total_ballots = 0;
+
+ for(i=0; col[i].base != '\0'; ++i){ /* the column is NUL-terminated */
+ if(total_ballots <= maximum_ballots){ /* NOTE(review): '<=' admits one vote beyond maximum_ballots; confirm intended */
+ switch(col[i].base){
+ case ' ': /* column not covered by this read: no vote */
+ break;
+ case 'a':
+ set_vals(i,1,ballot,max_qvs,col);
+ ++total_ballots;
+ break;
+ case 'A':
+ set_vals(i,1,ballot,max_qvs,col);
+ ++total_ballots;
+ break;
+ case 'c':
+ set_vals(i,2,ballot,max_qvs,col);
+ ++total_ballots;
+ break;
+ case 'C':
+ set_vals(i,2,ballot,max_qvs,col);
+ ++total_ballots;
+ break;
+ case 'g':
+ set_vals(i,3,ballot,max_qvs,col);
+ ++total_ballots;
+ break;
+ case 'G':
+ set_vals(i,3,ballot,max_qvs,col);
+ ++total_ballots;
+ break;
+ case 't':
+ set_vals(i,4,ballot,max_qvs,col);
+ ++total_ballots;
+ break;
+ case 'T':
+ set_vals(i,4,ballot,max_qvs,col);
+ ++total_ballots;
+ break;
+ case 'n':
+ set_vals(i,5,ballot,max_qvs,col);
+ ++total_ballots;
+ break;
+ case 'N':
+ set_vals(i,5,ballot,max_qvs,col);
+ ++total_ballots;
+ break;
+ case '-':
+ set_vals(i,0,ballot,max_qvs,col);
+ ++total_ballots;
+ break;
+ default:
+ fprintf(stderr, "arienai: %c\n",col[i].base); /* "arienai" (Japanese): impossible */
+ abort();
+ //++ballot[(int)encode[(int)buf4print[i]]];
+ }
+ }
+ }
+
+ int number_of_ballots = 0;
+ {
+ int i;
+ for(i=0; i<6; ++i){
+ number_of_ballots += ballot[i];
+ }
+ }
+ if(number_of_ballots < 1){
+ fprintf(stderr, "sth buggy: consensus\n");
+ abort();
+ }
+ *depth = (number_of_ballots < 93) ? (char)(number_of_ballots+33) : '~'; /* depth as a printable Phred+33-style char, capped at '~' */
+
+ // to elect or not
+ if(number_of_ballots < minimum_ballots){// do not elect
+ // keep: too few votes, retain the backbone read's base and QV
+ *seq = col[0].base;
+ *qual = col[0].qv;
+ if(*seq == '-'){
+ *seq = ' ';// this column will not be printed in output fastq
+ }
+ if(*qual < '!' && *seq != ' '){ /* a kept base must carry a printable QV */
+ // error
+ int i;
+ for(i=0; col[i].base != '\0'; ++i){
+ fprintf(stderr,"%c",col[i].base);
+ }
+ fprintf(stderr,"\n");
+ for(i=0; col[i].qv != '\0'; ++i){
+ fprintf(stderr,"%c",col[i].qv);
+ }
+ fprintf(stderr,"\n");
+ abort();
+ }
+ return;
+ }
+ else{ // do elect
+ int max_ballot=0;
+ for(i=0; i<6; ++i){
+ max_ballot = (max_ballot < ballot[i]) ? (ballot[i]) : max_ballot;
+ }
+ int second_ballot=0; /* runner-up vote count */
+ {
+ for(i=0; i<6; ++i){
+ if(ballot[i] < max_ballot && second_ballot < ballot[i]){
+ second_ballot = ballot[i];
+ }
+ }
+ int num_top=0;
+ for(i=0; i<6; ++i){
+ if(ballot[i] == max_ballot){
+ ++num_top;
+ }
+ }
+ if(num_top > 1){
+ second_ballot = max_ballot; /* tie: makes rate below 0.5 */
+ }
+ }
+// double distinguishable = 0.70;
+ double rate = (double)max_ballot/(double)(max_ballot+second_ballot); /* winner's share among the top two */
+ if(rate>=distinguishable){ /* clear winner: elect it */
+ for(i=0; i<6; ++i){
+ if(ballot[i] == max_ballot){
+ *seq = "-ACGTN"[i];
+ *qual = max_qvs[i]+'!'; /* report the best supporting QV */
+ }
+ }
+ return;
+ }
+ else{
+ // keep: ambiguous column, retain the backbone base
+ *seq = col[0].base;
+ *qual = '!';// set to minimum
+ //*qual = col[0].qv;
+ if(*seq == '-'){
+ *seq = ' ';// this column will not be printed in output fastq
+ }
+ if(*qual < '!' && *seq != ' '){
+ fprintf(stderr,"sth strange base\n");
+ int i;
+ for(i=0; col[i].base != '\0'; ++i){
+ fprintf(stderr,"%c",col[i].base);
+ }
+ fprintf(stderr,"\n");
+ for(i=0; col[i].qv != '\0'; ++i){
+ fprintf(stderr,"%c",col[i].qv);
+ }
+ fprintf(stderr,"\n");
+ abort();
+ }
+ return;
+ }
+ }
+
+ fprintf(stderr,"never come here\n");
+ abort();
+ return;
+}
+
+/*
+void col2fqcell_bak(base_t * col, char * seq, char * depth, char * qual, int maximum_ballots, int minimum_ballots){
+ if(maximum_ballots<2){
+ fprintf(stderr, "maximum_ballots must be >= 2. %d was given.\n",maximum_ballots);
+ abort();
+ }
+ if(minimum_ballots<0){
+ fprintf(stderr, "minimum_ballots must be >= 0. %d was given.\n",minimum_ballots);
+ abort();
+ }
+
+ int i;
+ int ballot[6];
+ int sum_qvs[6];
+ char max_qvs[6];
+ //int pi = strlen(col);
+ for(i=0; i<6; ++i){
+ ballot[i]=0;
+ sum_qvs[i]=0;
+ max_qvs[i]=0;
+ }
+ void set_vals(int col_index, int coded_base){
+ ++ballot[coded_base];
+ sum_qvs[coded_base] += (int)(col[col_index].qv-'!');
+ max_qvs[coded_base] = (max_qvs[coded_base] < (col[col_index].qv-'!')) ? (col[col_index].qv-'!') : max_qvs[coded_base];
+ }
+
+ for(i=0; col[i].base != '\0'; ++i){
+ switch(col[i].base){
+ case 'a':
+ set_vals(i,1);
+ break;
+ case 'A':
+ set_vals(i,1);
+ break;
+ case 'c':
+ set_vals(i,2);
+ break;
+ case 'C':
+ set_vals(i,2);
+ break;
+ case 'g':
+ set_vals(i,3);
+ break;
+ case 'G':
+ set_vals(i,3);
+ break;
+ case 't':
+ set_vals(i,4);
+ break;
+ case 'T':
+ set_vals(i,4);
+ break;
+ case 'n':
+ set_vals(i,5);
+ break;
+ case 'N':
+ set_vals(i,5);
+ break;
+ case '-':
+ set_vals(i,0);
+ break;
+ case ' ':
+ break;
+ default:
+ fprintf(stderr, "arienai: %c\n",col[i].base);
+ abort();
+ //++ballot[(int)encode[(int)buf4print[i]]];
+ }
+ }
+
+ int number_of_ballots = 0;
+ {
+ int i;
+ for(i=0; i<6; ++i){
+ number_of_ballots += ballot[i];
+ }
+ }
+ if(number_of_ballots < 1){
+ fprintf(stderr, "sth buggy: consensus\n");
+ abort();
+ }
+ *depth = (number_of_ballots < 93) ? (char)(number_of_ballots+33) : '~';
+
+ // to elect or not
+ if(number_of_ballots < minimum_ballots){
+ // do not change the base and qv of the base read.
+ *seq = col[0].base;
+ // *depth = (char)(1+33);
+ *qual = col[0].qv;
+ if(*seq == '-'){
+ *seq = ' ';// this column will not be printed in output fastq
+ }
+ if(*qual < '!' && *seq != ' '){
+ fprintf(stderr,"kita #%c#, #%c#\n",*seq,*qual);
+ int i;
+ for(i=0; col[i].base != '\0'; ++i){
+ fprintf(stderr,"%c",col[i].base);
+ }
+ fprintf(stderr,"\n");
+ for(i=0; col[i].qv != '\0'; ++i){
+ fprintf(stderr,"%c",col[i].qv);
+ }
+ fprintf(stderr,"\n");
+ //abort();
+ }
+ return;
+ }
+ else{
+ // suppose 1-p ~ 1
+ // pi(p(~x)) * pi(1-p(x)) ~ pi(p(~x))
+ // {x| min{sum(qv of ~x)}}
+ // = {x| min{sum(qvall) - sum(qv of x)}}
+ // = {x| max{sum(qv of x)}}
+ int maxsumqv=0;
+ for(i=0; i<6; ++i){
+ maxsumqv = (maxsumqv < sum_qvs[i]) ? (sum_qvs[i]) : maxsumqv;
+ }
+ int second_sum_qv=0;
+ for(i=0; i<6; ++i){
+ if(sum_qvs[i] < maxsumqv){
+ second_sum_qv = (second_sum_qv < sum_qvs[i]) ? sum_qvs[i] : second_sum_qv;
+ }
+ }
+
+ int num_top=0;
+ for(i=0; i<6; ++i){
+ if(sum_qvs[i] == maxsumqv){
+ ++num_top;
+ }
+ }
+
+ if(num_top > 1){
+ second_sum_qv = maxsumqv;
+ }
+
+ for(i=0; i<6; ++i){
+ if(sum_qvs[i] == maxsumqv){
+ *seq = "-ACGTN"[i];
+ // *seq = Decode[i];
+ int q_cand = (maxsumqv-second_sum_qv);
+ //int q_cand = (maxsumqv-second_sum_qv)/(maximum_ballots-1);// 1/(maximum_ballots-1) is a scaling factor
+ if(q_cand > 1){
+ if(q_cand < 93){
+ *qual = (char)(q_cand+33);
+ }
+ else{
+ *qual = '~';
+ }
+ }
+ else{
+ *qual = (char)(1+33);
+ }
+ }
+ }
+ if(num_top==1){
+ return;
+ }
+ else{
+ // do not change the base (but change qv)
+ *seq = col[0].base;
+ *qual = (char)(1+33);// (0-0)/(maximum_ballots-1) = 0; -> 1
+ return;
+ }
+ }
+
+ fprintf(stderr,"never come here\n");
+ abort();
+ return;
+}
+*/
diff --git a/configure b/configure
new file mode 100755
index 0000000..03fab4b
--- /dev/null
+++ b/configure
@@ -0,0 +1,119 @@
+#! /bin/sh
+
+# waf configure wrapper
+
+# Fancy colors used to beautify the output a bit.
+#
+if [ "$NOCOLOR" ] ; then # any non-empty NOCOLOR disables ANSI colors
+ NORMAL=""
+ BOLD=""
+ RED=""
+ YELLOW=""
+ GREEN=""
+else
+ NORMAL='\033[0m'
+ BOLD='\033[01;1m'
+ RED='\033[01;91m'
+ YELLOW='\033[00;33m'
+ GREEN='\033[01;92m'
+fi
+
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+EXIT_ERROR=2 # reserved; not used below
+EXIT_BUG=10 # reserved; not used below
+
+CUR_DIR=$PWD
+
+#possible relative path
+WORKINGDIR=`dirname $0`
+cd $WORKINGDIR
+#abs path
+WORKINGDIR=`pwd` # resolve the script's directory to an absolute path
+cd $CUR_DIR
+
+
+# Checks for WAF. Honours $WAF if set. Stores path to 'waf' in $WAF.
+# Requires that $PYTHON is set.
+#
+checkWAF()
+{
+ printf "Checking for WAF\t\t\t: "
+ #installed miniwaf in sourcedir
+ if [ -z "$WAF" ] ; then # 1st: a 'waf' bundled next to this script
+ if [ -f "${WORKINGDIR}/waf" ] ; then
+ WAF="${WORKINGDIR}/waf"
+ if [ ! -x "$WAF" ] ; then
+ chmod +x $WAF # tarballs sometimes drop the execute bit
+ fi
+ fi
+ fi
+ if [ -z "$WAF" ] ; then # 2nd: build waf from a waf-light checkout
+ if [ -f "${WORKINGDIR}/waf-light" ] ; then
+ ${WORKINGDIR}/waf-light --make-waf
+ WAF="${WORKINGDIR}/waf"
+ fi
+ fi
+ #global installed waf with waf->waf.py link
+ if [ -z "$WAF" ] ; then # 3rd: a waf on PATH
+ WAF=`which waf 2>/dev/null`
+ fi
+ # neither waf nor miniwaf could be found
+ if [ ! -x "$WAF" ] ; then
+ printf "$RED""not found""$NORMAL""\n"
+ echo "Go to http://code.google.com/p/waf/"
+ echo "and download a waf version"
+ exit $EXIT_FAILURE
+ else
+ printf "$GREEN""$WAF""$NORMAL""\n"
+ fi
+}
+
+# Generates a Makefile. Requires that $WAF is set.
+#
+generateMakefile()
+{
+ cat > Makefile << EOF # NOTE(review): the generated 'all' target's command is a make comment, so plain 'make' is a no-op -- confirm intended
+#!/usr/bin/make -f
+# Waf Makefile wrapper
+WAF_HOME=$CUR_DIR
+
+all:
+#@$WAF build
+
+all-debug:
+	@$WAF -v build
+
+all-progress:
+	@$WAF -p build
+
+install:
+	$WAF install --yes;
+
+uninstall:
+	$WAF uninstall
+
+clean:
+	@$WAF clean
+
+distclean:
+	@$WAF distclean
+	@-rm -rf build
+	@-rm -f Makefile
+
+check:
+	@$WAF check
+
+dist:
+	@$WAF dist
+
+.PHONY: clean dist distclean check uninstall install all
+
+EOF
+}
+
+checkWAF
+generateMakefile
+
+"${WAF}" configure "$@" # fixed: "$@" preserves argument quoting; $* re-splits arguments containing whitespace
+exit $?
diff --git a/dfq2fq_v2.pl b/dfq2fq_v2.pl
new file mode 100755
index 0000000..1dd3b85
--- /dev/null
+++ b/dfq2fq_v2.pl
@@ -0,0 +1,329 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+use Getopt::Long;
+
+my $VALIDDEPTH=1; # minimum column depth kept by -finish
+my $VALIDLENGTH=1; # minimum read length to output
+my $fasta=0; # -f: output fasta instead of fastq
+my $finish=0; # -finish: chop regions with depth < VALIDDEPTH
+my $opt_dfq_check=0; # -check: pass through only well-formed dfq records
+my $opt_pnp=0; # -pnp: output confidently corrected reads only
+my $opt_list=0; # -list: output names of confidently corrected reads
+my $opt_nlist=0; # -nlist: output names of NOT confidently corrected reads
+my $opt_orig_depth=0; # -orig_depth: output the original depth strings only
+
+my $valid_voter=11; # nominal voter count used with -cdc
+my $trim=42; # bases ignored at each end by the -clc test
+
+my $confident_depth_coefficient=0.0; # -cdc: relative depth threshold (fraction of valid_voter)
+my $confident_length_coefficient=0.0; # -clc: relative length threshold
+
+my $confident_depth=0; # -cd: absolute depth threshold
+my $confident_length=0; # -cl: absolute confident-base count threshold
+
+GetOptions(
+ 'valid_depth=i' => \$VALIDDEPTH,
+ 'valid_read_length=i' => \$VALIDLENGTH,
+ 'f'=>\$fasta,
+ 'finish'=>\$finish,
+ 'check'=>\$opt_dfq_check,
+ 'pnp'=>\$opt_pnp,
+ 'list'=>\$opt_list,
+ 'valid_voter=i'=>\$valid_voter,
+ 'trim=i'=>\$trim,
+ 'cdc=f'=>\$confident_depth_coefficient,
+ 'clc=f'=>\$confident_length_coefficient,
+ 'cd=i'=>\$confident_depth,
+ 'cl=i'=>\$confident_length,
+ 'nlist'=>\$opt_nlist,
+ 'orig_depth'=>\$opt_orig_depth,
+);
+
+my $cdc = $confident_depth_coefficient; # short aliases used throughout
+my $clc = $confident_length_coefficient;
+my $cd = $confident_depth;
+my $cl = $confident_length;
+
+if(@ARGV != 1){
+ die "USAGE: <this> <in.dfq>\n\t[-f (outputs in fasta)\n\t --valid_depth int\n\t --valid_read_length int\n\t --finish (chops low depth (<valid_depth) regions)\n\t --check (outputs not broken dfq records and discard the rest ('broken' was defined in this code))\n\t --pnp (outputs confidently corrected reads only ('confident' was defined in this code))\n\t --list (outputs confidently corrected read names)\n\t --nlist (outputs NOT confidently corrected read names)]\n";
+}
+if($opt_pnp && $opt_list){
+ die "pnp and list options are incompatible\n";
+}
+if(($opt_list || $opt_pnp) && $opt_nlist){
+ die "nlist option is incompatible with list or pnp options\n";
+}
+if($cdc && ($cdc < 0.0 or $cdc > 1.0)){
+ die "must be 0.0 <= cdc <= 1.0\n";
+}
+if($clc && ($clc < 0.0 or $clc > 1.0)){
+ die "must be 0.0 <= clc <= 1.0\n";
+}
+
+if($cd && $cd <1){
+ die "must be 0 < cd\n";
+}
+if($cl && $cl <1){
+ die "must be 0 < cl\n";
+}
+if($cdc && $cd){
+ die "cdc and cd are incompatible\n";
+}
+if($clc && $cl){
+ die "clc and cl are incompatible\n";
+}
+
+if($VALIDDEPTH <= 0){
+ $VALIDDEPTH = 1; # clamp to a sane minimum
+}
+if($VALIDLENGTH <= 0){
+ $VALIDLENGTH = 1;
+}
+
+my $counter=0; # number of dfq records seen
+my $printed_line=0; # number of logical fastq lines handled (4 per record)
+
+my $line=<>;
+++$counter;
+my $result;
+
+while(!eof){
+ chomp $line;
+ $result = $line =~ s/^\@//; # a record must start with '@name'
+ if(!$result){
+ die "1. strange input\n";
+ }
+ my $chr = $line; # record (read) name
+ ++$printed_line;
+
+ my $consensus = "";
+ $line=<>;
+ my $line_c = 1; # base-line count; the qv block must span the same number of lines
+ chomp $line;
+ while(1){# read bases
+ $consensus.=$line;
+ if(eof){
+ die "2. strange input\n";
+ }
+ $line=<>;
+ chomp $line;
+ if($line =~ /^\+/){ # '+' separator ends the base block
+ last;
+ }
+ else{
+ ++$line_c;
+ }
+ }
+ ++$printed_line;
+ ++$printed_line;
+
+ my $depths=""; # per-column depth string (printable '!'-offset chars)
+ my $orig_depths="";
+ if(!$fasta){
+ chomp $line;
+ ($depths,$orig_depths) = (split /\t/, $line)[1..2]; # the '+' line carries tab-separated depth strings
+ if(!defined($orig_depths)){
+ $orig_depths = "";
+ }
+ }
+ my $qvs = "";
+ for(my $i=0; $i<$line_c; ++$i){# # of lines of bases and qvs must be =
+ if(eof){
+ last;
+ }
+ $line=<>;
+ chomp $line;
+ $qvs.=$line;
+ }
+ ++$printed_line;
+
+ my $strange=0;
+ if($consensus =~ /[^acgtnACGTN\-]/){ # any unexpected symbol invalidates the record
+ $strange = 1;
+ }
+
+ if($strange){
+ # discard this record.
+ printf STDERR ("strange record found\n");
+ printf STDERR ("\@%s\n",$chr);
+ printf STDERR ("%s\n",$consensus);
+ printf STDERR ("+\t%s\t%s\n",$depths,$orig_depths);
+ printf STDERR ("%s\n",$qvs);
+ }
+ elsif(!$fasta){
+ if($opt_orig_depth){ # dump only the original depth string
+ printf("%s\n",$orig_depths);
+ }
+ elsif($finish){ # chop low-depth regions and emit the pieces
+ &print_bases(\$chr,\$consensus,\$depths,\$qvs);
+ }
+ elsif($opt_dfq_check){ # pass through only structurally consistent records
+ if(length($consensus) == length($depths) && length($depths) == length($qvs)){
+ printf("\@%s\n",$chr);
+ printf("%s\n",$consensus);
+ printf("+\t%s\n",$depths);
+ printf("%s\n",$qvs);
+ }
+ else{
+ die "broken record\n";
+ }
+ }
+ elsif($opt_pnp || $opt_list || $opt_nlist){ # confidence-based filtering/reporting
+ &flush4pnp(\$chr,\$consensus,\$qvs,\$depths);
+ }
+ else{
+ &flush(\$chr,\$consensus,\$qvs,\$depths);
+ }
+ }
+ else{
+ if(length($consensus) >= $VALIDLENGTH){
+ # writes in fasta
+ $consensus =~ s/\-//g; # drop gap columns
+ printf(">%s\n",$chr);
+ printf("%s\n",$consensus);
+ }
+ }
+
+ if(!eof){
+ $line = <>;# next unit's name
+ }
+ ++$counter;
+}
+
+if(!eof){
+ printf STDERR "strange fastq\n";
+}
+if($printed_line%4 !=0){
+ printf STDERR ("strange fastq: the number of lines is %d, %d %% 4 = %d\n",$printed_line,$printed_line,$printed_line%4); # fixed: interpolating '$printed_line % 4' into the format produced an invalid '%' conversion
+}
+
+sub print_bases($$$$){ # split a record at low-depth columns and emit each piece (args are refs: name, bases, depths, qvs)
+ my ($chr,$consensus,$depths,$qvs)=@_;
+ my $bases="";
+ my $qvs_tbp=""; # qvs to-be-printed, kept column-aligned with $bases
+ my $part=0; # running part index appended to emitted read names by flush()
+ my $loop = length($$depths);
+ for(my $i=0; $i<$loop; ++$i){
+ if(ord(substr($$depths,$i,1))-33 >= $VALIDDEPTH){ # deep enough: keep the column
+ $bases .= substr($$consensus,$i,1);
+ $qvs_tbp .= substr($$qvs,$i,1);
+ }
+ else{
+ &flush($chr,\$bases,\$qvs_tbp,\$part); # low depth: emit the accumulated piece and restart
+ $bases="";
+ $qvs_tbp="";
+ }
+ }
+ &flush($chr,\$bases,\$qvs_tbp,\$part); # emit the trailing piece
+ $bases="";
+ $qvs_tbp="";
+}
+
+sub flush($$$$){ # drop gap columns and print one fastq record (args are refs: name, bases, qvs, part-counter-or-depths)
+ my($chr,$bases,$qvs,$part) = @_; # $part is a part counter in -finish mode, a depth string otherwise
+ if(length($$bases) != length($$qvs)){
+ die "FATAL: bases and qvs have different lengths\n";
+ }
+ #$$bases =~ s/\-//g;
+ {
+ my $tmp_bases="";
+ my $tmp_qvs="";
+ my $tmp_depths="";
+ my $loop = length($$bases);
+ for(my $i=0; $i<$loop; ++$i){
+ my $base = substr($$bases,$i,1);
+ my $qv = substr($$qvs,$i,1);
+ my $depth;
+ if(!$finish){
+ $depth = substr($$part,$i,1);#depth
+ }
+ if($base eq '-' or $base eq '~'){ # gap column: drop base, qv and depth together
+ next;
+ }
+ $tmp_bases .= $base;
+ $tmp_qvs .= $qv;
+ if(!$finish){
+ $tmp_depths .= $depth;
+ }
+ }
+ $$bases = $tmp_bases;
+ $$qvs = $tmp_qvs;
+ if(!$finish){
+ $$part = $tmp_depths; # write the filtered depth string back through the ref
+ }
+ }
+
+ $$bases =~ tr/a-z/A-Z/; # normalize bases to upper case
+ if(length($$bases)>= $VALIDLENGTH){
+ if($finish){
+ printf("\@%s/%06d/%d\n",$$chr,$$part++,length($$bases)); # name/part-index/length
+ printf("%s\n",$$bases);
+ printf("+\n");
+ printf("%s\n",$$qvs);
+ }
+ else{
+ printf("\@%s\n",$$chr);
+ printf("%s\n",$$bases);
+ printf("+\t%s\n",$$part);# part <- depths
+ printf("%s\n",$$qvs);
+ }
+ }
+}
+
+sub flush4pnp($$$$){ # decide whether a corrected read is 'confident' and report it per -pnp/-list/-nlist (args are refs)
+ my($chr,$bases,$qvs,$depths) = @_;
+ if(length($$bases) != length($$qvs)){
+ die "FATAL: bases and qvs have different lengths\n";
+ }
+
+ {
+ my $confident_bases=0; # columns whose depth clears the cd/cdc threshold
+ for(my $i=0; $i<length($$depths); ++$i){
+ my $depth = substr($$depths,$i,1);
+ my $d_passed = 0;
+ if($cdc && ord($depth)-33 >= int($cdc*$valid_voter)){ # relative depth threshold
+ $d_passed = 1;
+ }
+ elsif($cd && ord($depth)-33 >= $cd){ # absolute depth threshold
+ $d_passed = 1;
+ }
+ if(!$cdc && !$cd){
+ die "specify cdc or cd\n";
+ }
+
+ if($d_passed){
+ ++$confident_bases;
+ }
+ }
+
+ my $l_passed=0;
+ if($clc && $confident_bases >= int($clc*(length($$bases)-2*$trim))){ # relative length threshold, ignoring $trim bases at each end
+ $l_passed = 1;
+ }
+ elsif($cl && $confident_bases >= $cl){ # absolute confident-base count threshold
+ $l_passed = 1;
+ }
+ if(!$clc && !$cl){
+ die "specify clc or cl\n";
+ }
+
+ if($l_passed){
+ if($opt_pnp){ # print the confident record itself
+ $$bases =~ tr/a-z/A-Z/;
+ printf("\@%s\n",$$chr);
+ printf("%s\n",$$bases);
+ printf("+\t%s\n",$$depths);
+ printf("%s\n",$$qvs);
+ }
+ else{
+ if($opt_list){ # or just its name
+ printf("%s\n",$$chr);
+ }
+ }
+ }
+ elsif($opt_nlist){ # names of NOT-confident reads
+ printf("%s\n",$$chr);
+ }
+ }
+}
diff --git a/doc/_build/html/.buildinfo b/doc/_build/html/.buildinfo
new file mode 100644
index 0000000..5d24b08
--- /dev/null
+++ b/doc/_build/html/.buildinfo
@@ -0,0 +1,4 @@
+# Sphinx build info version 1
+# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
+config: 149cf9bb4f50da9e118ca2cd5c1cc3be
+tags: fbb0d17656682115ca4d033fb2f83ba1
diff --git a/doc/_build/html/_sources/Contact.txt b/doc/_build/html/_sources/Contact.txt
new file mode 100644
index 0000000..7618f8d
--- /dev/null
+++ b/doc/_build/html/_sources/Contact.txt
@@ -0,0 +1,12 @@
+=============
+Contact
+=============
+imai at cb dot k dot u-tokyo dot ac dot jp
+
+.. | ■バグ報告用テンプレート(仮)
+.. | ・sprai バージョン
+.. | ・各マシンのメモリ量とコア数
+.. | ・バグ症状
+.. | ・バグ再現方法
+.. | ・その他(errファイルの中身など)
+
diff --git a/doc/_build/html/_sources/Download.txt b/doc/_build/html/_sources/Download.txt
new file mode 100644
index 0000000..3c584c0
--- /dev/null
+++ b/doc/_build/html/_sources/Download.txt
@@ -0,0 +1,38 @@
+========
+Download
+========
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.12.tar.gz
+
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.11.tar.gz
+
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.10.tar.gz
+
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.9.tar.gz
+
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.8.tar.gz
+
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.7.tar.gz
+
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.6.tar.gz
+
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.5.tar.gz
+
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.4.tar.gz
+
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.3.tar.gz
+
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.2.tar.gz
+
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.1.tar.gz
+
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.tar.gz
+
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.5.1.6.tar.gz
+
+.. http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.5.1.5.tar.gz
+
+.. http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.5.1.3.tar.gz
+
+.. http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.5.1.tar.gz
+
+.. https://bitbucket.org/yun2/sprai_doc/downloads/sprai-0.9.1.3.tar.gz
diff --git a/doc/_build/html/_sources/Example.txt b/doc/_build/html/_sources/Example.txt
new file mode 100644
index 0000000..165b3b0
--- /dev/null
+++ b/doc/_build/html/_sources/Example.txt
@@ -0,0 +1,128 @@
+=====================================
+Example: assemble phage genome
+=====================================
+
+After installing Sprai, assemble a small genome.
+
+As an example, we show how to assemble a phage genome.
+
+Prepare data
+=======================
+Go to `pacbiotoca wiki <http://sourceforge.net/apps/mediawiki/wgs-assembler/index.php?title=PacBioToCA>`_ and download phage data ( \http://www.cbcb.umd.edu/software/PBcR/data/sampleData.tar.gz ).
+
+::
+
+ mkdir tmp
+ cd tmp
+ wget http://www.cbcb.umd.edu/software/PBcR/data/sampleData.tar.gz
+ tar xvzf sampleData.tar.gz
+
+Convert fasta to fastq. You can use a fa2fq.pl script in Sprai.
+::
+
+ fa2fq.pl sampleData/pacbio.filtered_subreads.fasta > pacbio.filtered_subreads.fq
+
+Prepare Sprai
+==========================================
+
+::
+
+ mkdir tmp2
+ cd tmp2
+ ln -s ../pacbio.filtered_subreads.fq all.fq
+ cp /your/sprai/directory/asm.spec .
+ cp /your/sprai/directory/ec.spec .
+
+Open ec.spec and change values.
+::
+
+ #>- params -<#
+ input_fastq all.fq
+ estimated_genome_size 50000
+ estimated_depth 100
+ partition 12
+ evalue 1e-50
+ trim 42
+ ca_path /path/to/your/wgs/Linux-amd64/bin/
+ word_size 18
+ #>- params -<#
+
+*input_fastq* is your input file name.
+
+*estimated_genome_size* is the number of nucleotides of your target.
+If you do not know it, set large number. For example, set 1e+12.
+
+*estimated_depth* is the depth of coverage of input_fastq of your target.
+If you do not know it, set 0.
+
+*partition* is the number of processors Sprai uses.
+
+*evalue* is used by blastn.
+
+*trim* is the number of nucleotides Sprai cut from both sides of alignments.
+
+*word_size* is used by blastn.
+
+Correct errors & assemble
+==============================
+::
+
+ ezez_vx1.pl ec.spec pbasm.spec > log.txt 2>&1 &
+
+ezez_vx1.pl outputs into a *result_yyyymmdd_hhmmss* directory. The improved reads will be in a *c01.fin.idfq.gz* file and contigs will be in a *CA/9-terminator/asm.ctg.fasta* file.
+
+We explain temporary files in a *tmp* directory.
+Sprai gives ID to each read in *all.fq* and outputs *c00.idfq.gz*.
+Sprai partitions *all.fq* to *c00_00xx.fa*.
+*c00.nin*, *c00.nhr* and *c00.nsq* are output by makeblastdb.
+Sprai aligns *c00_00xx.fa* to the *c00.nxx* database by using blastn, corrects errors and outputs *c00_00xx.dfq.gz*.
+dfq format contains the IDs of reads, bases, aligned depths and quality values.
+(Current Sprai outputs dummy quality values.)
+Sprai cuts low depth regions of each read, deletes '-' characters to convert to FASTQ format and outputs corrected reads in *c01.fin.idfq.gz*.
+Sprai extracts longest 20X reads of the *estimated_genome_size* from *c01.fin.idfq.gz*.
+And feeds them to Celera Assembler.
+Celera Assembler outputs files into *CA* directory.
+
+Find contigs
+===================
+You will find contigs in a *CA/9-terminator/asm.ctg.fasta* file.
+Files in *CA* are produced by the wgs-assembler (Celera Assembler).
+Read `the wgs-assembler document <http://sourceforge.net/apps/mediawiki/wgs-assembler/index.php?title=Main_Page>`_ for details.
+
+Get N50 value
+================
+If you install Statistics::Descriptive module to a directory Celera Assembler can see, Celera Assembler outputs N50 value and so on in a CA/do_*_c01.fin.top20x.log file.
+You will find lines in the file like below:
+::
+
+ [Contigs]
+ TotalContigsInScaffolds 1
+ TotalBasesInScaffolds 49848
+ TotalVarRecords 0
+ MeanContigLength 49848
+ MinContigLength 49848
+ MaxContigLength 49848
+ N25ContigBases 49848
+ N50ContigBases 49848
+ N75ContigBases 49848
+
+These are the statistics of contigs of the assembly.
+
+Notes
+==========================================
+If you would like to use more than about 1000 processors, we recommend to use a *pre_partition* parameter in the ec.spec file like:
+
+::
+
+ # ec.spec
+ pre_partition 4
+ partition 300
+
+And
+
+::
+
+ ezez4qsub_vx1.pl ec.spec pbasm.spec > log 2>&1 &
+
+In this example, Sprai will use 4*300 = 1200 processors.
+
diff --git a/doc/_build/html/_sources/FAQ.txt b/doc/_build/html/_sources/FAQ.txt
new file mode 100644
index 0000000..85bbca1
--- /dev/null
+++ b/doc/_build/html/_sources/FAQ.txt
@@ -0,0 +1,31 @@
+=======
+FAQ
+=======
+.. | Q. How many files does sprai create in each cycle?
+.. | A. Given P partition in ec.spec, the number of jobs in each cycle is 1 (by makeblastdb)+1 (by partition_fa.pl)+P (by blastn, bfmt72s, nss2v and myrealigner)+ceil(P/CURRENT) (by dfq->fq)+1 (by cat) = P+ceil(P/CURRENT)+3, where CURRENT is 1 in ezez_v3.pl and ezez4make.pl and 12 in ezez4qsub_v4.pl. The number of files created in each cycle is {3 (by makeblastdb)+P (by partition_fa.pl)+P (by blastn, bfmt72s, nss2v and myrealigner)+ceil(P/CURRENT) (by dfq->fq)+1 (by cat)}+2*(# of jobs)(. [...]
+
+Q. How much sequencing coverage depth would be required?
+---------------------------------------------------------
+A. It depends. If you can afford to sequence 100x of the estimated genome size, that would be the best.
+When the genome is large (e.g., 400Mbp or larger), you may reduce the depth to 30x or 40x.
+If you want to sequence less than 20x, you might consider hybrid assembly in which you combine Illumina reads and PacBio reads.
+Higher sequencing depth than 200x does not usually improve the assembled result.
+Note that the sequencing depth itself is not a good predictor of the assembly result.
+What matters is the sequencing depth of reads longer than repetitive elements in a target genome.
+If reads are short and only few of them are longer than some threshold, namely 2kbp (for bacteria), even 100x data would be beaten by 20x data with longer reads.
+We strongly recommend the use of the size selection protocol using Blue Pippin, which is highly recommended by PacBio.
+
+Q. How large genomes can Sprai assemble?
+--------------------------------------------------------
+A. The largest genome we tested is 2Gbp, but there is no theoretical limit for Sprai (although you need more computational resources as the target genome gets larger).
+Error-correction process is highly scalable, so most probably, Celera assembler would be the bottleneck.
+
+Q. Can Sprai assemble both PacBio CLRs and other reads such as Illumina short reads?
+-------------------------------------------------------------------------------------
+A. No. Sprai is designed to take only PacBio long reads.
+If you want to combine both reads, you may first correct errors in PacBio long reads by Sprai, and then you can use whatever genome assembler that accepts both error-corrected long reads and Illumina reads.
+
+Q. Why is the number of the output nucleotides by Sprai smaller than the number of the nucleotides in the input file?
+----------------------------------------------------------------------------------------------------------------------
+A. Sprai trims nucleotides that will not be useful for subsequent analysis such as de novo assembly or mapping to a reference genome. Such nucleotides include low coverage regions (valid_depth parameter; default = 4), regions of too short reads (valid read length: default = 500 bp), and both ends of reads (default = 42 bp).
+The exact ratio of such nucleotides depends on input data; however, it usually falls between 10% and 50% in our experience.
diff --git a/doc/_build/html/_sources/README.txt b/doc/_build/html/_sources/README.txt
new file mode 100644
index 0000000..bc64a07
--- /dev/null
+++ b/doc/_build/html/_sources/README.txt
@@ -0,0 +1,353 @@
+========
+README
+========
+Sprai (single-pass read accuracy improver) is a tool to correct sequencing errors in single-pass reads for de novo assembly.
+It is originally designed for correcting sequencing errors in single-molecule DNA sequencing reads, especially in Continuous Long Reads (CLRs) generated by PacBio RS sequencers.
+The goal of Sprai is not maximizing the accuracy of error-corrected reads; instead, Sprai aims at maximizing the continuity (i.e., N50 contig length) of assembled contigs after error correction.
+
+Introduction
+=============
+Error correction in sequencing reads is critical for de novo assembly of reads.
+Most error correction tools for next-generation DNA sequencers are designed for reads from second-generation DNA sequencers,
+and therefore they expect up to 1-5 % sequencing errors in reads.
+However, PacBio RS, a third-generation DNA sequencer, yields reads with ~15 % errors (at nucleotide level).
+
+Therefore, several error correction algorithms for PacBio long reads were developed.
+Second-generation DNA sequencers such as Illumina HiSeq yield reads of very good quality, but they all have systematic sequencing errors.
+In other words, the infinite sequencing depth would not lead to the perfect consensus sequence of a target genome.
+PacBio RS is the first sequencer that deemed to be free from systematic sequencing errors; all sequencing errors (at nucleotide level) seem to occur randomly and independently.
+Given that all sequencing errors occur randomly and independently, the consensus sequence of multiple alignment of sequencing reads would give the perfect reconstruction of the original DNA sequence.
+
+HGAP, which was published in 2013, is an error-correction and assembly tool based on this idea, and it proved that de novo assembly of genomes is possible only with long reads with 15% errors.
+Sprai also follows the same idea, but uses different algorithms in generating multiple alignment and consensus generation.
+
+Similar to HGAP, Sprai takes long reads generated by PacBio RS and corrects sequencing errors in them.
+Sprai has a pipeline to assemble the corrected reads using Celera assembler, so it automatically assembles the corrected reads as HGAP.
+
+The major difference between HGAP and Sprai is that Sprai usually outputs longer contigs (in terms of N50 contig length) than HGAP at modest sequencing depth (< 50x).
+The number of misassemblies is usually less than that by HGAP, and the nucleotide-level accuracy of assembled contigs is also higher than that by HGAP.
+With 100x or more sequencing depth, HGAP and Sprai perform almost equally in terms of continuity, but the nucleotide-level accuracy by HGAP is higher presumably because Quiver module (in HGAP) takes more information (e.g., three types of Quality Values) into account and because it uses HMM instead of trusting input multiple alignment.
+
+To always give the best assembly result (in terms of continuity and accuracy),
+Sprai has a module to polish the consensus sequences using Quiver module for the best nucleotide quality.
+
+Run Sprai for error correction, then it passes the error corrected reads to Celera assembler.
+The assembled contigs will be polished by Quiver module if you wish.
+Then you will get the best (as of writing, at least.) contigs among assembler pipelines. Happy genome sequencing!
+
+Install
+========
+Here we describe how to install Sprai using no additional packaging tool.
+If you would like to use LPM, see the next section.
+
+Sprai requires the following packages.
+
+* python 2.6 or newer
+* `NCBI BLAST+ ver. 2.2.27 or newer <ftp://ftp.ncbi.nlm.nih.gov/blast/executables/blast+/LATEST/>`_
+* `Celera Assembler ver. 8.1 or newer <http://sourceforge.net/apps/mediawiki/wgs-assembler/index.php?title=Main_Page>`_ (if you assemble reads after error-correction)
+
+Note that the legacy version of BLAST (without "a plus") is not compatible with Sprai.
+
+.. When you install Celera Assembler, you need to modify the source code. In ``AS_global.h`` in ``src`` directory of Celera Assembler, change from ::
+
+.. #define AS_READ_MAX_NORMAL_LEN_BITS 11
+
+.. to ::
+
+.. #define AS_READ_MAX_NORMAL_LEN_BITS 15
+
+.. This modification allows Celera assembler to accept reads longer than 2047 bp (up to 32767 bp).
+.. Without this modification, longer reads are not processed, which is a disaster for PacBio Continuous Long Reads.
+.. For details about the source code modification, please read `pacbiotoca wiki <http://sourceforge.net/apps/mediawiki/wgs-assembler/index.php?title=PacBioToCA>`_
+.. Celera assembler in PacBioToCA or SMRTAnalysis binary distributions is already patched, so you do not need to do on your own if you have installed either.
+
+After installing the prerequisites, extract the tar ball of Sprai, and do the following ::
+
+ cd sprai-some-version
+ ./waf configure [--prefix=/some/dir/]
+ ./waf build
+ ./waf install
+
+If you specify ``--prefix=/some/dir/``, Sprai will be installed under ``/some/dir/bin/`` directory. Otherwise, it will be installed in ``/usr/local/bin``.
+If you do not have a root access, probably you want to add ``--prefix=$HOME/local`` or so.
+If you are creating RPM package, you would add ``--prefix=/usr``.
+
+Install (with LPM)
+===================
+Alternatively, you can use `Local Package Manager (LPM) <http://www.kasahara.ws/lpm>`_ to automate the whole installation process.
+LPM is a software management tool for non-root users. It can download/build/install packages like yum/apt-get/zypper/MacPort/HomeBrew,
+but does not require root privileges. LPM installs packages under a user's home directory.
+It also takes care of setting environmental variables. Another feature might be that it can switch a set of installed software;
+for example, if you have software A and B but they conflict with each other, you can exclusively enable either of them.
+
+With LPM, you can just type as follows
+(assuming that your Linux distribution is Red Hat Enterprise Linux or a compatible distribution such as CentOS or Scientific Linux)::
+
+ $ lpm install blast+
+ $ lpm install smrtanalysis-centos
+ $ lpm install sprai
+
+For Ubuntu users, the second command will not work, so you have to install the SMRTAnalysis pipeline for yourself from PacBio DevNet.
+Normally you have zlib installed on your system by default, but in case there is not, you should do the following before the above three commands ::
+
+ $ lpm install compiler-envs
+ $ lpm install zlib
+
+The first one might have been executed already if you have installed some other libraries before. In such a case, you can skip the first command.
+
+Uninstall
+================
+If you manually installed Sprai, you can uninstall Sprai by the following command::
+
+ cd sprai-some-version
+ ./waf uninstall
+
+If you installed Sprai by LPM, uninstalling Sprai would be easier::
+
+ lpm uninstall sprai
+
+Run Sprai
+================
+First, create a working directory. Here we assume that we use tmp::
+
+ mkdir tmp
+ cd tmp
+
+Next, we prepare input *subreads* in FASTQ format.
+Note that Sprai takes *subreads* as input, not *reads*.
+The difference between *reads* and *subreads* is that reads (of PacBio) may contain adaptor sequences, while *subreads* do not.
+
+If you only have FASTQ files and do not know which ones they contain, do as follows.
+If the file name is ``filtered_subreads.fastq``, it is most likely that the file contains subreads.
+Otherwise, it is safer to start from .bas.h5 files, which contain raw reads.
+To convert .bas.h5 file into subreads in FASTQ format, there are two ways.
+
+The first way is to use SMRT Pipe. If you have PacBio RS, you usually have it installed on your system, so this might be the easiest choice.
+Run P_Filter (or S_Filter) with ReadScore threshold 0.75 (or 0.80 if the sequencing depth is higher than 120-150x), with MinReadLen threshold 500 bp.
+The resulted ``filtered_subreads.fastq`` contains subreads, which are compatible with Sprai.
+The installation manual of SMRT Analysis (including SMRT Pipe) says that it requires a bunch of daemons including MySQL, but when we use it for Sprai, you can just download and extract it.
+Command line utilities including SMRT Pipe work without any daemons. So, please download it, extract it, set some environment variables in a startup script, and you are finished.
+Downloading SMRT Analysis package may take a while, but the others take less than 10 minutes.
+
+The second way is to use ``bash5tools.py``. It is a standalone command line utility that works in the UNIX way; it does not take an XML configuration file as SMRT Pipe, and instead everything can be controlled by command line switches.
+Therefore ``bash5tools.py`` is very handy when we create a pipeline by our own.
+To use ``bash5tools.py``, you have to install it from `PacBio GitHub (pbh5tools) <https://github.com/PacificBiosciences/pbh5tools>`_.
+Honestly speaking, it is not well-packaged (yet?) so that you may encounter a Python error even if you exactly follow the installation instruction there.
+The problem we had was that newly installed pbcores went to a different directory than the old installation; the old one comes first in Python module search, so the newly installed ones were hidden.
+We had to remove the old pbcore manually. Please ask people in PacBio about how to install pbh5tools (and pbcores) because things change so quickly that we may know latest problems.
+Once you installed ``bash5tools.py``, you can convert .bas.h5 into FASTQ format by the following command::
+
+ bash5tools.py --outFilePrefix example_output --readType subreads --outType fastq --minReadScore 0.75 example.bas.h5
+
+You will get ``example_output.fastq`` as output.
+
+Once we have all subreads, we combine all FASTQ files into one::
+
+ cat a000.fastq a001.fastq ... > all.fq
+
+We also need parameter files, with which we specify various paramters for error-correction and sequence assembly (by Celera assembler)::
+
+ cp /path/to/sprai/pbasm.spec .
+ cp /path/to/sprai/ec.spec .
+
+Then, we modify parameters in the both template files.
+*pbasm.spec* is a parameter file for Celera assembler; see the documents of Celera assembler for details.
+If you only want error-correction and do not assemble the error-corrected reads, you do not need this file.
+This file controls how much memory and CPU cores you will use, so it is very likely that you have to understand various parameters.
+
+*ec.spec* is a parameter file for Sprai.
+The most important parameter in this file is *estimated_genome_size*.
+If you have not estimated the length of your target genome, give a large number (e.g., 1e+12).
+After the first assembly, you can calculate the depth distribution of reads to estimate the genome size, after which you can try a second run, which might give a better result.
+Modify other parameters in ec.spec as well, following instructions in the file.
+However, the result is not so sensitive to this parameter in our experience.
+
+Single Node Mode
+-----------------
+
+Sprai has several execution modes. The first mode we describe is single node mode, with which we can use only a single machine.
+If you have more than one server, please see the next section.
+You can still use multiple CPU cores with single node mode, as long as the cores are on the same machine.
+
+Edit *ec.spec*, and give *ca_path* parameter, which is the directory (full-path) in which you put wgs-assembler binaries.
+
+.. Then,
+.. ::
+
+.. fs2ctg_v4.pl ec.spec asm.spec -n
+
+.. You can confirm what will happen by using fs2ctg_v4.pl with '-n' option.
+
+Then, type the following commands
+::
+
+ ezez_vx1.pl ec.spec pbasm.spec > log 2>&1 &
+
+This will do sequencing-error correction, and contigs will be created.
+
+If you only need error-corrected reads and do not want Sprai (Celera assembler) to assemble them, do as follows
+
+.. ::
+
+.. fs2ctg_v4.pl ec.spec asm.spec -n -ec_only
+
+::
+
+ ezez_vx1.pl ec.spec /dev/null -ec_only > log 2>&1 &
+
+Note that parameter files for Sprai and Celera assembler are independent; you can run Celera with multiple nodes (machines) even with Sprai single node mode.
+
+Multi-node mode 1 (qsub mode)
+------------------------------
+
+There are two types of execution modes with Sprai. The first one is qsub mode; a single master process throws child jobs by qsub.
+This mode runs faster and more reliably than the second mode. However, there is a drawback.
+The biggest problem might be that there is no way of restarting the process once a child process fails.
+Anyway, this mode is the most extensively tested, so you should use this mode if your target genome is small enough to be processed with a small number of nodes and thus with little chance of failure.
+Currently, Sprai supports Sun Grid Engine (SGE) or equivalents (e.g., N1GE, UGE).
+To correct sequencing errors of PacBio Continuous Long Reads and also would like to assemble them, specify *blast_path* and *sprai_path* in ec.spec, and do as follows
+::
+
+ ezez4qsub_vx1.pl ec.spec pbasm.spec > log 2>&1 &
+
+.. \or
+.. ::
+
+.. ezez4makefile.pl ec.spec asm.spec > ezez4makefile.log 2>&1 && make &
+
+If you only use error-corrected reads and do not want Sprai (Celera assembler) to assemble them, do as follows
+::
+
+ ezez4qsub_vx1.pl ec.spec /dev/null -ec_only > log 2>&1 &
+
+.. \or
+.. ::
+
+.. ezez4makefile.pl ec.spec asm.spec > ezez4makefile.log 2>&1 && make ec_only &
+
+Multi-node mode 2 (TGEW mode)
+------------------------------
+
+The second mode works with `TGEW <https://github.com/mkasa/TGEW>`_, which is a wrapper script of qsub.
+tge_make in the TGEW package interprets Makefile and submits jobs by qsub.
+::
+
+ ezez4makefile_v3.pl ec.spec pbasm.spec > log 2>&1
+ tge_make -bg > tge_make.log 2>&1 &
+
+ezez4makefile_v3.pl creates Makefile, and tge_make processes it.
+In the case of failure, you can just reexecute tge_make to restart. As make utility, tge_make compares the timestamps of files to see if any updates are needed.
+You can type the following command to see what would be reexecuted::
+
+ tge_make -n
+
+Since this mode submits a significant number of jobs at once to SGE, you may have to limit the number of partitions for not to exceed the job number limit.
+You might add a make target to limit the number of jobs being submitted simultaneously to SGE.
+For example, if you want only error-correction, you can specify ec_only target::
+
+ tge_make -bg ec_only
+
+tge_make actually calls GNU make to analyse dependencies between files, so you can give any valid target for GNU make.
+
+Before Sprai ver 0.9.6.1.4, Multi-node mode 2 considers only 'pre_partition'.
+Since Sprai ver 0.9.6.1.4, the number of jobs submitted to SGE became 'partition' * 'pre_partition' in Multi-node mode 2.
+
+Postprocessing
+===============
+Once you get contigs from an external de novo assembler (here we assume Celera assembler),
+you might want to polish them up because you still have a number of ways to improve the assembly.
+
+Re-consensuscalling
+--------------------
+Although Sprai can remove most sequencing errors, there often remain some sequencing errors in
+a systematic way.
+For example, two copies of a repetitive element with 0.1% of single nucleotide variants might
+have been collapsed into the same sequence during the error correction process. Even in such a case,
+you are often able to reconstruct the exact copies of the two repetitive elements by exploiting
+long reads that span the entire repetitive elements.
+To this end, we can use Quiver, which is a basecalling program developed by PacificBiosciences.
+Since you must have installed the prerequisites for Sprai, you must have Quiver on your system.
+
+You can manually prepare files necessary for running Quiver, though here we introduce an easy way
+with pbalign script, which is again developed by PacBio. Here are links to the software.
+
+* `pbalign <https://github.com/PacificBiosciences/pbalign>`_
+* `blasr <https://github.com/PacificBiosciences/blasr>`_ (pbalign uses new options of BLASR)
+
+You can see the documents on GitHub for installation.
+You might want to see `the installation log by a user <http://cell-innovation.nig.ac.jp/wiki/tiki-index.php?page=Sprai>`_ as well (in Japanese, but the command lines will help you even if you cannot read the language).
+
+Anyway, we assume that both ``pbalign.py`` and blasr work well now.
+Then you can type as follows to align raw reads against the assembly::
+
+ pbalign.py --nproc 8 --forQuiver all.fofn result_here_comes_date_and_time/CA/9-terminator/asm.scf.fasta all_mapped_against_celera.cmp.h5
+
+The option ``--nproc`` specifies the number of parallelisms so you can change the number according to the number of CPUs you have.
+The next argument, all.fofn, is a file in which input bax.h5 files (raw data produced by the PacBio primary pipeline) are described line by line.
+The third argument is a FASTA file that contains the assembly, and the last one is an output file of ``pbalign.py``.
+The output file is a kind of a "PacBio-variant of BAM file". It basically stores the alignment information as BAM files do.
+The difference between cmp.h5 files and BAM files is that it stores PacBio-specific raw data, which is useful for more accurate consensus calling.
+After creating all_mapped_against_celera.cmp.h5, run Quiver::
+
+ quiver -j 8 all_mapped_against_celera.cmp.h5 -r result_here_comes_date_and_time/CA/9-terminator/asm.scf.fasta -o consensus.fa -o consensus.fq -o variants.gff
+
+The first option specifies the number of parallelisms, so you might want to change it.
+The next argument specifies the path of the reference genome against which the raw reads are aligned.
+A bunch of ``-o`` options are output file names; note that the file format is determined by the suffix of the output files. ``consensus.fa`` will be in FASTA format, while "consensus.fq" will be in FASTQ format.
+The GFF file contains only difference with the reference assembly.
+You usually need a FASTA (or FASTQ) file for the final result, so ``-o consensus.fa`` might be sufficient.
+
+Circularization
+----------------
+If your target genome does not contain circular chromosomes, skip this subsection.
+Bacterial genomes often have circular chromosomes, but most de novo assemblers do not consider circular chromosomes.
+Since assembling shotgun sequences into circular chromosomes was just a dream a decade ago, no one felt that
+circular chromosomes must have been considered. Assemblers might break somewhere in a chromosome and output a linear contig.
+Now that PacBio long reads of 80x can usually be assembled into circular chromosomes, we must take care of them.
+The best thing we can do is obviously to develop a new de novo assembler that considers circular chromosomes seriously,
+but it takes time. So here we introduce an ad-hoc solution until we see the best solution in public.
+Sprai package contains a Perl script, ``check_circularity.pl``, which checks if a contig is likely to be circular.
+Here is how to run it::
+
+ check_circularity.pl consensus.fa tmpdir
+
+The first argument is usually an output from Quiver (or an output from a de novo assembler if you do not use Quiver).
+``check_circularity.pl`` takes every contig in the input file and checks if the head and the tail of the contig overlap each other using BLAST.
+If that is the case, check_circularity.pl cuts the redundant part of the contig, and output the result to tmpdir.
+Output files with ``.cut`` in their name are (quite likely) circular chromosomes.
+
+Removing redundant contigs
+---------------------------
+The combination of Sprai + Celera assembler often yields a lot of small, fragmented contigs which are likely to be redundant.
+Those contigs come off from the "real" long contigs for some reason, so they are often artificial and, therefore, redundant.
+``check_redundancy.pl``, in Sprai package, is a tool to find such redundant contigs.
+It uses BLAST for finding potentially redundant contigs, and outputs the result to the standard output.
+There might be seemingly redundant but real contigs, so the elimination of seemingly redundant contigs is not done automatically.
+You might find `fatt <https://github.com/mkasa/>`_ useful for contig removal::
+
+ fatt extract --reverse --seq contig_to_remove1 --seq contig_to_remove2 ... consensus.fa > consensus_with_some_removed.fa
+
+If the number of contigs is too many to put them in a command line, then just put them in a file::
+
+ fatt extract --reverse --file contig_names.txt consensus.fa > consensus_with_some_removed.fa
+
+Note that small and circular contigs might be plasmids that share the sequence with the genome.
+
+Renaming contigs
+-----------------
+After all the postprocessing, you might want to rename circularized contigs to human-friendly names.
+Create an edit script like this::
+
+ loadseq consensus_with_some_removed.fa
+ rename "ctg000000000132" "ChromosomeI"
+ rename "ctg000000000076" "ChromosomeII"
+ saveseq final.fa
+
+and run fatt (explained in the previous section) as follows::
+
+ fatt edit editscript.txt
+
+Yikes! You finally get final.fa in which you see the two chromosomes of production quality.
+
+
+
diff --git a/doc/_build/html/_sources/index.txt b/doc/_build/html/_sources/index.txt
new file mode 100644
index 0000000..0f79d24
--- /dev/null
+++ b/doc/_build/html/_sources/index.txt
@@ -0,0 +1,61 @@
+.. foo documentation master file, created by
+ sphinx-quickstart on Mon Mar 18 15:10:12 2013.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+sprai = single pass read accuracy improver
+==============================================================
+
+Contents
+============
+
+.. toctree::
+ :maxdepth: 2
+
+ README
+ Example
+ Download
+ FAQ
+ Contact
+
+Changelogs
+=============
+2016.4.12: v0.9.9.12: Sprai is released under MIT license. See LICENSE.txt .
+
+2015.10.20: v0.9.9.11: nss2v_v3.c: variable max read length. (Thanks to Tomoaki Nishiyama for code modifications)
+
+2015.10.16: v0.9.9.10: bfmt72s.c: variable max read length. bfmt72s.c & nss2v.c: refactored (Thanks to Tomoaki Nishiyama for code modifications)
+
+2015.10.14: v0.9.9.9: bfmt72s.c: max read length 65536 -> 131072 (Thanks to Tomoaki Nishiyama for a report)
+
+2015.9.7: v0.9.9.8: blastn -max_target_seqs 100: use less memory (Thanks to Adrian Platts for a report)
+
+2015.7.14: v0.9.9.7: ezez_vx1.pl & ezez4qsub_vx1.pl: zcat -> gzip -d -c (Thanks to Hikoyu Suzuki for a report)
+
+2015.7.1: v0.9.9.6: ezez_vx1.pl: around line 325, a problem that the file dfq2fq_v2.pl was being referred to without a path was fixed. (Thanks to Adrian Platts & Jeff Xing for a report)
+
+2015.5.5: v0.9.9.5: ca_ikki_v5.pl: can be executed without uuidgen (Thanks to Adithi for a report)
+
+2015.4.29: v0.9.9.4: ezez_vx1.pl and ezez4qsub_vx1.pl: can be executed without uuidgen (Thanks to Adithi for a report)
+
+2015.4.28: v0.9.9.3: ezez_vx1.pl: can be executed by dash (Thanks to Peter Szovenyi for a report)
+
+2015.2.17: v0.9.9.2: reduced the load on the SGE master daemon
+
+2014.11.4: v0.9.9.1: adapted to P6/C4 chemistry
+
+2014.8.31: v0.9.9: released
+
+2014.7.26: v0.9.5.1.6 : ezez4qsub_v8.pl doesn't stop Celera Assembler on SGE.
+
+2014.7.7: v0.9.5.1.5 : sprai (ezez_v7.pl and ezez4qsub_v8.pl) adapt to Celera Assembler version 8.1. (Thanks to Hiroaki Sakai for a report)
+
+2013.10.12: v0.9.5.1.3 bug fix: ezez_v7.pl does not stop after error correction and run Celera Assembler. (Thanks to Tomoko F. Shibata for a bug report)
+
+2013.09.09: v0.9.5.1 bug fix: We replaced '\|&' with '\|' (Thanks to Kazutoshi Yoshitake for a bug report).
+
+2013.09.02: v0.9.5 was released.
+
+2013.07.08: Example was added to this document.
+
+2013.06.29: v0.9.1 was released.
diff --git a/dumbbell_filter.pl b/dumbbell_filter.pl
new file mode 100755
index 0000000..6187f93
--- /dev/null
+++ b/dumbbell_filter.pl
@@ -0,0 +1,165 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+
+# dumbbell_filter.pl
+# Reads PacBio subreads in FASTQ format from stdin/ARGV and, for each
+# subread group (records whose names share the same movie/hole prefix of
+# the @xxxx/yyyy/z1_z2 naming scheme), prints one representative subread
+# chosen by flush() defined at the bottom of this file.
+
+# NOTE(review): @msgs is declared but never printed anywhere in this script.
+my @msgs=(
+#"USAGE: <this> <in.fasta> [<in2.fasta> ...]"
+"USAGE: <this> <in.fq> [<in2.fq> ...]"
+);
+
+# xxxx/yyyy name prefix of the previously parsed record; a change of
+# prefix marks the boundary between subread groups.
+my $prev_name_prefix="";
+
+# Disabled legacy FASTA-mode initialization (POD block; never compiled).
+=pod
+my $name = <>;
+chomp $name;
+$name =~ s/^>//;
+{
+  my @tmp = split /\//,$name;
+  $prev_name_prefix = join "/",@tmp[0..$#tmp-1];
+}
+my $bases = "";
+=cut
+
+
+# Accumulators for the records of the current subread group.
+my @names=();# record names
+my @reads=();# concatenated base strings
+my @opts=();# '+' option lines
+my @qvs=();# concatenated quality strings
+
+# Disabled legacy FASTA-mode main loop and flush() (POD block; never compiled).
+=pod
+while(1){
+  while(my $buf=<>){
+    chomp $buf;
+    if($buf =~ /^>/){
+      push @names,$name;
+      push @reads,$bases;
+
+      $name = $buf;
+      $name =~ s/^>//;
+      {
+        my @tmp = split /\//,$name;
+        my $p = join "/",@tmp[0..$#tmp-1];
+        if($prev_name_prefix ne $p){
+          &flush(\@names,\@reads);
+          @names=();
+          @reads=();
+          $prev_name_prefix = $p;
+        }
+      }
+
+      $bases= "";
+      last;
+    }
+    else{
+      $bases .= $buf;
+    }
+  }
+  if(eof){
+    last;
+  }
+}
+push @names,$name;
+push @reads,$bases;
+&flush(\@names,\@reads);
+
+sub flush($$){
+  my $n = shift;
+  my $r = shift;
+  my @names = @$n;
+  my @reads = @$r;
+  my $longest = length($reads[0]);
+  my $i_longest=0;
+  for(my $i=1; $i<@reads; ++$i){
+    if(length($reads[$i]) > $longest){
+      $longest = length($reads[$i]);
+      $i_longest = $i;
+    }
+  }
+  printf(">%s\n",$names[$i_longest]);
+  printf("%s\n",$reads[$i_longest]);
+}
+=cut
+
+# FASTQ header marker used when scanning quality lines.
+my $id_head_character = "\@";
+
+# Fields of the record currently being parsed.
+my $name;
+my $bases;
+my $opt;
+my $qv;
+
+# Prime the loop with the first header line, then parse one FASTQ record
+# per iteration.  Multi-line records are supported: sequence lines are
+# concatenated until the '+' option line, and quality lines are
+# concatenated until a line starts with '@' AND at least length($bases)
+# quality characters have been collected (a quality line may itself
+# legitimately begin with '@').
+my $line = <>;
+while(!eof){
+  chomp $line;
+  $line =~ s/^\@//;# strip the FASTQ header marker
+  $name = $line;
+
+  $bases="";
+  $line =<>;
+  while($line !~ /^\+/){# accumulate sequence lines up to the '+' line
+    chomp $line;
+    $bases .= $line;
+    $line = <>;
+  }
+  chomp $line;
+  $opt = $line;# the '+' (option) line, kept verbatim
+
+  $qv="";
+  $line =<>;# qv
+  while($line !~ /^$id_head_character/ || length($qv) < length($bases)){
+    chomp $line;
+    $qv .= $line;
+    if(eof){
+      last;# final record: no next header line follows
+    }
+    $line = <>;
+  }
+  {
+    # name must be in @xxxx/yyyy/z1_z2 format
+    my @tmp = split /\//,$name;
+    my $p = join "/",@tmp[0..$#tmp-1];
+    if(!$prev_name_prefix){
+      $prev_name_prefix = $p;# first record seen: start the first group
+    }
+    # Flush the accumulated group when the name does not have the expected
+    # three '/'-separated fields or when the movie/hole prefix changes.
+    if((@tmp != 3 && @names>0) || $prev_name_prefix ne $p){
+      &flush(\@names,\@reads,\@opts,\@qvs);
+      @names=();
+      @reads=();
+      @opts=();
+      @qvs=();
+      $prev_name_prefix = $p;
+    }
+    push @names,$name;
+    push @reads,$bases;
+    push @opts,$opt;
+    push @qvs,$qv;
+  }
+}
+# Flush the final group accumulated when the input ran out.
+&flush(\@names,\@reads,\@opts,\@qvs);
+
+# Pick one representative subread from the group and print it as a FASTQ
+# record.  The last subread of a group is never chosen because it may be
+# truncated / low quality; when the group has 3 or more subreads the longer
+# of the first two is used, otherwise the first one.
+# Args: refs to parallel arrays of names, bases, option lines and QVs.
+sub flush($$$$){
+  my $n = shift;
+  my $r = shift;
+  my $o = shift;
+  my $q = shift;
+
+  my @names = @$n;
+  my @reads = @$r;
+  my @opts = @$o;
+  my @qvs = @$q;
+
+  # BUGFIX: flush() is also called once after the main loop; with an empty
+  # input the arrays are empty and the original code warned on
+  # length(undef) and printed a bogus "@\n\n\n\n" record.
+  return unless @reads;
+
+  my $longest = length($reads[0]);
+  my $i_adequate=0;
+  # discard last one (because it may have low quality)
+  # if # of subreads > 2, then use 1st or 2nd read.
+  # else use 1st read
+  if(@reads>2){
+    if(length($reads[1]) > $longest){
+      $longest = length($reads[1]);
+      $i_adequate = 1;
+    }
+  }
+  printf("\@%s\n",$names[$i_adequate]);
+  printf("%s\n",$reads[$i_adequate]);
+  printf("%s\n",$opts[$i_adequate]);
+  printf("%s\n",$qvs[$i_adequate]);
+}
diff --git a/ec.spec b/ec.spec
new file mode 100644
index 0000000..104bb7e
--- /dev/null
+++ b/ec.spec
@@ -0,0 +1,52 @@
+#### common ####
+# input_for_database: filtered subreads in fasta or fastq format
+input_for_database all.fq
+
+# min_len_for_query: subreads whose length is greater than or equal to this value will be corrected
+min_len_for_query 500
+
+#if you don't know the estimated genome size, give a large number
+estimated_genome_size 50000
+#if you don't know the estimated depth of coverage, give 0
+estimated_depth 100
+
+# ca_path: the directory where the Celera Assembler binaries exist
+ca_path /home/imai/wgs/Linux-amd64/bin/
+
+# the number of processes used by all vs. all alignment
+# = 'partition' (in single node mode)
+# = 'pre_partition' * 'partition' (in many node mode)
+pre_partition 2
+partition 12
+
+# sprai prefer full paths
+# if you use ezez4qsub*.pl, you MUST specify blast_path & sprai_path
+# blast_path: where blastn and makeblastdb exist in
+blast_path /home/imai/bin/
+# sprai_path: where binaries of sprai (bfmt72s, nss2v_v3 and so on) exist in
+sprai_path /home/imai/sprai/bin/
+
+#### many node mode (advanced) ####
+
+#sge: options for all the SGE jobs
+#sge -soft -l ljob,lmem,sjob
+#queue_req: additional options for all the SGE jobs
+#queue_req -l s_vmem=4G -l mem_req=4
+#longestXx_queue_req: if valid, displaces queue_req
+#longestXx_queue_req -l s_vmem=64G -l mem_req=64
+#BLAST_RREQ: additional options for SGE jobs of all vs. all alignment
+#BLAST_RREQ -pe def_slot 4
+
+#### common (advanced) ####
+
+# used by blastn
+word_size 18
+evalue 1e-50
+num_threads 1
+max_target_seqs 100
+
+#valid_voters 11
+
+#trim: both ends of each blastn alignment will be trimmed by 'trim' bases to detect chimeric reads
+trim 42
+
diff --git a/extract_fq.pl b/extract_fq.pl
new file mode 100755
index 0000000..5a4d257
--- /dev/null
+++ b/extract_fq.pl
@@ -0,0 +1,141 @@
+#!/usr/bin/perl
+# extract_fq.pl: select or exclude FASTQ records by name.
+#
+#   <this> -makelist <exclude.fq>               -> print one record name per line
+#   <this> <all.fq> <exclude.list>              -> print records NOT in the list
+#   <this> <all.fq> -include <ids.sorted.list>  -> print records IN the list
+#                                                  (both inputs sorted by id)
+use strict;
+use warnings;
+use Getopt::Long;
+
+my $opt_makelist=0;
+my $id_head_character="\@";
+my $opt_include="";
+
+GetOptions('makelist'=>\$opt_makelist,'include=s'=>\$opt_include);
+
+my $err_msg = "USAGE:\t<this> <all.fq> <exclude.list> > <the.rest.fq>\n\t\thow to make 'exclude.list':\n\t\t\t<this> -makelist <exclude.fq> > <exclude.list>\n\t<this> <all=db.sorted_by_id.fq> -include <target_ids.sorted_by_id.list> > <target.fq>\n\t\thow to make 'target_ids.sorted_by_id.list':\n\t\t\tdfq2fq.pl -list <in.idfq> | sort -n > <target_ids.sorted_by_id.list>";
+
+if($opt_makelist){
+  # List mode: parse <exclude.fq> and print its record names, one per line.
+  if(@ARGV != 1){
+    die "$err_msg\n";
+  }
+  my $alldb = $ARGV[0];
+  open my $dbh,"<".$alldb or die "cannot open $alldb: $!\n";
+
+  my $line = <$dbh>;
+  while(!eof){
+    chomp $line;
+    $line =~ s/^$id_head_character//;
+    my $name = $line;
+
+    # Sequence may span several lines; read until the '+' option line.
+    my $bases="";
+    $line =<$dbh>;
+    while($line !~ /^\+/){
+      chomp $line;
+      $bases .= $line;
+      $line = <$dbh>;
+    }
+    chomp $line;
+    my $depths = $line;
+
+    # Quality lines may start with '@'; keep reading until at least
+    # length($bases) quality characters have been collected.
+    my $qvs="";
+    $line =<$dbh>;# qvs
+    while($line !~ /^$id_head_character/ || length($qvs) < length($bases)){
+      chomp $line;
+      $qvs.=$line;
+      if(eof){
+        last;
+      }
+      $line = <$dbh>;
+    }
+    print "$name\n";
+  }
+  close $dbh;
+  exit 0;
+}
+else{
+  # Exclude mode takes <all.fq> <exclude.list>; include mode takes <all.fq>.
+  # BUGFIX: the original required exactly one argument here and then set the
+  # list file name from the (empty) -include option, so exclude mode always
+  # died with 'cannot open : ...'.  The exclude list now comes from ARGV[1],
+  # matching the usage message.
+  if(($opt_include && @ARGV != 1) || (!$opt_include && @ARGV != 2)){
+    die "$err_msg\n";
+  }
+}
+
+my $alldb = $ARGV[0];
+my $list = $opt_include ? $opt_include : $ARGV[1];
+
+
+if(!$opt_include){
+  my %list=();# names of fastq records you would like to exclude from all.fq
+
+  open my $lh,"<".$list or die "cannot open $list: $!\n";
+  while(my $el=<$lh>){
+    chomp $el;
+    $list{$el} = 1;
+  }
+  close $lh;
+
+  open my $dbh,"<".$alldb or die "cannot open $alldb: $!\n";
+
+  my $line = <$dbh>;
+  while(!eof){
+    chomp $line;
+    $line =~ s/^$id_head_character//;
+    my $name = $line;
+
+    my $bases="";
+    $line =<$dbh>;
+    while($line !~ /^\+/){
+      chomp $line;
+      $bases .= $line;
+      $line = <$dbh>;
+    }
+    chomp $line;
+    my $depths = $line;
+
+    my $qvs="";
+    $line =<$dbh>;# qvs
+    while($line !~ /^$id_head_character/ || length($qvs) < length($bases)){
+      chomp $line;
+      $qvs.=$line;
+      if(eof){
+        last;
+      }
+      $line = <$dbh>;
+    }
+    # Emit the record only when its name is not on the exclude list.
+    if(!defined($list{$name})){
+      print "\@$name\n$bases\n$depths\n$qvs\n";
+    }
+  }
+
+  close $dbh;
+}
+else{# for include.list
+  # Both files are sorted by id, so one sequential sweep suffices: for each
+  # target id, advance through the database (4 lines per record) until hit.
+  open my $dbfh, "<".$alldb or die "cannot open $alldb : $!\n";
+  open my $listfh, "<".$list or die "cannot open $list : $!\n";
+  while(my $target=<$listfh>){
+    chomp $target;
+    my $printed=0;
+    while(my $name=<$dbfh>){
+      chomp $name;
+      $name =~ s/^\@//;
+      my $bases = <$dbfh>;
+      chomp $bases;
+      my $options = <$dbfh>;
+      chomp $options;
+      my $qvs = <$dbfh>;
+      chomp $qvs;
+      if($name eq $target){
+        printf("\@%s\n",$target);
+        printf("%s\n",$bases);
+        printf("%s\n",$options);
+        printf("%s\n",$qvs);
+        $printed=1;
+        last;
+      }
+      else{
+        next;
+      }
+    }
+    if(!$printed){
+      die "ERROR: NOT hit. \@$target\n";
+    }
+  }
+  close $dbfh;
+  close $listfh;
+}
diff --git a/ezez4qsub_vx1.pl b/ezez4qsub_vx1.pl
new file mode 100755
index 0000000..233994d
--- /dev/null
+++ b/ezez4qsub_vx1.pl
@@ -0,0 +1,1496 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+use Getopt::Long;
+use Scalar::Util;
+
+my $DEBUG=0;
+my $DEVEL=0;
+
+my $CURRENT=12;
+my $valid_depth=4;
+my $valid_read_length=500;
+my $scriptdir = "sgescript";
+my $data_dir_prefix="data";
+my $preprefix="c";
+my $ppp="p";
+
+my $confident_depth = $valid_depth;
+my $confident_length_coefficient = 0.75;
+
+my $opt_dryrun;
+my $opt_ec_only;
+my $opt_hgc;
+
+my $now = `date +%Y%m%d_%H%M%S`;
+chomp $now;
+
+my %original_time_stamps = ();
+my @modified_file_names = ();
+
+GetOptions(
+ "n" => \$opt_dryrun,
+ "devel" => \$DEVEL,
+ "debug"=>\$DEBUG,
+ "now=s"=>\$now,
+ "ec_only"=>\$opt_ec_only,
+ "hgc"=>\$opt_hgc
+);
+
+if($DEBUG){
+ die "debug mode is not supported yet, sorry.\nDon't use -debug option.\n";
+}
+
+my %params;
+
+my @emsgs=(
+ 'USAGE: <this> <ec.spec> <asm.spec>',
+ '[-debug: outputs intermediate files (not implemented)]',
+ '[-n: outputs qsub scripts and does NOT qsub]',
+ '[-now yyyymmdd_hhmmss: use a XXX_yyyymmdd_hhmmss directories, detect unfinished jobs and restart at the appropriate stage.]',
+ '[-ec_only: not assemble]',
+);
+
+if(@ARGV != 2){
+ my $msg = join "\n\t", at emsgs;
+ die "$msg\n";
+}
+
+my $pwd = `pwd`;
+chomp $pwd;
+
+my $asm_spec = $ARGV[1];
+$asm_spec =~ s/^\s+//;
+if(!-e $asm_spec){
+ die "$asm_spec does not exist.\n";
+}
+
+if($asm_spec =~ /^\//){
+ # real path; do nothing
+}
+else{
+ $asm_spec = "$pwd/$asm_spec";
+}
+
+{
+ my $ec_spec = $ARGV[0];
+ open my $fh,"<",$ec_spec or die "cannot open $ec_spec :$!\n";
+ while($_ = <$fh>){
+ next if($_ =~ /^\s+$/);
+ next if($_ =~ /^\s*#/);
+ chomp;
+ my @line = split /\s+/,$_;
+ # if(@line < 2){
+ # die "strange line in $ec_spec\n$_\n";
+ # }
+ for(my $i=0; $i<@line; ++$i){
+ if($line[$i] =~ /^\s*#/){
+ @line = @line[0..$i-1];
+ last;
+ }
+ }
+ $params{$line[0]}=join(" ", at line[1.. at line-1]);
+ if($params{$line[0]}){
+ # printf("%s %s#\n",$line[0],$params{$line[0]});
+ }
+ }
+ close $fh;
+}
+
+my $input_for_database;
+my $from=0;
+my $to=1;
+my $pre_partition=1;
+my $partition="";
+my $word_size=11;
+my $evalue=1e-50;
+#my $second_evalue="";
+my $valid_voters="";
+my $trim="";
+my $estimated_genome_size="";
+my $ca_path="";
+my $blast_path="";
+my $sprai_path="";
+my $queue_req="";
+my $longestXx_queue_req="";
+my $blast_rreq="";
+my $blasr_path="";
+my $blasr_opt="";
+my $num_threads=1;
+my $max_target_seqs=100;
+my $min_len_for_query=1;
+my $max_len_for_query=1000000000000000;
+my $sge="";
+my $estimated_depth=0;
+
+if(defined($params{input_for_database})){
+ $input_for_database = $params{input_for_database};
+ if(!-e $input_for_database){
+ die "$input_for_database does not exist.\n";
+ }
+}
+else{
+ die "specify input_for_database in ec.spec\n";
+}
+
+if(defined($params{estimated_genome_size})){
+ $estimated_genome_size = $params{estimated_genome_size};
+}
+else{
+ die "specify estimated_genome_size in ec.spec\n";
+}
+if($estimated_genome_size<=0){
+ die "estimated_genome_size must be > 0\n";
+}
+if(defined($params{estimated_depth})){
+ $estimated_depth = $params{estimated_depth};
+}
+else{
+ die "specify estimated_depth in ec.spec\n";
+}
+
+if(defined($params{from})){
+ $from = $params{from};
+}
+if(defined($params{to})){
+ $to = $params{to};
+}
+if(defined($params{pre_partition})){
+ $pre_partition = $params{pre_partition};
+}
+if(defined($params{partition})){
+ $partition = $params{partition};
+}
+if(defined($params{word_size})){
+ $word_size = $params{word_size};
+}
+if(defined($params{evalue})){
+ $evalue = $params{evalue};
+}
+if(defined($params{num_threads})){
+ $num_threads = $params{num_threads};
+}
+#if(defined($params{second_evalue})){
+# $second_evalue = $params{second_evalue};
+#}
+if(defined($params{valid_voters})){
+ $valid_voters = $params{valid_voters};
+}
+else{
+ $valid_voters = int(0.8*($estimated_depth+0.0));
+ if($valid_voters > 30){
+ $valid_voters = 30;
+ }
+ #my $n_base = -s $input_for_database;
+ #$n_base /= 2;
+ #$valid_voters = int(0.8*$n_base/$estimated_genome_size);
+ $valid_voters = ($valid_voters < 11) ? 11 : $valid_voters;
+}
+if(defined($params{trim})){
+ $trim = $params{trim};
+}
+if(defined($params{ca_path})){
+ $ca_path = $params{ca_path};
+}
+if(defined($params{blast_path})){
+ $blast_path = $params{blast_path};
+}
+if(defined($params{sprai_path})){
+ $sprai_path = $params{sprai_path};
+}
+if(defined($params{queue_req})){
+ $queue_req = $params{queue_req};
+}
+if(defined($params{longestXx_queue_req})){
+ $longestXx_queue_req = $params{longestXx_queue_req};
+}
+if(defined($params{BLAST_RREQ})){
+ $blast_rreq = $params{BLAST_RREQ};
+}
+if(defined($params{blasr_path})){
+ $blasr_path = $params{blasr_path};
+}
+if(defined($params{blasr_opt})){
+ $blasr_opt = $params{blasr_opt};
+}
+if(defined($params{min_len_for_query})){
+ $min_len_for_query = $params{min_len_for_query};
+}
+if(defined($params{max_len_for_query})){
+ $max_len_for_query = $params{max_len_for_query};
+}
+if(defined($params{sge})){
+ $sge = $params{sge};
+}
+
+if(defined($params{max_target_seqs})){
+ $max_target_seqs = $params{max_target_seqs};
+}
+
+printf STDERR ("#>- params -<#\n");
+printf STDERR ("input_for_database %s\n",$input_for_database);
+printf STDERR ("estimated_genome_size %g\n",$estimated_genome_size);
+printf STDERR ("estimated_depth %d\n",$estimated_depth);
+#printf STDERR ("from %s\n",$from);
+#printf STDERR ("to %s\n",$to);
+printf STDERR ("pre_partition %s\n",$pre_partition);
+printf STDERR ("partition %s\n",$partition);
+if($word_size){
+ printf STDERR ("word_size %d\n",$word_size);
+}
+printf STDERR ("evalue %g\n",$evalue);
+printf STDERR ("num_threads %d\n",$num_threads);
+printf STDERR ("valid_voters %s\n",$valid_voters);
+printf STDERR ("trim %d\n",$trim);
+printf STDERR ("ca_path %s\n",$ca_path);
+printf STDERR ("blast_path %s\n",$blast_path);
+printf STDERR ("sprai_path %s\n",$sprai_path);
+if($queue_req){
+ printf STDERR ("queue_req %s\n",$queue_req);
+}
+if($longestXx_queue_req){
+ printf STDERR ("longestXx_queue_req %s\n",$longestXx_queue_req);
+}
+if($blast_rreq){
+ printf STDERR ("blast_rreq %s\n",$blast_rreq);
+}
+if($DEVEL){
+ printf STDERR ("development_mode %s\n","true");
+}
+if($DEBUG){
+ printf STDERR ("debug_mode %s\n","true");
+}
+if($blasr_path){
+ printf STDERR ("blasr_path %s\n",$blasr_path);
+ if($blasr_opt){
+ printf STDERR ("blasr_opt %s\n",$blasr_opt);
+ }
+}
+if($min_len_for_query){
+ printf STDERR ("min_len_for_query %d\n",$min_len_for_query);
+}
+if($max_len_for_query){
+ printf STDERR ("max_len_for_query %d\n",$max_len_for_query);
+}
+if($sge){
+ printf STDERR ("sge %s\n",$sge);
+}
+if($max_target_seqs){
+ printf STDERR ("max_target_seqs %d\n",$max_target_seqs);
+}
+printf STDERR ("#>- params -<#\n");
+
+if(length($blast_path) > 0 && $blast_path !~ /\/$/){
+ $blast_path .= "/";
+}
+if(length($sprai_path) > 0 && $sprai_path !~ /\/$/){
+ $sprai_path .= "/";
+}
+
+my $outfmt = "7 qseqid sstart send sacc qstart qend bitscore evalue pident qseq sseq";
+#my $outfmt = "7 qseqid qlen qstart qend sacc slen sstart send bitscore evalue pident qseq sseq";
+
+my $command="";
+
+my $date="";
+my $message="";
+
+my $bindir=$sprai_path;
+my $path2blast=$blast_path;
+
+if($DEVEL){
+ $bindir = `pwd`;
+ chomp $bindir;
+}
+
+if(!-d $bindir){
+ die "$bindir does not exist\n"
+}
+if(!-d $path2blast && !-d $blasr_path){
+ die "specify aligner dir. $path2blast or $blasr_path does not exist\n"
+}
+if(!-e "$bindir/bfmt72s"){
+ die "$bindir/bfmt72s does not exist in $bindir\ninstall sprai programs in $bindir\n"
+}
+if(!-e "$path2blast/blastn"){
+ die "$path2blast/blastn does not exist in $path2blast\n"
+}
+
+$scriptdir = "$pwd/${scriptdir}_$now";
+if(!-d $scriptdir){
+ `mkdir $scriptdir`;
+}
+
+my @pre_array_jobs=();
+my @array_jobs=();
+my @post_array_jobs=();
+
+my @do_qsub_preaj=();
+my @do_qsub_aj=();
+my @do_qsub_postaj=();
+
+my $script;
+my $outputfile;
+my $errfile;
+
+my @list=(0..$partition-1);
+
+my $datadir="$pwd/${data_dir_prefix}_$now";
+if(!-d $datadir){
+ mkdir $datadir or die "cannot mkdir $datadir: $!\n";
+}
+my $logdir = sprintf("$pwd/log_$now");
+if(!-d $logdir){
+ mkdir $logdir or die "cannot mkdir $logdir: $!\n";
+}
+
+my $orig_idfq = sprintf("$datadir/$preprefix%02d.idfq.gz",0);
+
+my $db_idfq_gz = sprintf("$datadir/$preprefix%02d.db.idfq.gz",0);
+my $qu_idfq_gz = sprintf("$datadir/$preprefix%02d.qu.idfq.gz",0);
+
+if($from == 0)
+{
+ # fq -> idfq (& id2n)
+ my $PG = "$bindir/fq2idfq.pl";
+ my $PG2 = "$bindir/fqfilt.pl";
+ my $dumbbell_filter = "$bindir/dumbbell_filter.pl";
+
+ if($input_for_database !~ /^\//){
+ $input_for_database = sprintf("$pwd/$input_for_database");
+ }
+
+ my $command;
+ my $do_qsub=0;
+
+ my $f_do=1;
+ my @parents;
+ my $parent = $input_for_database;
+ push @parents,$parent;
+ my $child=$db_idfq_gz;
+ $f_do = &do_or_not(\@parents,\$child);
+
+ my $PG0 = "$bindir/fa2fq.pl";
+ $command = sprintf("cat $parent | $PG0 - | $dumbbell_filter - | $PG - -flag --prefix $datadir/$preprefix%02d | gzip -c -1 > $child.tmp && mv $child.tmp $child",0);
+ if($f_do){
+ $do_qsub=1;
+ }
+ else{
+ $command = sprintf("#%s",$command);
+ }
+
+ $parent = $db_idfq_gz;
+ @parents = ();
+ push @parents, $parent;
+ $child = $qu_idfq_gz;
+ $f_do = &do_or_not(\@parents,\$child);
+
+ my $com2;
+ if($min_len_for_query > 1){
+ $com2 = sprintf("gzip -d -c $parent | $PG2 - $min_len_for_query -max_len $max_len_for_query | gzip -c -1 > $child.tmp && mv $child.tmp $child",0);
+ }
+ else{
+ $com2 = sprintf("ln -s $parent $child");
+ }
+ if($f_do){
+ $do_qsub=1;
+ }
+ else{
+ $com2 = sprintf("#%s",$com2);
+ }
+
+# $command .= " & wait";
+
+ $script = sprintf("$scriptdir/fq2idfq.sh");
+ open my $fh, ">", $script or die $!;
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ printf $fh ("#\$ -N $preprefix%02d_fq2idfq_$now\n",0);
+ printf $fh ("#\$ -o $logdir\n");
+ printf $fh ("#\$ -e $logdir\n");
+ printf $fh ("time ($command)\n");
+ printf $fh ("time ($com2)\n");
+ close $fh;
+ push(@pre_array_jobs, $script);
+ push(@do_qsub_preaj, $do_qsub);
+}
+
+if($pre_partition and $pre_partition < 1){
+ printf STDERR ("WARNING: given pre_partition %d was changed to 1\n",$pre_partition);
+ $pre_partition = 1;
+}
+
+my @makeblastdb_holdjids=();
+my @prepartition_holdjids=();
+
+for(my $index=$from; $index<$to; ++$index){
+ if($index>0){
+ @pre_array_jobs=();
+ @array_jobs=();
+ @post_array_jobs=();
+ @do_qsub_preaj=();
+ @do_qsub_aj=();
+ @do_qsub_postaj=();
+ }
+
+ my $prepadir = sprintf("$datadir/$preprefix%02d",$index);
+ if(!-d $prepadir){
+ mkdir $prepadir or die "cannot mkdir $prepadir: $!\n";
+ }
+ my $script_sub_dir = sprintf("$scriptdir/$preprefix%02d",$index);
+ if(!-d $script_sub_dir){
+ mkdir $script_sub_dir or die "cannot mkdir $script_sub_dir: $!\n";
+ }
+ # XXX
+ my $errdir = sprintf("$logdir/$preprefix%02d/err/",$index);
+ my $doutdir = sprintf("$logdir/$preprefix%02d/dout/",$index);
+ {
+ my $tmp = sprintf("$logdir/$preprefix%02d/",$index);
+ if(!-d $tmp){
+ mkdir $tmp or die "cannot mkdir $tmp: $!\n";
+ }
+ }
+ if(!-d $errdir){
+ mkdir $errdir or die "cannot mkdir $errdir: $!\n";
+ }
+ if(!-d $doutdir){
+ mkdir $doutdir or die "cannot mkdir $doutdir: $!\n";
+ }
+
+ my @ec_holdjids=();
+
+ # makeblastdb
+ {
+ my $PG;
+ my $PG2;
+ my $do_qsub=0;
+ if($blasr_path){
+ $PG2= "$bindir/dfq2fq_v2.pl";
+ $command = sprintf("gzip -d -c $db_idfq_gz | $PG2 -f - > $datadir/$preprefix%02d.fasta ",$index);
+ $command .= sprintf(" && $blasr_path/sawriter $datadir/$preprefix%02d.fasta ",$index);
+ }
+ else{
+ my $f_do=1;
+ my $parent = $db_idfq_gz;
+ my @parents;
+ push @parents,$parent;
+ my $child = sprintf("$datadir/makeblastdb_%02d.done",$index);
+ $f_do = &do_or_not(\@parents,\$child);
+
+ $PG = "$path2blast/makeblastdb";
+ $PG2= "$bindir/dfq2fq_v2.pl";
+ $command = sprintf("gzip -d -c $parent | $PG2 -f - | $PG -in - -dbtype nucl -out $datadir/$preprefix%02d -title $preprefix%02d 1>$child.tmp && mv $child.tmp $child",$index,$index);
+ if($f_do){
+ $do_qsub=1;
+ }
+ else{
+ $command = sprintf("#%s",$command);
+ }
+
+ }
+
+ $script = sprintf("$script_sub_dir/$preprefix%02d_makeblastdb.sh",$index);
+ push(@pre_array_jobs,$script);
+ push(@do_qsub_preaj,$do_qsub);
+ open my $fh, ">", $script or die $!;
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ my $jobname = sprintf("$preprefix%02d_makeblastdb_$now",$index);
+ printf $fh ("#\$ -N $jobname\n");
+ push(@ec_holdjids,$jobname);
+ if($queue_req){
+ printf $fh ("#\$ $queue_req\n");
+ }
+ printf $fh ("#\$ -o $logdir\n");
+ printf $fh ("#\$ -e $logdir\n");
+ if($index > $from){
+ my $holdlist=join(',', at makeblastdb_holdjids);
+ printf $fh ("#\$ -hold_jid $holdlist\n");
+ }
+ elsif($index == 0){
+ printf $fh ("#\$ -hold_jid $preprefix%02d_fq2idfq_$now\n",0);
+ }
+ else{
+ printf STDERR ("strange index. %d\n", $index);
+ }
+ printf $fh ("time ($command)\n");
+ close $fh;
+ }
+
+ # prepartitioning
+ {
+ my $PG1= "$bindir/dfq2fq_v2.pl";
+ my $PG2="$bindir/partition_fa.pl -1origin";
+ my $do_qsub=0;
+
+ my $f_do=1;
+ my $parent = $qu_idfq_gz;
+ my @parents;
+ push @parents,$parent;
+ my $child = sprintf("$prepadir/pre_partition_fa_%02d.done",$index);
+ $f_do = &do_or_not(\@parents,\$child);
+ $command = sprintf("gzip -d -c $parent | $PG2 -q - %d -p $prepadir/$preprefix%02d 1>$child", $pre_partition,$index);
+ #$command = sprintf("gzip -d -c $parent | $PG1 -f - | $PG2 - %d -p $prepadir/$preprefix%02d 1>$child", $pre_partition,$index);
+ if($f_do){
+ $do_qsub=1;
+ }
+ else{
+ $command = sprintf("#%s",$command);
+ }
+
+ $script = sprintf("$script_sub_dir/$preprefix%02d_pre_partition_fa.sh",$index);
+ push(@pre_array_jobs,$script);
+ push(@do_qsub_preaj,$do_qsub);
+ open my $fh, ">", $script or die $!;
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ printf $fh ("#\$ -N $preprefix%02d_pre_partition_fa_$now\n",$index);
+ if($queue_req){
+ printf $fh ("#\$ $queue_req\n");
+ }
+ printf $fh ("#\$ -o $logdir\n");
+ printf $fh ("#\$ -e $logdir\n");
+ if($index > $from){
+ my $holdlist=join(',', at prepartition_holdjids);
+ printf $fh ("#\$ -hold_jid $holdlist\n");
+ }
+ elsif($index == 0){
+ printf $fh ("#\$ -hold_jid $preprefix%02d_fq2idfq_$now\n",0);
+ }
+ printf $fh ("time ($command)\n");
+ close $fh;
+ }
+
+
+ my @second_cat_pnp_finish_holdjids = ();
+
+ for(my $pp = 1,my $orig_datadir = $datadir; $pp<=$pre_partition; ++$pp,$datadir = $orig_datadir){
+ my $ppdir = sprintf("$datadir/$preprefix%02d/$pp/",$index);
+ if(!-d $ppdir){
+ mkdir $ppdir or die "cannot mkdir $ppdir: $!\n";
+ }
+ my $maindir= $ppdir;
+ if(!-d $maindir){
+ mkdir $maindir or die "cannot mkdir $maindir: $!\n";
+ }
+
+ # sub partitioning
+ {
+ my $PG="$bindir/partition_fa.pl";
+ my $do_qsub=0;
+
+ my $f_do=1;
+ my $parent = sprintf("$prepadir/$preprefix%02d_%04d.fq",$index,$pp);
+ #my $parent = sprintf("$prepadir/$preprefix%02d_%04d.fa",$index,$pp);
+ my $dummy_parent_1 = sprintf("$prepadir/pre_partition_fa_%02d.done",$index);
+ my @parents;
+ push @parents,$parent;
+ push @parents,$dummy_parent_1;
+ my $child = sprintf("$maindir/partition_fa_%02d.done",$index);
+ $f_do = &do_or_not(\@parents,\$child);
+
+ $command = sprintf("cat $parent | $PG -q - %d -p $maindir/$preprefix%02d 1>$child",scalar(@list),$index);
+ #$command = sprintf("cat $parent | $PG - %d -p $maindir/$preprefix%02d 1>$child",scalar(@list),$index);
+ if($f_do){
+ $do_qsub=1;
+ }
+ else{
+ $command = sprintf("#%s",$command);
+ }
+ $script = sprintf("$script_sub_dir/$preprefix%02d$ppp%02d_partition_fa.sh",$index,$pp);
+ push(@pre_array_jobs,$script);
+ push(@do_qsub_preaj,$do_qsub);
+ open my $fh, ">", $script or die $!;
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ #my $jobname = sprintf("$preprefix%02d$ppp%02d_partition_fa_$now",$index,$pp);
+ my $jobname = sprintf("$preprefix%02d_partition_fa_$now",$index);
+ printf $fh ("#\$ -N $jobname\n");
+ #printf $fh ("#\$ -N $preprefix%02d$ppp%02d_partition_fa_$now\n",$index,$pp);
+ if($pp==1){
+ push(@ec_holdjids,$jobname);
+ }
+ if($queue_req){
+ printf $fh ("#\$ $queue_req\n");
+ }
+ printf $fh ("#\$ -o $doutdir\n");
+ printf $fh ("#\$ -e $errdir\n");
+ printf $fh ("#\$ -hold_jid $preprefix%02d_pre_partition_fa_$now\n",$index);
+ printf $fh ("time ($command)\n");
+ close $fh;
+ }
+ }
+# print STDERR "pre_array_jobs printed\n";
+
+ # array jobs
+ my $ppdir = sprintf("$datadir/$preprefix%02d/\${SGE_TASK_ID}/",$index);
+ my $maindir= $ppdir;
+ my $mdprefix = sprintf("$datadir/$preprefix%02d/",$index);
+ my @dqa_offsets=();
+ my $dqa_offset=0;
+ push @dqa_offsets,$dqa_offset;
+ my $task_l=$pre_partition;
+ for(my $i=0; $i<@list; ++$i){
+ for(my $j=0; $j<$task_l; ++$j){
+ push @do_qsub_aj,0;
+ }
+ }
+ {
+ for(my $i=0; $i<@list; ++$i){
+ my $PG="$path2blast/blastn -dbsize 1 -num_threads \${nthreads}";
+ if($word_size){
+ $PG .= " -word_size $word_size";
+ }
+ if($max_target_seqs){
+ $PG .= " -max_target_seqs $max_target_seqs";
+ }
+ #my $PG1="$bindir/overlap_finder";
+ my $PG2;
+ {
+ my $t_trim;
+ #if($index == 0){
+ $t_trim = $trim;
+ #}
+ #else{
+ # $t_trim = 0;
+ #}
+ $PG2=sprintf("$bindir/bfmt72s -c %d -u -i",$t_trim);
+ }
+ #my $PG3="$bindir/nss2v_v3 -v $valid_voters -q";
+ my $PG3="$bindir/nss2v_v3 -v $valid_voters";
+ my $PG4="$bindir/myrealigner -f -B $valid_voters -b 3 -d 0.5";
+ #my $PG3="$bindir/nss2v_v3 -q";
+ #my $PG3="$bindir/nss2v_v3 -q -s";
+ #my $PG3="$bindir/nss2v_v3 -v $valid_voters";
+ #my $distinguishbale = 0.5;
+ #my $PG4="$bindir/realigner_v2 -f -b $valid_depth -d $distinguishbale -q";
+ #my $PG4="$bindir/realigner_v2 -f -b $valid_depth -d $distinguishbale -q -g";
+ #if($index+1 == $to){
+ # $PG4 = "$PG4 -k";# mask option
+ #}
+ #my $PG4="$bindir/realigner_v2 -f -B $valid_voters -b 3 -d 0.5";
+ my $do_qsub=0;
+ my @othercom = ();
+ if($DEBUG){# TODO
+ die "sorry. debug mode is under construction.\n";
+ #$PG4="$bindir/myrealigner -q -B $valid_voters -b 3";
+ #my $PG5="$bindir/myrealigner -f -n -q -B $valid_voters -b 3";
+ #$command = sprintf("cat $maindir/$preprefix%02d_%04d.fa | $PG -db $maindir/$preprefix%02d -query - -evalue $evalue -outfmt '$outfmt' | tee $datadir/$preprefix%02d_%04d.blastn | $PG2 - | tee $datadir/$preprefix%02d_%04d.nss | $PG3 - | tee $datadir/$preprefix%02d_%04d.vertical | $PG4 - | tee $datadir/$preprefix%02d_%04d.realigned | $PG5 - | gzip -c -1 > $maindir/$outputfile", $index,$list[$i],$index,$index,$list[$i],$index,$list[$i],$index,$list[$i],$index,$list[$i]);
+ }
+ else{
+ $outputfile = sprintf("$maindir/$preprefix%02d_%04d.dfq.gz",$index,$list[$i]);
+ if($blasr_path){
+ die "sorry. under construction.\nPlease use blastn, not blasr.\n";
+# my $PG6 = "$bindir/m52bfmt7";
+# $command = sprintf("$blasr_path/blasr $maindir/$preprefix%02d_%04d.fa $datadir/$preprefix%02d.fasta -sa $datadir/$preprefix%02d.fasta.sa -m 5 $blasr_opt -out $maindir/tmp%02d_%04d.m5 ",$index,$list[$i],0,0,$index,$list[$i]);
+# $command .= sprintf(" && cat $maindir/tmp%02d_%04d.m5 | $PG6 - | $PG2 - | $PG3 - | $PG4 - | gzip -c -1 > $outputfile", $index,$list[$i]);
+ }
+ else{
+ my $f_do=1;
+ my $input = sprintf("$maindir/$preprefix%02d_%04d.fq",$index,$list[$i]);
+ #my $input = sprintf("$maindir/$preprefix%02d_%04d.fa",$index,$list[$i]);
+ $command = sprintf("cat $input | $bindir/fq2fa.pl - | $PG -db $datadir/$preprefix%02d -query - -evalue $evalue -outfmt '$outfmt' | $PG2 - | $PG3 - | $PG4 - | gzip -c -1 > $outputfile.tmp && mv $outputfile.tmp $outputfile", $index);
+ #$command = sprintf("cat $input | $bindir/fq2fa.pl - | $PG -db $datadir/$preprefix%02d -query - -evalue $evalue -outfmt '$outfmt' | $PG2 - -f $input | $PG3 - | $PG4 - | gzip -c -1 > $outputfile.tmp && mv $outputfile.tmp $outputfile", $index);
+ push @othercom, "gzip -d -t $outputfile";
+ push @othercom, "while [ \$? -ne 0 ]";
+ push @othercom, "do";
+ push @othercom, " rm $outputfile";
+ push @othercom, " time ($command)";
+ push @othercom, " gzip -d -t $outputfile";
+ push @othercom, "done";
+ #push @othercom, "echo test";
+ for(my $j=1; $j<=$task_l; ++$j){
+ my $mdextended = sprintf("$mdprefix/%d",$j);
+ my $parent = sprintf("$mdextended/$preprefix%02d_%04d.fq",$index,$list[$i]);
+ my $dummy_parent_1 = sprintf("$datadir/makeblastdb_%02d.done",$index);
+ my $dummy_parent_2 = sprintf("$mdextended/partition_fa_%02d.done",$index);
+ my @parents=();
+ push @parents,$parent;
+ push @parents,$dummy_parent_1;
+ push @parents,$dummy_parent_2;
+ my $child = sprintf("$mdextended/$preprefix%02d_%04d.dfq.gz",$index,$list[$i]);
+ $f_do = &do_or_not(\@parents,\$child);
+ my $idx = $task_l*$i+($j-1);
+# printf STDERR ("%d\n",$idx);
+ if($f_do){
+ $do_qsub_aj[$idx] = 1;
+ }
+ else{
+ #$do_qsub_aj[$idx] = 0;
+ #$command = sprintf("#%s",$command);
+ }
+ }
+ }
+ }
+
+ $script = sprintf("$script_sub_dir/$preprefix%02d_ec_%04d.sh",$index,$list[$i]);
+ push(@array_jobs,$script);
+ #push(@do_qsub_aj,$do_qsub);
+ open my $fh, ">", $script or die $!;
+
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ #printf $fh ("#\$ -N $preprefix%02d_ec_%04d_$now\n",$index,$list[$i]);
+ printf $fh ("#\$ -N $preprefix%02d_ec_$now\n",$index);
+ if($queue_req){
+ printf $fh ("#\$ $queue_req\n");
+ }
+ if($blast_rreq){
+ printf $fh ("#\$ $blast_rreq\n");
+ }
+ printf $fh ("#\$ -o $doutdir\n");
+ printf $fh ("#\$ -e $errdir\n");
+
+ my $holdlist = join(",", at ec_holdjids);
+ printf $fh ("#\$ -hold_jid $holdlist\n");
+
+ if($opt_hgc){
+ printf $fh ("hostname=`hostname`\n");
+ printf $fh ("nthreads=1\n");
+ printf $fh ("if [[ \${hostname} =~ ^ncl ]]; then\n");
+ printf $fh (" nthreads=1\n");
+ printf $fh ("else\n");
+ printf $fh (" nthreads=%d\n",$num_threads);
+ printf $fh ("fi\n");
+ }
+ else{
+ printf $fh ("nthreads=%d\n",$num_threads);
+ }
+ printf $fh ("time ($command)\n");
+ for(my $i=0; $i<@othercom; ++$i){
+ printf $fh ("%s\n",$othercom[$i]);
+ }
+ close $fh;
+ }
+ }
+ $dqa_offset+=@list*$task_l;
+ push @dqa_offsets,$dqa_offset;
+
+ # finish
+ {
+ my $loop;
+ my $diff4dqa;
+ {
+ my $tmp = (@list+0.0)/($CURRENT+0.0);
+=pod
+ if($tmp == int($tmp)){
+ $loop = int($tmp);
+ }
+ else{
+ $loop = int($tmp+1.0);
+ }
+ $tmp = &round_up($tmp);
+ if($tmp != $loop){
+ die "strange $tmp $loop\n";
+ }
+=cut
+ $loop = &round_up($tmp);
+ }
+ for(my $i=0; $i<$loop; ++$i){
+ for(my $j=0; $j<$task_l; ++$j){
+ push @do_qsub_aj,0;
+ }
+ }
+ $diff4dqa = $loop*$task_l;
+ for(my $k=1; $k<=$task_l; ++$k)
+ {
+ my @tmp;
+ for(my $i=0; $i<@list; ++$i){
+ #$tmp[$i]=sprintf("$preprefix%02d_ec_%04d_$now",$index,$list[$i]);
+ }
+ $tmp[0]=sprintf("$preprefix%02d_ec_$now",$index);
+ my $holdlist=join(',', at tmp);
+
+ my $mdextended = sprintf("$mdprefix/%d",$k);
+ for(my $i=0; $i<@list; $i+=$CURRENT){
+ $outputfile = sprintf("$preprefix%02d_pnp_finish_%04d.idfq.gz",$index,$list[$i]);
+ my $child = sprintf("$mdextended/$outputfile");
+ $outputfile = sprintf("$maindir/$outputfile");
+ my $do_qsub=0;
+
+ my $f_do=1;
+ my $parent;
+ my @parents;
+ for(my $j=0; $j<$CURRENT && $i+$j<@list; ++$j){
+ $parent = sprintf("$mdextended/$preprefix%02d_%04d.dfq.gz",$index,$list[$i+$j]);
+ push @parents,$parent;
+ }
+ $f_do = &do_or_not(\@parents,\$child);
+
+ {
+ my @ta;
+ for(my $j=0; $j<$CURRENT && $i+$j<@list; ++$j){
+ my $p = sprintf("$maindir/$preprefix%02d_%04d.dfq.gz",$index,$list[$i+$j]);
+ push @ta,$p;
+ }
+ my $tmp = join " ", @ta;
+ if($index+1 == $to){
+ $command = sprintf("gzip -d -c $tmp | $bindir/dfq2fq_v2.pl - --finish --valid_depth $valid_depth -valid_read_length $valid_read_length | gzip -c -1 > $outputfile.tmp && mv $outputfile.tmp $outputfile");
+ }
+ else{
+ $command = sprintf("gzip -d -c $tmp | $bindir/dfq2fq_v2.pl - | gzip -c -1 > $outputfile.tmp && mv $outputfile.tmp $outputfile");
+ }
+ }
+
+ my $idx = $dqa_offset+&round_up((@list+0.0)/($CURRENT+0.0))*($k-1)+$i/$CURRENT;
+# printf STDERR ("%d\n",$idx);
+ if($f_do){
+ $do_qsub_aj[$idx] = 1;
+ }
+ else{
+ #$do_qsub_aj[$idx] = 0;
+ #$command = sprintf("#%s",$command);
+ }
+ if($k==1){# write once
+ $script = sprintf("$script_sub_dir/$preprefix%02d_pnp_finish_%04d.sh",$index,$list[$i]);
+ push(@array_jobs,$script);
+# push(@do_qsub_aj,$do_qsub);
+ open my $fh, ">", $script or die $!;
+
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ #printf $fh ("#\$ -N $preprefix%02d_pnp_finish_%04d_$now\n",$index,$list[$i]);
+ printf $fh ("#\$ -N $preprefix%02d_pnp_finish_$now\n",$index);
+ if($queue_req){
+ printf $fh ("#\$ $queue_req\n");
+ }
+ printf $fh ("#\$ -o $doutdir\n");
+ printf $fh ("#\$ -e $errdir\n");
+ printf $fh ("#\$ -hold_jid $holdlist\n");
+ printf $fh ("time ($command)\n");
+ close $fh;
+ }
+ }
+ }
+=pod
+ {
+ my $loop;
+ my $tmp = (@list+0.0)/($CURRENT+0.0);
+ if($tmp == int($tmp)){
+ $loop = int($tmp);
+ }
+ else{
+ $loop = int($tmp+1.0);
+ }
+ $dqa_offset += $loop*$task_l;
+ }
+=cut
+ $dqa_offset += $diff4dqa;
+ push @dqa_offsets,$dqa_offset;
+
+ for(my $j=0; $j<$task_l; ++$j){
+ push @do_qsub_aj,0;
+ }
+ for(my $k=1; $k<=$task_l; ++$k)
+ {
+ my $mdextended = sprintf("$mdprefix/%d",$k);
+ my @tmp;
+ for(my $i=0; $i<@list; $i+=$CURRENT){
+ #$tmp[$i/$CURRENT]=sprintf("$preprefix%02d_pnp_finish_%04d_$now",$index,$list[$i]);
+ }
+ $tmp[0]=sprintf("$preprefix%02d_pnp_finish_$now",$index);
+ my $holdlist=join(',', at tmp);
+
+ my $do_qsub=0;
+ my $f_do=1;
+ my $parent;
+ my @parents = ();
+
+ my $files="";
+ for(my $i=0; $i<@list; $i+=$CURRENT){
+ my $input = sprintf("$preprefix%02d_pnp_finish_%04d.idfq.gz",$index,$list[$i]);
+ my $parent = sprintf("$mdextended/$input");
+ push @parents,$parent;
+ $input = "$maindir/$input";
+ $files .= $input;
+ $files .= " ";
+ }
+ chop $files;
+
+ $outputfile = sprintf("$prepadir/$preprefix%02d$ppp\${SGE_TASK_ID}.pnp.fin.idfq.gz",$index+1);
+ my $child = sprintf("$prepadir/$preprefix%02d$ppp$k.pnp.fin.idfq.gz",$index+1);
+ $f_do = &do_or_not(\@parents,\$child);
+
+ $command = sprintf("cat $files > $outputfile");
+
+ my $idx = $dqa_offset+($k-1);
+# printf STDERR ("pnpfin %s\n",$f_do);
+ if($f_do){
+ $do_qsub_aj[$idx] = 1;
+ }
+ else{
+ #$do_qsub_aj[$idx] = 0;
+ #$command = sprintf("#%s",$command);
+ }
+
+ if($k==1){
+ $script = sprintf("$script_sub_dir/$preprefix%02d_cat_pnp_finish.sh",$index);
+ push(@array_jobs,$script);
+# push(@do_qsub_aj,$do_qsub);
+ open my $fh, ">", $script or die $!;
+
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ my $jobname = sprintf("$preprefix%02d_cat_pnp_finish_$now",$index);
+ push(@second_cat_pnp_finish_holdjids,$jobname);
+ printf $fh ("#\$ -N $jobname\n");
+ if($queue_req){
+ printf $fh ("#\$ $queue_req\n");
+ }
+ printf $fh ("#\$ -o $doutdir\n");
+ printf $fh ("#\$ -e $errdir\n");
+ printf $fh ("#\$ -hold_jid $holdlist\n");
+ printf $fh ("time ($command)\n");
+
+ close $fh;
+ }
+ }
+ $dqa_offset+=$task_l;
+ push @dqa_offsets,$dqa_offset;
+ }
+# print STDERR "array_jobs printed\n";
+
+ # second cat
+ my @get_topXx_holdjids=();
+ my $second_catted_file;
+ {
+ my $holdlist=join(',', at second_cat_pnp_finish_holdjids);
+
+ my $do_qsub = 0;
+ my $f_do=1;
+ my @parents=();
+
+ my $files="";
+ for(my $pp=1; $pp<=$pre_partition; ++$pp){
+ my $parent = sprintf("$prepadir/$preprefix%02d$ppp%d.pnp.fin.idfq.gz",$index+1,$pp);
+ push @parents,$parent;
+ $files .= $parent;
+ $files .= " ";
+ }
+
+ $outputfile = sprintf("$datadir/$preprefix%02d.fin.idfq.gz",$index+1);
+ my $child = $outputfile;
+
+ $f_do = &do_or_not(\@parents,\$child);
+
+ $second_catted_file = "$outputfile";
+ $command = sprintf("cat $files > $outputfile.tmp && mv $outputfile.tmp $outputfile");
+ if($f_do){
+ $do_qsub=1;
+ }
+ else{
+ $command = sprintf("#%s",$command);
+ }
+
+ $script = sprintf("$script_sub_dir/$preprefix%02d_second_cat_pnp_finish.sh",$index);
+ push(@post_array_jobs,$script);
+ push(@do_qsub_postaj,$do_qsub);
+ open my $fh, ">", $script or die $!;
+
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ my $jobname = sprintf("$preprefix%02d_second_cat_pnp_finish_$now",$index);
+ push(@get_topXx_holdjids,$jobname);
+ printf $fh ("#\$ -N $jobname\n");
+ if($queue_req){
+ printf $fh ("#\$ $queue_req\n");
+ }
+ printf $fh ("#\$ -o $logdir\n");
+ printf $fh ("#\$ -e $logdir\n");
+ printf $fh ("#\$ -hold_jid $holdlist\n");
+ printf $fh ("time ($command)\n");
+
+ close $fh;
+ }
+
+ my @fastqToCA_holdjids=();
+ my @prep_holdjids=@get_topXx_holdjids;
+ my $fastqToCA_input;
+ my $longestXx;
+ #if(!$opt_ec_only)
+ if($index+1 == $to)
+ {
+ my $holdlist=join(',', at get_topXx_holdjids);
+ my $f_do=1;
+ my $do_qsub=0;
+ my @parents=();
+
+ $outputfile = sprintf("$datadir/$preprefix%02d.fin.topXx.fq",$index+1);
+ my $child = $outputfile;
+ $fastqToCA_input = "$child";
+ $longestXx = "$child";
+
+ my $PG1 = "$bindir/get_top_20x_fa.pl";
+
+ my $parent = $second_catted_file;
+ push @parents,$parent;
+ $f_do = &do_or_not(\@parents,\$child);
+
+ my $uuid = $now;
+ #my $uuid = `uuidgen`;
+ chomp $uuid;
+ $command = sprintf("gzip -d -c $second_catted_file > $datadir/$uuid.tmp && $PG1 $datadir/$uuid.tmp -l -g $estimated_genome_size -q -c 20 > $child.tmp && mv $child.tmp $child && rm $datadir/$uuid.tmp");
+ #$command = sprintf("gzip -d -c $second_catted_file | $PG1 - -g $estimated_genome_size -q -c 20 > $child.tmp && mv $child.tmp $child");
+ if($f_do){
+ $do_qsub=1;
+ }
+ else{
+ $command = sprintf("#%s",$command);
+ }
+
+ $script = sprintf("$script_sub_dir/$preprefix%02d_get_topXx.sh",$index);
+ push(@post_array_jobs,$script);
+ push(@do_qsub_postaj,$do_qsub);
+ open my $fh, ">", $script or die $!;
+
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ my $jobname = sprintf("$preprefix%02d_get_topXx_$now",$index);
+ push @fastqToCA_holdjids,$jobname;
+ push @prep_holdjids,$jobname;
+ printf $fh ("#\$ -N $jobname\n");
+ if($longestXx_queue_req){
+ printf $fh ("#\$ $longestXx_queue_req\n");
+ }
+ elsif($queue_req){
+ printf $fh ("#\$ $queue_req\n");
+ }
+ printf $fh ("#\$ -o $logdir\n");
+ printf $fh ("#\$ -e $logdir\n");
+ printf $fh ("#\$ -hold_jid $holdlist\n");
+ printf $fh ("time ($command)\n");
+
+ close $fh;
+ }
+
+ # prep data
+ $qu_idfq_gz = sprintf("$datadir/$preprefix%02d.qu.idfq.gz",$index+1);
+ $db_idfq_gz = sprintf("$datadir/$preprefix%02d.db.idfq.gz",$index+1);
+ if($index+1 != $to)
+ {
+=pod
+ @makeblastdb_holdjids = ();
+ @prepartition_holdjids = ();
+ my $jobname = sprintf("$preprefix%02d_data_prep_$now",$index);
+ push(@makeblastdb_holdjids,$jobname);
+ push(@prepartition_holdjids,$jobname);
+
+ my $f_do=1;
+ my $do_qsub=0;
+ my @parents;
+ my $parent = $second_catted_file;
+ #my $parent = $longestXx;
+ push @parents,$parent;
+ my $child = $qu_idfq_gz;
+ $f_do = &do_or_not(\@parents,\$child);
+
+ my $PG = sprintf("$bindir/fq2idfq.pl");
+ my $com1 = sprintf("ln -s $parent $child");
+ #my $com1 = sprintf("cat $parent | $PG - --prefix $datadir/qu%02d | gzip -c -1 > $child.tmp && mv $child.tmp $child",$index+1);
+ if($f_do){
+ $do_qsub=1;
+ }
+ else{
+ $com1 = sprintf("#%s",$com1);
+ }
+
+ @parents=();
+ my $parent_1 = $second_catted_file;
+ my $parent_2 = sprintf("$datadir/$preprefix%02d.db.idfq.gz",0);
+ push @parents,$parent_1;
+ push @parents,$parent_2;
+ #push @parents,$longestXx;
+ #push @parents,$input_for_database;
+ $child = $db_idfq_gz;
+ $f_do = &do_or_not(\@parents,\$child);
+ my $com2;
+ {
+ $com2 = "ln -s $parent_2 $child";
+ #my $tmp = join " ", at parents;
+ #$com2 = sprintf("cat $tmp | $PG - --prefix $datadir/db%02d | gzip -c -1 > $child.tmp && mv $child.tmp $child",$index+1);
+ }
+ if($f_do){
+ $do_qsub=1;
+ }
+ else{
+ $com2 = sprintf("#%s",$com2);
+ }
+
+ my $script = sprintf("$script_sub_dir/$preprefix%02d_data_prep.sh",$index);
+ push(@post_array_jobs,$script);
+ push(@do_qsub_postaj,$do_qsub);
+ open my $fh, ">", $script or die $!;
+
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ printf $fh ("#\$ -N $jobname\n");
+ if($queue_req){
+ printf $fh ("#\$ $queue_req\n");
+ }
+ printf $fh ("#\$ -o $datadir\n");
+ printf $fh ("#\$ -e $datadir\n");
+ my $holdlist=join(',', at prep_holdjids);
+ printf $fh ("#\$ -hold_jid $holdlist\n");
+ printf $fh ("$com1 & $com2 & wait\n");
+
+ close $fh;
+=cut
+ }
+
+ my @runCA_holdjids=();
+ my $runCA_input;
+ if($index+1 == $to)
+ {
+ my $holdlist=join(',', at fastqToCA_holdjids);
+ $outputfile = sprintf("$datadir/$preprefix%02d.fin.topXx.frg",$index+1);
+ $runCA_input = "$outputfile";
+
+ my $f_do=1;
+ my $do_qsub=0;
+ my $parent = $fastqToCA_input;
+ my @parents;
+ push @parents,$parent;
+ my $child = $outputfile;
+ $f_do = &do_or_not(\@parents,\$child);
+
+ my $PG1 = "$ca_path/fastqToCA";
+ $command = sprintf("$PG1 -libraryname foo -technology pacbio-corrected -reads $parent > $child");
+
+ if($f_do){
+ $do_qsub = 1;
+ }
+ else{
+ $command = sprintf("#%s",$command);
+ }
+
+ $script = sprintf("$script_sub_dir/$preprefix%02d_fastqToCA.sh",$index);
+ if(!$opt_ec_only){
+ push(@post_array_jobs,$script);
+ push(@do_qsub_postaj,$do_qsub);
+ }
+ open my $fh, ">", $script or die $!;
+
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ my $jobname = sprintf("$preprefix%02d_fastqToCA_$now",$index);
+ push @runCA_holdjids,$jobname;
+ printf $fh ("#\$ -N $jobname\n");
+ if($queue_req){
+ printf $fh ("#\$ $queue_req\n");
+ }
+ printf $fh ("#\$ -o $logdir\n");
+ printf $fh ("#\$ -e $logdir\n");
+ printf $fh ("#\$ -hold_jid $holdlist\n");
+ printf $fh ("time ($command)\n");
+
+ close $fh;
+ }
+ if($index+1 == $to)
+ {
+ my $holdlist=join(',', at runCA_holdjids);
+
+ my $outdir = sprintf("$datadir/CA_c%02d",$index+1);
+ if(-d $outdir){
+ $outdir = "${outdir}_$now";
+ }
+ if(!-d $outdir){
+ mkdir "$outdir" or die "cannot mkdir $outdir: $!\n";
+ }
+
+ my $PG1 = "$ca_path/runCA";
+ my $foo = `date +%Y%m%d_%H%M%S`;
+ chomp $foo;
+
+ my $f_do=1;
+ my $parent = $runCA_input;
+ my @parents;
+ push @parents,$parent;
+ #my $child = sprintf("$datadir/do_${now}_c%02d.fin.topXx.log",$index+1);
+ my $child = sprintf("$outdir/9-terminator/asm_$foo.ctg.fasta");
+ $f_do = &do_or_not(\@parents,\$child);
+ my $do_qsub=0;
+ if($f_do){
+ $do_qsub=1;
+ }
+ else{
+ }
+
+ $command = sprintf("$PG1 -dir $outdir -p asm_$foo -s $asm_spec $parent");
+ #$command = sprintf("$PG1 -dir $outdir -p asm_$foo -s $asm_spec $parent | tee -a $child");
+
+ $script = sprintf("$script_sub_dir/$preprefix%02d_runCA.sh",$index);
+ if(!$opt_ec_only){
+ push(@post_array_jobs,$script);
+ push(@do_qsub_postaj,$do_qsub);
+ }
+ open my $fh, ">", $script or die $!;
+
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ my $jobname = sprintf("$preprefix%02d_runCA_$now",$index);
+ printf $fh ("#\$ -N $jobname\n");
+ if($queue_req){
+ printf $fh ("#\$ $queue_req\n");
+ }
+ printf $fh ("#\$ -o $logdir\n");
+ printf $fh ("#\$ -e $logdir\n");
+ printf $fh ("#\$ -hold_jid $holdlist\n");
+ printf $fh ("time ($command)\n");
+
+ close $fh;
+ }
+
+# print STDERR "post_array_jobs printed\n";
+
+ {
+ my $qsub = `which qsub`;
+ chomp $qsub;
+ if($sge){
+ $qsub = sprintf("%s %s",$qsub,$sge);
+ }
+
+ if($index+1 != $to){
+ my $script = sprintf("$script_sub_dir/$preprefix%02d_next_qsub.sh",$index);
+ push(@post_array_jobs,$script);
+ push(@do_qsub_postaj,1);
+
+ open my $fh, ">", $script or die $!;
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ my $jobname = sprintf("$preprefix%02d_next_qsub_$now",$index);
+ printf $fh ("#\$ -N $jobname\n");
+ printf $fh ("#\$ -o $logdir\n");
+ printf $fh ("#\$ -e $logdir\n");
+
+ my $prep = sprintf("$preprefix%02d_data_prep_$now",$index);
+ my $holdlist = $prep;
+ printf $fh ("#\$ -hold_jid $holdlist\n");
+
+ my $next_script_sub_dir = sprintf("$scriptdir/$preprefix%02d",$index+1);
+ my $next_qsub_script = sprintf("$next_script_sub_dir/$preprefix%02d_qsub.sh",$index+1);
+ printf $fh ("$qsub $next_qsub_script\n");
+ close $fh;
+ }
+
+
+ my $qsub_script = sprintf("$script_sub_dir/$preprefix%02d_qsub.sh",$index);
+ open my $fh, ">", $qsub_script or die $!;
+
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ my $jobname = sprintf("$preprefix%02d_qsub_$now",$index);
+ printf $fh ("#\$ -N $jobname\n");
+ printf $fh ("#\$ -o $logdir\n");
+ printf $fh ("#\$ -e $logdir\n");
+
+ my $task_f=1;
+ my $task_l=$pre_partition;
+ if($task_l<1){
+ die "task_l must be >= 1\n";
+ }
+
+# if($#pre_array_jobs != $#do_qsub_preaj){
+# die "#pre_array_jobs != #do_qsub_preaj\n";
+# }
+# if($#array_jobs != $#do_qsub_aj){
+# die "#array_jobs != #do_qsub_aj\n";
+# }
+# if($#post_array_jobs != $#do_qsub_postaj){
+# die "#post_array_jobs != #do_qsub_postaj\n";
+# }
+ for(my $i=0; $i<@pre_array_jobs; ++$i){
+ my $tmp = sprintf("$qsub $pre_array_jobs[$i]");
+ if($do_qsub_preaj[$i]){
+ printf $fh ("$tmp\n");
+ }
+ else{
+ printf $fh ("#$tmp\n");
+ }
+ }
+
+=pod
+ my $foofoo = @list*$task_l;
+ for(my $i=0; $i<@do_qsub_aj; ++$i){
+ printf STDERR ("%d ",$do_qsub_aj[$i]);
+ if($i % $task_l == $task_l-1){
+ if($i >= $foofoo){
+ printf STDERR (" post\n");
+ }
+ else{
+ printf STDERR (" pre\n");
+ }
+ }
+ if($i % (@list*$task_l) == (@list*$task_l)-1){
+ printf STDERR ("\n");
+ }
+ }
+ for(my $i=0; $i<@dqa_offsets; ++$i){
+ printf STDERR ("# %d\n",$dqa_offsets[$i]);
+ }
+=cut
+ for(my $i=0; $i<@array_jobs; ++$i){
+ my $c=$do_qsub_aj[$task_l*$i];
+ $task_f=1;
+ my $j;
+ for($j=0; $j<$task_l; ++$j){
+ if($do_qsub_aj[$task_l*$i+$j] == $c){
+ }
+ else{
+ my $tmp;
+ $tmp = sprintf("$qsub -t $task_f-$j:1 $array_jobs[$i]");
+ if($c){
+ printf $fh ("$tmp\n");
+ }
+ else{
+ printf $fh ("#$tmp\n");
+ }
+ $c = $do_qsub_aj[$task_l*$i+$j];
+ $task_f=$j+1;
+ }
+ }
+ my $tmp;
+ $tmp = sprintf("$qsub -t $task_f-$j:1 $array_jobs[$i]");
+ if($c){
+ printf $fh ("$tmp\n");
+ }
+ else{
+ printf $fh ("#$tmp\n");
+ }
+ }
+ for(my $i=0; $i<@post_array_jobs; ++$i){
+ my $tmp = sprintf("$qsub $post_array_jobs[$i]");
+ if($do_qsub_postaj[$i]){
+ printf $fh ("$tmp\n");
+ }
+ else{
+ printf $fh ("#$tmp\n");
+ }
+ }
+ close $fh;
+
+ if(!$opt_dryrun && $index == $from){
+ `qsub $qsub_script`;
+ }
+ }
+}
+
+# runCA
+#my $ca_command="ca_ikki_v3.pl ";
+#my $ikki_log="$pwd/ca_ikki_v3.log";
+#$ca_command .= "-d $datadir -from $from -to $to $asm_spec $estimated_genome_size -ca_path $ca_path > $ikki_log 2>&1";
+#if($opt_dryrun){
+# printf("%s\n",$ca_command);
+#}
+#else{
+# `$ca_command`;
+#}
+
+# Append the original mtimes of every file that do_or_not() touched during
+# this run to a .tss log, so a later inspection can tell which outputs were
+# invalidated and what their timestamps used to be.
+{
+ open my $fh, ">>$logdir/$now.tss" or die "cannot open $now.tss: $!\n";
+
+ #foreach my $key (keys %original_time_stamps){
+ # iterate the ordered list (not the hash) to keep log order stable
+ foreach my $key (@modified_file_names){
+ printf $fh ("%s\t%d\n",$key,$original_time_stamps{$key});
+ }
+
+ close $fh;
+}
+
+# Decide whether a build step must run (make-style dependency check).
+# Args (prototype $$): an array ref of parent (input) paths and a scalar
+# ref of the child (output) path.
+# Returns 1 when the child is missing, any parent is missing, or any
+# parent is newer than the child; 0 when the child is up to date.
+# Side effects: when an existing child is found out of date, its original
+# mtime is recorded in the globals @modified_file_names /
+# %original_time_stamps and the child is `touch`ed, which forces every
+# downstream step that depends on it to rerun as well.
+sub do_or_not($$){
+ my $a = shift;
+ my $b = shift;
+
+ my @parents = @$a;
+ my $child = $$b;
+
+ # default: up to date, nothing to do
+ my $f_do=0;
+
+# printf STDERR ("c %s\n",$child);
+# printf STDERR ("p %s\n",$parents[0]);
+ if(!-e $child){
+ $f_do=1;
+ }
+ else{
+ for(my $i=0; $i<@parents; ++$i){
+ if(!-e $parents[$i]){
+ # a parent vanished: rebuild, record the child's old mtime
+ # (stat field 9 = mtime) and bump it so later stages rerun too
+ $f_do=1;
+ my @c_fs=stat $child;
+ push @modified_file_names,$child;
+ $original_time_stamps{"$child"} = $c_fs[9];
+ `touch $child`;
+ last;
+ }
+ else{
+ # compare modification times (stat field 9 = mtime)
+ my @p_fs=stat $parents[$i];
+ my @c_fs=stat $child;
+ my $p_bd = $p_fs[9];
+ my $c_bd = $c_fs[9];
+ if($p_bd > $c_bd){
+ # parent is newer: rebuild and propagate via touch
+ $f_do=1;
+ push @modified_file_names,$child;
+ $original_time_stamps{"$child"} = $c_fs[9];
+ `touch $child`;
+ last;
+ }
+ else{
+ #$f_do=0;
+ }
+ }
+ }
+=pod
+ if(!$f_do && $child =~ /\.dfq\.gz$/){
+ my $ret = system("gzip -d -c -t $child 2> /dev/null");
+ if($ret){
+ printf STDERR "broken $child\n";
+ $f_do=1;
+ `touch $child`;
+ }
+ }
+=cut
+ }
+ return $f_do;
+}
+
+# Ceiling of a numeric value: returns $val unchanged when it is already
+# integral, otherwise the next larger integer.
+# NOTE(review): int($val+1.0) truncates toward zero, so this is a true
+# ceiling only for the non-negative values this script passes in.
+# Dies when the argument does not look like a number.
+sub round_up($){
+ my $val = shift;
+ # validate before doing arithmetic on the value
+ my $tmp = Scalar::Util::looks_like_number($val);
+ if(!$tmp){
+ # fixed typo in the error message ("look line" -> "look like a")
+ die "$val does not look like a number\n";
+ }
+ my $ret;
+ if($val == int($val)){
+ $ret = int($val);
+ }
+ else{
+ $ret = int($val+1.0);
+ }
+ return $ret;
+}
diff --git a/ezez_vx1.pl b/ezez_vx1.pl
new file mode 100755
index 0000000..afa7827
--- /dev/null
+++ b/ezez_vx1.pl
@@ -0,0 +1,727 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+use Getopt::Long;
+
+my $DEVEL;
+my $DEBUG;
+
+my $opt_dryrun;
+my $opt_ec_only;
+my $opt_foobar;
+my $sprai_path="";
+
+my $now = `date +%Y%m%d_%H%M%S`;
+chomp $now;
+
+my %original_time_stamps = ();
+my @modified_file_names = ();
+
+GetOptions(
+ "n" => \$opt_dryrun,
+ "devel" => \$DEVEL,
+ "debug"=>\$DEBUG,
+ "ec_only"=>\$opt_ec_only,
+ "sprai_path=s"=>\$sprai_path,
+ "now=s"=>\$now,
+ "foobar"=>\$opt_foobar
+);
+
+my %params;
+
+my @msgs = (
+ "USAGE: <this> <ec.spec> <asm.spec>",
+ "[-n: only shows parameters in ec.spec and exit.]",
+ "[-ec_only: does error correction and does NOT assemble]",
+ #'[-debug: outputs intermediate files (not implemented)]',
+ '[-now yyyymmdd_hhmmss: use a result_yyyymmdd_hhmmss directory, detect unfinished jobs and restart at the appropriate stage.]',
+);
+
+# Require exactly two positional arguments (ec.spec and asm.spec);
+# otherwise print the usage text and exit non-zero.
+if(@ARGV != 2){
+ # fixed: the mailing-list archive munged '@msgs' into ' at msgs'
+ my $msg = join "\n\t",@msgs;
+ printf STDERR ("%s\n",$msg);
+ exit(1);
+}
+
+my $spec=$ARGV[1];
+
+# Read key/value parameters from the ec.spec file (first command-line
+# argument) into the global %params hash.
+# Format: whitespace-separated "key value [value ...]" lines; blank lines
+# and lines starting with '#' are skipped, and a trailing '#' comment
+# truncates the remaining fields of a line.
+{
+ my $ec_spec = $ARGV[0];
+ open my $fh,"<",$ec_spec or die "cannot open $ec_spec :$!\n";
+ while($_ = <$fh>){
+ next if($_ =~ /^\s+$/);
+ next if($_ =~ /^\s*#/);
+ chomp;
+ my @line = split /\s+/,$_;
+ # if(@line < 2){
+ # die "strange line in $ec_spec\n$_\n";
+ # }
+ for(my $i=0; $i<@line; ++$i){
+ if($line[$i] =~ /^\s*#/){
+ # drop the in-line comment and everything after it
+ @line = @line[0..$i-1];
+ last;
+ }
+ }
+ # join the remaining fields back into one value string
+ # (fixed: the mailing-list archive munged '@line' into ' at line')
+ $params{$line[0]}=join(" ",@line[1..@line-1]);
+ if($params{$line[0]}){
+ # printf("%s %s#\n",$line[0],$params{$line[0]});
+ }
+ }
+ close $fh;
+}
+
+my $input_for_database;
+my $from=0;
+my $to=1;
+my $partition=12;
+my $evalue=1e-50;
+my $num_threads=1;
+my $max_target_seqs=100;
+my $valid_voters=11;
+my $trim=42;
+my $estimated_genome_size=0;
+my $ca_path="/home/imai/wgs-7.0/Linux-amd64/bin/";
+my $word_size=0;
+my $min_len_for_query=1;
+my $max_len_for_query=1000000000000000;
+my $estimated_depth=0;
+
+my $blasr_path="";
+my $blasr_opt="";
+
+if(defined($params{input_for_database})){
+ $input_for_database = $params{input_for_database};
+ if(!-e $input_for_database){
+ die "$input_for_database does not exist.\n";
+ }
+}
+else{
+ die "specify input_for_database in ec.spec\n";
+}
+
+if(defined($params{estimated_genome_size})){
+ $estimated_genome_size = $params{estimated_genome_size};
+}
+else{
+ die "specify estimated_genome_size in ec.spec\n";
+}
+if($estimated_genome_size<=0){
+ die "estimated_genome_size must be > 0\n";
+}
+if(defined($params{estimated_depth})){
+ $estimated_depth = $params{estimated_depth};
+}
+else{
+ die "specify estimated_depth in ec.spec\n";
+}
+
+if(defined($params{from})){
+ $from = $params{from};
+}
+if(defined($params{to})){
+ $to = $params{to};
+}
+if(defined($params{partition})){
+ $partition = $params{partition};
+}
+if(defined($params{evalue})){
+ $evalue = $params{evalue};
+}
+if(defined($params{num_threads})){
+ $num_threads = $params{num_threads};
+}
+# valid_voters: use the ec.spec value when given; otherwise derive it as
+# 80% of the estimated sequencing depth, clamped to the range [11, 30].
+if(defined($params{valid_voters})){
+ $valid_voters = $params{valid_voters};
+}
+else{
+# $valid_voters = 11;
+
+ $valid_voters = int(0.8*($estimated_depth+0.0));
+ # cap at 30 voters
+ if($valid_voters > 30){
+ $valid_voters = 30;
+ }
+ #my $n_base = -s $input_for_database;
+ #$n_base /= 2;
+ #$valid_voters = int(0.8*$n_base/$estimated_genome_size);
+ # floor at 11 voters
+ $valid_voters = ($valid_voters < 11) ? 11 : $valid_voters;
+}
+if(defined($params{trim})){
+ $trim = $params{trim};
+}
+if(defined($params{ca_path})){
+ $ca_path = $params{ca_path};
+}
+if(defined($params{sprai_path})){
+ $sprai_path = $params{sprai_path};
+ if(!-e "$sprai_path/nss2v_v3" || !-e "$sprai_path/fq2idfq.pl"){
+ die "there is no $sprai_path/nss2v_v3 or $sprai_path/fq2idfq.pl.\nset right sprai_path in ec.spec\n";
+ }
+}
+if(defined($params{word_size})){
+ $word_size = $params{word_size};
+}
+if(defined($params{blasr_path})){
+ $blasr_path = $params{blasr_path};
+}
+if(defined($params{blasr_opt})){
+ $blasr_opt = $params{blasr_opt};
+}
+if(defined($params{min_len_for_query})){
+ $min_len_for_query = $params{min_len_for_query};
+}
+if(defined($params{max_len_for_query})){
+ $max_len_for_query = $params{max_len_for_query};
+}
+if(defined($params{max_target_seqs})){
+ $max_target_seqs = $params{max_target_seqs};
+}
+
+printf STDERR ("#>- params -<#\n");
+printf STDERR ("input_for_database %s\n",$input_for_database);
+printf STDERR ("estimated_genome_size %g\n",$estimated_genome_size);
+printf STDERR ("estimated_depth %d\n",$estimated_depth);
+#printf STDERR ("from %s\n",$from);
+#printf STDERR ("to %s\n",$to);
+printf STDERR ("partition %s\n",$partition);
+printf STDERR ("evalue %g\n",$evalue);
+printf STDERR ("num_threads %d\n",$num_threads);
+printf STDERR ("valid_voters %s\n",$valid_voters);
+printf STDERR ("trim %d\n",$trim);
+printf STDERR ("ca_path %s\n",$ca_path);
+if($sprai_path){
+ printf STDERR ("sprai_path %s\n",$sprai_path);
+}
+if($word_size){
+ printf STDERR ("word_size %d\n",$word_size);
+}
+if($blasr_path){
+ printf STDERR ("blasr_path %s\n",$blasr_path);
+ if($blasr_opt){
+ printf STDERR ("blasr_opt %s\n",$blasr_opt);
+ }
+}
+if($min_len_for_query){
+ printf STDERR ("min_len_for_query %d\n",$min_len_for_query);
+}
+if($max_len_for_query){
+ printf STDERR ("max_len_for_query %d\n",$max_len_for_query);
+}
+if($max_target_seqs){
+ printf STDERR ("max_target_seqs %d\n",$max_target_seqs);
+}
+printf STDERR ("#>- params -<#\n");
+
+if(!-e $ca_path){
+ die "$ca_path does not exist.\n";
+}
+
+if(!$opt_ec_only && !-e $spec){
+ die "$spec does not exist.\n";
+}
+
+if($opt_dryrun){
+ exit;
+}
+
+my $preprefix="c";
+my $CURRENT=$partition;
+
+my $outfmt = "7 qseqid sstart send sacc qstart qend bitscore evalue pident qseq sseq";
+
+my $index=0;
+my $command="";
+
+my $valid_depth = 4;
+my $valid_read_length = 500;
+my $confident_depth = $valid_depth;
+my $confident_length_coefficient = 0.75;
+
+my $date="";
+my $message="";
+
+# mkdirs
+my $PWD = `pwd`;
+chomp $PWD;
+
+my $result_dir="$PWD/result_$now/";
+if(!-d $result_dir){
+ mkdir $result_dir or die "cannot mkdir $result_dir: $!";
+}
+my $tmp_dir="$result_dir/tmp/";
+if(!-d $tmp_dir){
+ mkdir $tmp_dir or die "cannot mkdir $tmp_dir: $!\n";
+}
+
+# ec
+
+my $orig_idfq = sprintf("$tmp_dir/$preprefix%02d.idfq.gz",0);
+
+my $db_idfq_gz = sprintf("$tmp_dir/$preprefix%02d.db.idfq.gz",0);
+my $qu_idfq_gz = sprintf("$tmp_dir/$preprefix%02d.qu.idfq.gz",0);
+
+if($from == 0)
+{
+ # fq -> idfq (& id2n)
+ my $PG = "fq2idfq.pl";
+ if($sprai_path){
+ $PG = "$sprai_path/$PG";
+ }
+ my $PG2 = "fqfilt.pl";
+ if($sprai_path){
+ $PG2 = "$sprai_path/$PG2";
+ }
+ my $dumbbell_filter = "dumbbell_filter.pl";
+ if($sprai_path){
+ $dumbbell_filter = "$sprai_path/$dumbbell_filter";
+ }
+ my $com1;
+ my $com2;
+
+ {
+ my $f_do=1;
+ my @parents;
+ push @parents,$input_for_database;
+ my $child=$db_idfq_gz;
+ $f_do = &do_or_not(\@parents,\$child);
+# printf STDERR ("%d\n",$f_do);
+
+ my $parent=$input_for_database;
+ my $PG0 = "fa2fq.pl";
+ if($sprai_path){
+ $PG0 = "$sprai_path/$PG0";
+ }
+ if($f_do){
+ $com1 = sprintf("time cat $parent | $PG0 - | $dumbbell_filter - | $PG - --prefix $tmp_dir/db%02d | gzip -c -1 > $child.tmp && mv $child.tmp $child",$from);
+ `$com1`;
+ }
+ else{
+ $com1 = "sleep 0";
+ }
+ }
+
+ {
+ my $f_do=1;
+ my @parents;
+ push @parents,$db_idfq_gz;
+ my $child=$qu_idfq_gz;
+ $f_do = &do_or_not(\@parents,\$child);
+# printf STDERR ("%d\n",$f_do);
+
+ my $parent=$db_idfq_gz;
+ if($f_do){
+ if($min_len_for_query > 1){
+ $com2 = sprintf("time gzip -d -c $parent | $PG2 - $min_len_for_query -max_len $max_len_for_query | gzip -c -1 > $child.tmp && mv $child.tmp $child",$from);
+ }
+ else{
+ $com2 = sprintf("ln -s $parent $child");
+ }
+ `$com2`;
+ }
+ else{
+ $com2 = "sleep 0";
+ }
+ }
+}
+
+for(my $index=$from; $index<$to; ++$index){
+ {
+ my $com1;
+ my $com2;
+ my $PG1 = "dfq2fq_v2.pl";
+ if($sprai_path){
+ $PG1 = "$sprai_path/$PG1";
+ }
+ if($blasr_path){
+ die "under construction. sorry\n";
+ #$com1 = sprintf("gzip -d -c $qu_idfq_gz | $PG1 -f - > $tmp_dir/$preprefix%02d.fasta ",$index);
+ #$com1 .= sprintf(" && $blasr_path/sawriter $tmp_dir/$preprefix%02d.fasta ",$index);
+ }
+ else{
+ my $dummy_target = sprintf("$tmp_dir/makeblastdb_%02d.done",$index);
+ my @parents;
+ push @parents,$db_idfq_gz;
+ my $child = $dummy_target;
+ my $f_do=1;
+ $f_do = &do_or_not(\@parents,\$child);
+ my $parent = $parents[0];
+ if($f_do){
+ $com1 = sprintf("time gzip -d -c $parent | $PG1 -f - | makeblastdb -in - -dbtype nucl -out $tmp_dir/$preprefix%02d -title $preprefix%02d 1>$child.tmp && mv $child.tmp $child",$index,$index);
+ }
+ else{
+ $com1 = "sleep 0";
+ }
+ }
+
+ my $PG2="partition_fa.pl";
+ if($sprai_path){
+ $PG2 = "$sprai_path/$PG2";
+ }
+ {
+ my $dummy_target = sprintf("$tmp_dir/partition_fa_%02d.done",$index);
+ my @parents;
+ push @parents,$qu_idfq_gz;
+ my $child = $dummy_target;
+ my $f_do=1;
+ $f_do = &do_or_not(\@parents,\$child);
+ my $parent = $parents[0];
+ if($f_do){
+ $com2 = sprintf("time gzip -d -c $parent | $PG1 -f - | $PG2 - $partition -p $tmp_dir/$preprefix%02d 1>$child.tmp && mv $child.tmp $child", $index);
+ }
+ else{
+ $com2 = "sleep 0";
+ }
+ }
+
+ `$com1 & $com2 & wait`;
+ }
+
+ {
+ $command="";
+ for(my $i=0; $i<$partition; ++$i){
+ my $t_trim = $trim;
+ if($index>0){
+ $t_trim = 0;
+ }
+ my $PG2=sprintf("bfmt72s -c %d -u -i", $t_trim);
+ if($sprai_path){
+ $PG2 = "$sprai_path/$PG2";
+ }
+ my $PG3="nss2v_v3 -v $valid_voters";
+ if($sprai_path){
+ $PG3 = "$sprai_path/$PG3";
+ }
+ my $PG4="myrealigner -f -B $valid_voters -b 3 -d 0.5";
+ if($sprai_path){
+ $PG4 = "$sprai_path/$PG4";
+ }
+ my $BLASTN = "blastn -dbsize 1 -num_threads $num_threads";
+ if($word_size){
+ $BLASTN .= " -word_size $word_size";
+ }
+ if($max_target_seqs){
+ $BLASTN .= " -max_target_seqs $max_target_seqs";
+ }
+ if($DEBUG){
+ die "sorry. under construction\n";
+=pod
+ $PG4="realigner_v2 -B $valid_voters -b 3";
+ if($DEVEL){
+ $PG4 = "./".$PG4;
+ }
+ my $PG5="realigner_v2 -f -n -B $valid_voters -b 3";
+ if($DEVEL){
+ $PG5 = "./".$PG5;
+ }
+ $command.= sprintf("cat $preprefix%02d_%04d.fa | $BLASTN -db $preprefix%02d -query - -evalue $evalue -outfmt '$outfmt' | tee $preprefix%02d_%04d.blastn | $PG2 - | tee $preprefix%02d_%04d.nss | $PG3 - | tee $preprefix%02d_%04d.vertical | $PG4 - | tee $preprefix%02d_%04d.realigned | $PG5 - | gzip -c -1 > $preprefix%02d_%04d.dfq.gz & ", $index,$i,$index,$index,$i,$index,$i,$index,$i,$index,$i,$index,$i);
+=cut
+ }
+ else{
+ if($blasr_path){
+ die "under construction. sorry 2\n";
+ #$command .= sprintf("$blasr_path/blasr $tmp_dir/$preprefix%02d_%04d.fa $tmp_dir/$preprefix%02d.fasta -sa $tmp_dir/$preprefix%02d.fasta.sa -m 5 $blasr_opt -out $tmp_dir/tmp%02d_%04d.m5 ",$index,$i,0,0,$index,$i);
+ #$command .= sprintf(" && cat $tmp_dir/tmp%02d_%04d.m5 | m52bfmt7 - | $PG2 - | $PG3 - | $PG4 - | gzip -c -1 > $tmp_dir/$preprefix%02d_%04d.dfq.gz & ", $index,$i,$index,$i);
+ }
+ else{
+ my $target = sprintf("$tmp_dir/$preprefix%02d_%04d.dfq.gz",$index,$i);
+ my $parent = sprintf("$tmp_dir/$preprefix%02d_%04d.fa",$index,$i);
+ my $dummy_parent_1 = sprintf("$tmp_dir/makeblastdb_%02d.done",$index);
+ my $dummy_parent_2 = sprintf("$tmp_dir/partition_fa_%02d.done",$index);
+ my @parents;
+ push @parents,$parent;
+ push @parents,$dummy_parent_1;
+ push @parents,$dummy_parent_2;
+ my $child = $target;
+ my $f_do=1;
+ $f_do = &do_or_not(\@parents,\$child);
+ if($f_do){
+ $command .= sprintf("time cat $parent | $BLASTN -db $tmp_dir/$preprefix%02d -query - -evalue $evalue -outfmt '$outfmt' | $PG2 - | $PG3 - | $PG4 - | gzip -c -1 > $child.tmp && mv $child.tmp $child & ", $index);
+ }
+ else{
+ $command .= "sleep 0 & ";
+ }
+ }
+ }
+ }
+ $command .= " wait ";
+
+ `$command`;
+ }
+
+ #finish
+ my $finished_file = sprintf("$result_dir/$preprefix%02d.fin.idfq.gz",$index+1);
+ {
+ my @n_parents=();
+
+ my $current = $CURRENT;
+ for(my $i=0,$command=""; $i<$partition; $i+=$current,$command=""){
+ my $right = $i+$current;
+ for(my $j=$i; $j<$right && $j<$partition; ++$j){
+ my $PG1 = "dfq2fq_v2.pl";
+ if($sprai_path){
+ $PG1 = "$sprai_path/$PG1";
+ }
+ my $parent = sprintf("$tmp_dir/$preprefix%02d_%04d.dfq.gz",$index,$j);
+ my $child = sprintf("$tmp_dir/$preprefix%02d.refined_%04d.fin.idfq.gz",$index,$j);
+ push @n_parents, $child;
+
+ my @parents;
+ push @parents,$parent;
+ my $f_do=1;
+ $f_do = &do_or_not(\@parents,\$child);
+ if($f_do){
+ $command .= sprintf("time gzip -d -c $parent | $PG1 - -finish -valid_depth $valid_depth -valid_read_length $valid_read_length | gzip -c -1 > $child.tmp && mv $child.tmp $child & ");
+ }
+ else{
+ $command .= "sleep 0 & ";
+ }
+ }
+ $command .= " wait ";
+
+ `$command`;
+ }
+
+ {
+ my $child = $finished_file;
+ my $f_do=1;
+ my @parents = @n_parents;
+ $f_do = &do_or_not(\@parents,\$child);
+ if($f_do){
+ if(-e $child){
+ `rm $child`;
+ }
+ {
+ my $current = $CURRENT;
+ for(my $i=0; $i<$partition; $i+=$current){
+ my $files="";
+ for(my $j=$i; $j<$i+$current && $j<$partition; ++$j){
+ $files .= sprintf(" $parents[$j]");
+ }
+ $command="";
+ $command .= sprintf("cat $files >> $child.tmp && mv $child.tmp $child");
+ `$command`;
+ }
+ }
+ }
+ else{
+ }
+ }
+ }
+
+ # prep for next iter
+ my $topXx = sprintf("$result_dir/$preprefix%02d.fin.longestXx.fq",$index+1);
+ {
+ my $PG0 = "get_top_20x_fa.pl";
+ if($sprai_path){
+ $PG0 = "$sprai_path/$PG0";
+ }
+ my $com;
+
+ my $parent = $finished_file;
+ my @parents;
+ push @parents,$parent;
+ my $child = $topXx;
+ my $f_do=1;
+ $f_do = &do_or_not(\@parents,\$child);
+ if($f_do){
+ my $uuid = $now;
+ #my $uuid = `uuidgen`;
+ chomp $uuid;
+ $com = "time gzip -d -c $parent > $result_dir/$uuid.tmp && $PG0 $result_dir/$uuid.tmp -l -g $estimated_genome_size -q -c 20 > $child.tmp && mv $child.tmp $child && rm $result_dir/$uuid.tmp";
+ }
+ else{
+ $com = "sleep 0";
+ }
+
+ `$com`;
+=pod
+ # fq -> idfq (& id2n)
+ my $PG = "fq2idfq.pl";
+ if($sprai_path){
+ $PG = "$sprai_path/$PG";
+ }
+ my $input_fastq_for_query = $topXx;
+ $qu_idfq_gz = sprintf("$tmp_dir/$preprefix%02d.qu.idfq.gz",$index+1);
+ $db_idfq_gz = sprintf("$tmp_dir/$preprefix%02d.db.idfq.gz",$index+1);
+
+ $parent = $input_fastq_for_query;
+ @parents = ();
+ push @parents,$parent;
+ $child = $qu_idfq_gz;
+ $f_do = &do_or_not(\@parents,\$child);
+
+ my $com1;
+ if($f_do){
+ $com1 = sprintf("(cat $parent | $PG - --prefix $tmp_dir/qu%02d | gzip -c -1 > $child.tmp && mv $child.tmp $child)",$index+1);
+ }
+ else{
+ $com1 = "sleep 0";
+ }
+
+ my $parent_1 = $input_fastq_for_query;
+ my $parent_2 = $input_for_database;
+ @parents = ();
+ push @parents,$parent_1;
+ push @parents,$parent_2;
+ $child = $db_idfq_gz;
+ $f_do = &do_or_not(\@parents,\$child);
+
+ my $com2;
+ if($f_do){
+ # don't 'cat database query | fq2idfq.pl - ... '
+ # do 'cat query database | fq2idfq.pl - ... '
+ $com2 = sprintf("(cat $parent_1 $parent_2 | $PG - --prefix $tmp_dir/db%02d | gzip -c -1 > $child.tmp && mv $child.tmp $child)",$index+1);
+ }
+ else{
+ $com2 = "sleep 0";
+ }
+
+ `$com1 & $com2 & wait`;
+=cut
+
+ }
+}
+
+my $ikki="ca_ikki_v5.pl";
+my $ikki_log="$PWD/ca_ikki.log";
+
+my $ca_dir="$result_dir/CA/";
+
+my $ca_command_2="$ikki ";
+if($sprai_path){
+ $ca_command_2 = "$sprai_path/$ca_command_2";
+}
+if($ca_path){
+ $ca_command_2 .= "-ca_path $ca_path ";
+}
+if($sprai_path){
+ $ca_command_2 .= "-sprai_path $sprai_path ";
+}
+my $t_from = $to-1;
+$ca_command_2 .= "-from $t_from -to $to $spec $estimated_genome_size -d $result_dir -out_dir $ca_dir > $ikki_log 2>&1";
+#$ca_command_2 .= "-from $from -to $to $spec $estimated_genome_size -d $result_dir -out_dir $ca_dir > $ikki_log 2>&1";
+
+if(!$opt_ec_only && !$opt_foobar){
+ my $finished_file = sprintf("$result_dir/$preprefix%02d.fin.idfq.gz",$to);
+ my @parents;
+ push @parents,$finished_file;
+ my $child = $ikki_log;
+ my $f_do=1;
+ $f_do = &do_or_not(\@parents,\$child);
+ if($f_do){
+ `$ca_command_2`;
+ }
+ else{
+ }
+}
+
+# Append the original mtimes of every file that do_or_not() touched during
+# this run to a .tss log in the tmp directory, keeping the order in which
+# the files were invalidated.
+{
+ open my $fh, ">>$tmp_dir/$now.tss" or die "cannot open $now.tss: $!\n";
+
+ foreach my $key (@modified_file_names){
+ printf $fh ("%s\t%d\n",$key,$original_time_stamps{"$key"});
+ }
+
+ close $fh;
+}
+
+# test code
+if($opt_foobar){
+ my $outfmt_2 = "7 qseqid sstart send sacc qstart qend bitscore evalue pident";
+ my $unique_limit=-1;
+ {
+ `gzip -d -c $tmp_dir/c00_0000.dfq.gz | dfq2fq_v2.pl --orig_depth - | count_chars.pl - > $tmp_dir/tmp.count`;
+ my $count=0;
+ open my $fh,"<","$tmp_dir/tmp.count" or die "cannot open $tmp_dir/tmp.count: $!\n";
+ while(my $line = <$fh>){
+ ++$count;
+ chomp $line;
+ if($count == 3){
+ $line =~ /(\d+)\s+\d+/;
+ $unique_limit = $1;
+ close $fh;
+ last;
+ }
+ }
+ }
+ if(!defined($unique_limit) || $unique_limit<0){
+ die "strange unique_limit: $unique_limit\n";
+ }
+
+ my $PG1 = "get_top_20x_fa.pl";
+ if($sprai_path){
+ $PG1 = "$sprai_path/$PG1";
+ }
+ `(gzip -d -c $result_dir/c01.fin.idfq.gz | $PG1 - -g $estimated_genome_size -q | fq2fa.pl - > $tmp_dir/c01.fin.longestXx.fa) `;
+
+ {
+ my $command="";
+ $command .= "fa2fq.pl $tmp_dir/c01.fin.longestXx.fa | fq2idfq.pl - | fq2fa.pl - > $tmp_dir/d01.fin.longestXx.fa";
+ `$command`;
+ }
+
+ {
+ my $command1="";
+ $command1.= sprintf("makeblastdb -in $tmp_dir/d01.fin.longestXx.fa -dbtype nucl -out $tmp_dir/d01 -title d01 ");
+ my $command2="";
+ $command2.= sprintf("partition_fa.pl $tmp_dir/d01.fin.longestXx.fa $partition -p $tmp_dir/d01");
+ `$command1 & $command2 & wait`;
+ }
+
+ {
+ my $command="";
+ for(my $i=0; $i<$partition; ++$i){
+ my $BLASTN = "blastn -dbsize 1 -num_threads $num_threads";
+ if($word_size){
+ $BLASTN .= " -word_size $word_size";
+ }
+ $command.= sprintf("cat $tmp_dir/d01_%04d.fa | $BLASTN -db $tmp_dir/d01 -query - -evalue $evalue -outfmt '$outfmt_2' | gzip -c -1 > $tmp_dir/d01_%04d.blastn.gz & ",$i,$i);
+ }
+ `$command wait`;
+ }
+}
+
+
+# Decide whether a build step must run (make-style dependency check);
+# duplicate of the helper in ezez4qsub_vx1.pl minus the commented-out
+# gzip-integrity check.
+# Args (prototype $$): an array ref of parent (input) paths and a scalar
+# ref of the child (output) path.
+# Returns 1 when the child is missing, any parent is missing, or any
+# parent is newer than the child; 0 when the child is up to date.
+# Side effects: when an existing child is out of date, its original mtime
+# is recorded in @modified_file_names / %original_time_stamps and the
+# child is `touch`ed so downstream steps rerun as well.
+sub do_or_not($$){
+ my $a = shift;
+ my $b = shift;
+
+ my @parents = @$a;
+ my $child = $$b;
+
+ # default: up to date, nothing to do
+ my $f_do=0;
+
+# printf STDERR ("%s\n",$child);
+# printf STDERR ("%s\n",$parents[0]);
+ if(!-e $child){
+ $f_do=1;
+ }
+ else{
+ for(my $i=0; $i<@parents; ++$i){
+ if(!-e $parents[$i]){
+ # a parent vanished: rebuild, record the child's old mtime
+ # (stat field 9 = mtime) and bump it so later stages rerun too
+ $f_do=1;
+ my @c_fs=stat $child;
+ push @modified_file_names,$child;
+ $original_time_stamps{"$child"} = $c_fs[9];
+ `touch $child`;
+ last;
+ }
+ else{
+ # compare modification times (stat field 9 = mtime)
+ my @p_fs=stat $parents[$i];
+ my @c_fs=stat $child;
+ my $p_bd = $p_fs[9];
+ my $c_bd = $c_fs[9];
+ if($p_bd > $c_bd){
+ # parent is newer: rebuild and propagate via touch
+ $f_do=1;
+ push @modified_file_names,$child;
+ $original_time_stamps{"$child"} = $c_fs[9];
+ `touch $child`;
+ last;
+ }
+ else{
+ #$f_do=0;
+ }
+ }
+ }
+ }
+ return $f_do;
+}
diff --git a/fa2fq.pl b/fa2fq.pl
new file mode 100755
index 0000000..c968b6b
--- /dev/null
+++ b/fa2fq.pl
@@ -0,0 +1,214 @@
+#!/usr/bin/perl
+# fa2fq.pl: convert FASTA (or pass through FASTQ) on stdin/ARGV to FASTQ on
+# stdout, fabricating a constant quality string (-q, default '5') for FASTA
+# records. -n renames reads to 1,2,3,...  Multi-line sequences are joined.
+use strict;
+use warnings;
+use Getopt::Long;
+
+my $opt_help=0;
+my $opt_shortname=0;
+my $QV="5";
+
+GetOptions(
+ 'q=s'=>\$QV,
+ 'n'=>\$opt_shortname,
+ 'help'=>\$opt_help
+);
+# the fabricated quality value must be exactly one character
+if(length($QV) != 1){
+ printf STDERR ("-q <one letter>\n");
+ printf STDERR ("you gave %s (length %d>1)\n",$QV,length($QV));
+ exit(1);
+}
+
+my @msgs=(
+ "USAGE: <this> <in.fasta>",
+ "[-q=s specify quality value (default: '5')]",
+ "[-n convert each read name to 1,2,3,...]",
+ "[-h show this message]"
+);
+
+if($opt_help){
+ # BUGFIX: the archived source read "join(\"\\n\\t\", at msgs)" -- the list
+ # archiver mangled '@' into ' at ', which is a syntax error. Restored.
+ my $msg = join("\n\t",@msgs);
+ printf STDERR ("%s\n",$msg);
+ exit(0);
+}
+
+my $counter=0;
+
+my $heading;
+my $name;
+
+# Peek at the first record to decide whether the input is FASTA or FASTQ:
+# after the (possibly multi-line) sequence, a '+' line means FASTQ and a
+# new heading means FASTA. Each early-EOF path emits the record and exits.
+{
+ my $l1 = <>;
+ chomp $l1;
+ $heading = substr($l1,0,1);
+ my $l2 = <>;
+ chomp $l2;
+ if(eof){
+ # single-record, single-line input: emit as FASTQ and quit
+ $l1 =~ s/^$heading/\@/;
+ if($opt_shortname){
+ printf("\@%d\n",++$counter);
+ }
+ else{
+ printf("%s\n",$l1);
+ }
+ my $qvs = "";
+ for(my $i=0; $i<length($l2); ++$i){
+ $qvs .= $QV;
+ }
+ printf("%s\n",$l2);
+ printf("+\n");
+ printf("%s\n",$qvs);
+ exit 0;
+ }
+
+ my $l3 = <>;
+ chomp $l3;
+ while(substr($l3,0,1) ne "+" && substr($l3,0,1) ne $heading){
+ # continuation line of a wrapped sequence
+ $l2 .= $l3;
+ if(eof){
+ $l1 =~ s/^$heading/\@/;
+ if($opt_shortname){
+ printf("\@%d\n",++$counter);
+ }
+ else{
+ printf("%s\n",$l1);
+ }
+ my $qvs = "";
+ for(my $i=0; $i<length($l2); ++$i){
+ $qvs .= $QV;
+ }
+ printf("%s\n",$l2);
+ printf("+\n");
+ printf("%s\n",$qvs);
+ exit 0;
+ }
+ $l3 = <>;
+ chomp $l3;
+ }
+ if(substr($l3,0,1) eq $heading){
+ # fasta
+ my $qvs = "";
+ for(my $i=0; $i<length($l2); ++$i){
+ $qvs .= $QV;
+ }
+ $l1 =~ s/^$heading/\@/;
+ if($opt_shortname){
+ printf("\@%d\n",++$counter);
+ }
+ else{
+ printf("%s\n",$l1);
+ }
+ printf("%s\n",$l2);
+ printf("+\n");
+ printf("%s\n",$qvs);
+ $name = $l3;
+ }
+ else{
+ # fastq: pass records through unchanged (names kept; -n not applied here)
+ my $l4 = <>;
+ chomp $l4;
+ my $qvs = $l4;
+ while($l4 = <>){
+ chomp $l4;
+ if(substr($l4,0,1) eq $heading){
+ last;
+ }
+ $qvs .= $l4;
+ }
+ $l1 =~ s/^$heading/\@/;
+ if($opt_shortname){
+ printf("\@%d\n",++$counter);
+ }
+ else{
+ printf("%s\n",$l1);
+ }
+ printf("%s\n",$l2);
+ printf("%s\n",$l3);
+ printf("%s\n",$qvs);
+ if(eof){
+ exit 0;
+ }
+ # stream the remaining FASTQ records
+ while(1){
+ $name = $l4;
+ my $bases="";
+ my $opts="";
+ $qvs="";
+ $bases = <>;
+ chomp $bases;
+ $opts = <>;
+ chomp $opts;
+ while(substr($opts,0,1) ne "+"){
+ $bases .= $opts;
+ $opts = <>;
+ chomp $opts;
+ }
+ while($l4 = <>){
+ chomp $l4;
+ if(substr($l4,0,1) eq $heading){
+ last;
+ }
+ $qvs .= $l4;
+ }
+ $name =~ s/^$heading/\@/;
+ printf("%s\n",$name);
+ printf("%s\n",$bases);
+ printf("%s\n",$opts);
+ printf("%s\n",$qvs);
+ if(eof){
+ exit 0;
+ }
+ $name = $l4;
+ }
+ }
+}
+
+# fasta
+
+#my $name = <>;
+#chomp $name;
+# FASTA main loop: accumulate wrapped sequence lines until the next heading,
+# then emit the buffered record with a fabricated quality string.
+$name =~ s/^$heading/\@/;
+++$counter;
+my $bases = "";
+my $qval = "";
+while(1){
+ while(my $buf=<>){
+ chomp $buf;
+ if($buf =~ /^$heading/){
+ for(my $i=0; $i < length($bases); ++$i){
+ $qval .= $QV;
+ }
+ if($opt_shortname){
+ printf("\@%d\n",$counter);
+ }
+ else{
+ printf("%s\n",$name);
+ }
+ printf("%s\n",$bases);
+ printf("+\n");
+ printf("%s\n",$qval);
+ $name = $buf;
+ $bases= "";
+ $qval = "";
+ $name =~ s/^$heading/\@/;
+ ++$counter;
+ last;
+ }
+ else{
+ $bases .= $buf;
+ }
+ }
+ if(eof){
+ last;
+ }
+}
+# flush the final record
+for(my $i=0; $i < length($bases); ++$i){
+ $qval .= $QV;
+}
+if($opt_shortname){
+ printf("\@%d\n",$counter);
+}
+else{
+ printf("%s\n",$name);
+}
+printf("%s\n",$bases);
+printf("+\n");
+printf("%s\n",$qval);
diff --git a/fq2fa.pl b/fq2fa.pl
new file mode 100755
index 0000000..6ca8c8b
--- /dev/null
+++ b/fq2fa.pl
@@ -0,0 +1,59 @@
+#!/usr/bin/perl
+# fq2fa.pl: convert FASTQ on stdin/ARGV to FASTA on stdout.
+# Joins wrapped sequence lines; quality lines are read and discarded.
+# -p treats input whose records already start with '>' (PacBio-style).
+use strict;
+use warnings;
+use Getopt::Long;
+
+my $pacbio;
+
+GetOptions('p'=>\$pacbio);
+
+if(@ARGV != 1){
+ die "USAGE: <this> <in.fq>\n\t[-p: against '\@'->'>' input]\n";
+}
+
+my $id_head_character="\@";
+
+if($pacbio){
+ $id_head_character=">";
+}
+else{
+}
+
+my $line = <>;
+while(!eof){
+ chomp $line;
+ # rewrite the record header to FASTA style ('>')
+ my $result = $line =~ s/^$id_head_character/>/;
+ if(!$result){
+ if($id_head_character eq "\@"){
+ # auto-detect '>'-headed input once, then re-process this line
+ $id_head_character = ">";
+ redo;
+ }
+ elsif($id_head_character eq ">"){
+ die "1. strange input $result\n$line\n";
+ }
+ }
+
+ #$line =~ s/^\@/>/;
+ print $line,"\n";
+
+ my $bases="";
+ $line =<>;
+ while($line !~ /^\+/){
+ chomp $line;
+ $bases .= $line;
+ $line = <>;
+ }
+ print $bases,"\n";
+
+ # consume (and drop) quality lines; the length guard keeps a qv line that
+ # happens to start with the heading char from ending the record early
+ my $qvs="";
+ $line =<>;# qvs
+ while($line !~ /^$id_head_character/ || length($qvs) < length($bases)){
+ chomp $line;
+ $qvs.=$line;
+ # do nothing
+ if(eof){
+ last;
+ }
+ $line = <>;
+ }
+}
diff --git a/fq2idfq.pl b/fq2idfq.pl
new file mode 100755
index 0000000..89a50cc
--- /dev/null
+++ b/fq2idfq.pl
@@ -0,0 +1,161 @@
+#!/usr/bin/perl
+# fq2idfq.pl: renumber FASTQ reads as 1,2,3,... ("idfq"), writing the
+# id->original-name table to <prefix>.id2n. Records whose base and quality
+# strings differ in length, or that are shorter than --valid_read_length,
+# are skipped. -flag replaces all quality values with '2';
+# -output_is_fasta emits FASTA instead of FASTQ.
+use strict;
+use warnings;
+use Getopt::Long;
+
+my $opt_largerthan;
+my $opt_shortname=1;
+my $prefix=`date +%Y%m%d%H%M%S`;
+chomp $prefix;
+my $dbsuffix = ".id2n";
+my $output_is_fasta=0;
+my $opt_flag_fq=0;
+
+my $valid_read_length=1;
+
+GetOptions(
+ 'l' => \$opt_largerthan,
+ 'prefix=s' => \$prefix,
+ 'valid_read_length=i'=>\$valid_read_length,
+ 'flag'=>\$opt_flag_fq,
+ 'output_is_fasta'=>\$output_is_fasta
+);
+
+my @msg = (
+ "USAGE: <this> <in.fq> > out.idfq",
+ "[--prefix prefix]",
+ "[-l : '\@'->'>']",
+ "[-output_is_fasta]",
+ "[-flag : discard quality values and use quality lines for iterative error correction]",
+);
+
+if(@ARGV != 1){
+ # BUGFIX: the archived source read "join \"\\n\\t\", at msg" -- the list
+ # archiver mangled '@' into ' at ', which is a syntax error. Restored.
+ my $msg = join "\n\t",@msg;
+ die "$msg\n";
+}
+
+my $id_head_character="\@";
+
+if($opt_largerthan){
+ $id_head_character=">";
+}
+else{
+}
+
+# id -> original-name mapping file
+my $fh;
+if($opt_shortname){
+ open $fh, ">", $prefix.$dbsuffix or die "cannot open $prefix.$dbsuffix:$!\n";
+}
+
+my $counter=0;
+my $printed_line=0;
+
+my $line = <>;
+++$counter;
+my $result;
+while(!eof){
+ chomp $line;
+ $result = $line =~ s/^$id_head_character/>/;
+ if(!$result){
+ if($id_head_character eq "\@"){
+ # auto-detect '>'-headed input once, then re-process this line
+ $id_head_character = ">";
+ redo;
+ }
+ elsif($id_head_character eq ">"){
+ die "1. strange input $result\n$line\n";
+ }
+ }
+ my($idfqline,$nameline,$baseline,$optionalline,$qvline);
+ if($opt_shortname){
+ #printf $fh ("%d\t%s\n",$counter,substr($line,1));
+ $idfqline = sprintf("%d\t%s",$counter,substr($line,1));
+ if($output_is_fasta){
+ $line = sprintf(">%d",$counter);
+ }
+ else{
+ $line = sprintf("\@%d",$counter);
+ }
+ }
+ $nameline = $line;
+ #print $line,"\n";# name
+ #++$printed_line;
+
+ my $bases="";
+ if(eof){
+ last;
+ }
+ $line =<>;
+ my $line_c = 1;
+ chomp $line;
+ while(1){# read bases
+ $bases .= $line;
+ if(eof){
+ last;
+ }
+ $line = <>;
+ chomp $line;
+ if($line =~ /^\+/){
+ chomp $line;
+ $optionalline = $line;
+ last;
+ }
+ else{
+ ++$line_c;
+ }
+ }
+ $baseline = $bases;
+
+ if(eof){
+ last;
+ }
+ #print "+\n";
+ #++$printed_line;
+ my $qvs="";
+ for(my $i = 0; $i<$line_c; ++$i){# # of lines of bases and qvs must be =
+ if(eof){
+ last;
+ }
+ $line = <>;
+ chomp $line;
+ $qvs .= $line;
+ }
+ $qvline = $qvs;
+ # emit only well-formed, long-enough records; malformed ones are skipped
+ if(length($baseline) != length($qvline)){
+ printf STDERR "strange input: # of bases and qvs differs\n$baseline\n$qvline\nthis record was skipped.\n";
+ }
+ #print $qvs,"\n";
+ #++$printed_line;
+
+ elsif(length($bases)>=$valid_read_length){
+ print $fh $idfqline,"\n";
+ print $nameline,"\n";
+ ++$printed_line;
+ print $baseline,"\n";
+ ++$printed_line;
+ if(!$output_is_fasta){
+ print $optionalline,"\n";
+ ++$printed_line;
+ if($opt_flag_fq){
+ # -flag: overwrite qvs with constant '2' markers
+ my $len = length($qvline);
+ $qvline = "";
+ for(my $i=0; $i<$len; ++$i){
+ $qvline .= "2";
+ }
+ }
+ print $qvline,"\n";
+ ++$printed_line;
+ }
+ }
+
+ if(!eof){
+ $line = <>;# next unit's name
+ }
+ ++$counter;
+}
+
+if($opt_shortname){
+ close $fh;
+}
+#if($printed_line % 4 != 0){
+# printf STDERR "WARNING: the input fq file may be broken\n";
+#}
diff --git a/fqfilt.pl b/fqfilt.pl
new file mode 100755
index 0000000..33fad78
--- /dev/null
+++ b/fqfilt.pl
@@ -0,0 +1,58 @@
+#!/usr/bin/perl
+# fqfilt.pl: length-filter a FASTQ stream; print only records whose joined
+# sequence length is in [min_len, --max_len]. min_len is the second
+# positional argument; -p handles '>'-headed (PacBio-style) records.
+use strict;
+use warnings;
+use Getopt::Long;
+
+my $pacbio;
+my $min_len = 1;
+my $max_len = 1000000000000;
+
+GetOptions('p'=>\$pacbio,"max_len=i"=>\$max_len);
+
+if(@ARGV != 2){
+ die "USAGE: <this> <in.fq> min_len\n";
+}
+
+my $id_head_character="\@";
+
+if($pacbio){
+ $id_head_character=">";
+}
+else{
+}
+
+$min_len = $ARGV[1];
+
+my $line = <>;
+while(!eof){
+ chomp $line;
+ my $name = $line;
+ #print $line,"\n";
+
+ # join wrapped sequence lines until the '+' separator
+ my $bases="";
+ $line =<>;
+ while($line !~ /^\+/){
+ chomp $line;
+ $bases .= $line;
+ $line = <>;
+ }
+ #print $bases,"\n";
+
+ # join quality lines; the length guard keeps a qv line that happens to
+ # start with the heading char from ending the record early
+ my $qvs="";
+ $line =<>;# qvs
+ while($line !~ /^$id_head_character/ || length($qvs) < length($bases)){
+ chomp $line;
+ $qvs.=$line;
+ # do nothing
+ if(eof){
+ last;
+ }
+ $line = <>;
+ }
+ if(length($bases)>=$min_len && length($bases)<=$max_len){
+ print $name,"\n";
+ print $bases,"\n";
+ print "+\n";
+ print $qvs,"\n";
+ }
+}
diff --git a/get_target_fasta_records.pl b/get_target_fasta_records.pl
new file mode 100755
index 0000000..336e0ec
--- /dev/null
+++ b/get_target_fasta_records.pl
@@ -0,0 +1,98 @@
+#!/usr/bin/perl
+# get_target_fasta_records.pl: load all records of <all.fa> into memory, then
+# print (as FASTA) those whose names are listed in <part.target_names>.
+# -s sorts the output by descending sequence length instead of list order.
+# Names present in the list but absent from <all.fa> produce a warning.
+use strict;
+use warnings;
+use Getopt::Long;
+
+# FIX: dropped a stray duplicate ';' that followed this initialization.
+my $sort_by_length=0;
+
+GetOptions('s'=>\$sort_by_length);
+
+my $error_message ="USAGE: <this> <all.fa> <part.target_names (generated by\n\t\$ cut -f 3 part.sam > part.target_names)>\n\t[-s: sort by length]";
+
+if(@ARGV != 2){
+ die "$error_message\n";
+}
+
+my $in_fa=$ARGV[0];
+my $in_names=$ARGV[1];
+
+open my $fh, "<", $in_fa or die "cannot open $in_fa: $!\n";
+
+my $name = <$fh>;
+chomp $name;
+
+my $bases = "";
+
+# name -> joined sequence for every record in <all.fa>
+my %reads;
+
+while(1){
+ while(my $buf=<$fh>){
+ chomp $buf;
+ if($buf =~ /^>/){
+ $name =~ s/^>//;
+ # confirm $name was not added
+ if(exists($reads{$name})){
+ printf STDERR ("WARNING: the record %s conflicted.\n",$name);
+ }
+
+# $bases =~ s/^N+//i;
+# $bases =~ s/N+$//i;
+ $reads{$name} = $bases;
+
+ $name = $buf;
+ $bases= "";
+ last;
+ }
+ else{
+ $bases .= $buf;
+ }
+ }
+ if(eof){
+# $bases =~ s/^N+//i;
+# $bases =~ s/N+$//i;
+ $reads{$name} = $bases;
+ last;
+ }
+}
+close $fh;
+
+my @names = keys %reads;
+my %exists_in_targets;
+if($sort_by_length){
+ @names = sort { length($reads{$b}) <=> length($reads{$a}) } keys %reads;
+}
+for(my $i=0; $i<@names; ++$i){
+ $exists_in_targets{$names[$i]} = 0;
+}
+
+open my $targets_fh, "<", $in_names or die "cannot open $in_names: $!\n";
+
+while(my $target_name = <$targets_fh>){
+ chomp $target_name;
+ if(exists($reads{$target_name})){
+ $exists_in_targets{$target_name} = 1;
+# printf(">%s\n",$target_name);
+# printf("%s\n",$reads{$target_name});
+ }
+ else{
+ printf STDERR ("WARNING: $target_name does not exist in $in_fa\n");
+ }
+ # without -s, emit immediately in target-list order
+ if(!$sort_by_length){
+ printf(">%s\n",$target_name);
+ printf("%s\n",$reads{$target_name});
+ }
+}
+
+close $targets_fh;
+
+if(!$sort_by_length){
+ exit;
+}
+else{
+ # with -s, emit matched records longest-first
+ for(my $i=0; $i<@names; ++$i){
+ if($exists_in_targets{$names[$i]}){
+ printf(">%s\n",$names[$i]);
+ printf("%s\n",$reads{$names[$i]});
+ }
+ }
+}
diff --git a/get_top_20x_fa.pl b/get_top_20x_fa.pl
new file mode 100755
index 0000000..c0b0519
--- /dev/null
+++ b/get_top_20x_fa.pl
@@ -0,0 +1,381 @@
+#!/usr/bin/perl
+# get_top_20x_fa.pl: from reads sorted-by-length, keep the longest reads
+# whose total adds up to <coverage> x <estimated genome size> (default 20x).
+# -q treats input as FASTQ; -l streams the file twice to save memory
+# (incompatible with '-' stdin); -w enables the windowed threshold search.
+use strict;
+use warnings;
+use Getopt::Long;
+
+my $windowsize=0;
+my $coverage=20;
+my $opt_fastq=0;
+my $opt_help=0;
+my $opt_light=0;
+
+my $estimated_genome_size = -1;
+
+my @msgs = (
+"USAGE <this> <in.fa> -g estimated_genome_size(integer>0)",
+"[-w windos_size: integer>0 (0)]",
+"[-c coverage: integer>0 (20)]",
+"[-q: regards input as fq]",
+"[-l: use less memory (but incompatible with a pipe because $0 reads <in.fa> twice)]",
+"[-h: show this message]"
+);
+
+# BUGFIX: the archived source read "join \"\\n\\t\", at msgs" -- the list
+# archiver mangled '@' into ' at ', which is a syntax error. Restored.
+my $error_message = join "\n\t",@msgs;
+#my $error_message ="USAGE <this> <in.fa> -g estimated_genome_size(integer>0)\n\t[-w windos_size(integer>0)(500)]\n\t[-c coverage(integer>0)(20)]\n\t[-q (regards input as fq)]";
+
+GetOptions(
+'g=f'=>\$estimated_genome_size,
+'w=i'=>\$windowsize,
+'c=i'=>\$coverage,
+'q' => \$opt_fastq,
+'l' => \$opt_light,
+'h' => \$opt_help
+);
+
+if(@ARGV != 1){
+ die "$error_message\n";
+}
+
+if($estimated_genome_size <= 0){
+ die "$error_message\n";
+}
+if($coverage <= 0){
+ die "$error_message\n";
+}
+
+# record-heading character: '>' for FASTA, '@' for FASTQ
+my $header = ">";
+if($opt_fastq){
+ $header = "\@";
+}
+
+my %lengths;
+my %reads;
+my %others;
+my %qvs;
+
+if($opt_light){
+ if($ARGV[0] eq '-'){
+ printf STDERR ("-l: incompatible with a pipe because $0 reads <in.fa> twice\n");
+ exit 1;
+ }
+ &light();
+ exit 0;
+}
+
+# In-memory path: read every record into %reads (and, for FASTQ, the '+'
+# separator line into %others and the joined quality string into %qvs),
+# keyed by read name. Wrapped sequence/quality lines are joined; the
+# FASTQ quality block is assumed to span the same number of lines as the
+# sequence block ($n_lines).
+my $buf = <>;
+
+while(1){
+ chomp $buf;
+ if($buf =~ /^$header/){
+ $buf =~ s/^$header//;
+ my $name = $buf;
+ # confirm $name was not added
+ if(exists($reads{$name})){
+ printf STDERR ("WARNING: the record %s is ambiguous.\n",$name);
+ }
+ $buf=<>;
+ chomp $buf;
+ my $tmp_read = $buf;
+ my $n_lines = 1;
+ while($buf = <>){
+ chomp $buf;
+ if($opt_fastq){
+ if($buf =~ /^\+/){
+ $reads{$name} = $tmp_read;
+ last;
+ }
+ }
+ else{
+ if($buf =~ /^>/){
+ $reads{$name} = $tmp_read;
+ last;
+ }
+ }
+ ++$n_lines;
+ $tmp_read .= $buf;
+ }
+ if(eof and !$opt_fastq){
+ # final FASTA record ends at EOF
+ $reads{$name} = $tmp_read;
+ last;
+ }
+
+ if($opt_fastq){
+ $others{$name} = $buf;# '+'
+ # quality block: same number of lines as the sequence block
+ my $tmp_qv = "";
+ for(my $i=0; $i<$n_lines; ++$i){
+ $buf = <>;
+ chomp $buf;
+ $tmp_qv .= $buf;
+ }
+ $qvs{$name} = $tmp_qv;
+ if(eof){
+ last;
+ }
+ else{
+ $buf = <>;
+ }
+ }
+ }
+ else{
+ printf STDERR ("strange input. $buf\n");
+ exit;
+ }
+}
+
+# Emit the longest reads first. Without -w: accumulate until coverage x
+# genome-size bases are printed, then keep printing only reads of the
+# length reached at that point (ties included); the cutoff length goes to
+# STDERR. With -w: search the cutoff in multiples of the window size.
+my @names = sort { length($reads{$b}) <=> length($reads{$a}) } keys %reads;
+
+if($windowsize){
+ my $maxlength=500;
+ my $limit=1000000;
+
+ # find the largest multiple of $windowsize such that reads at least that
+ # long still total >= coverage x genome size
+ for(my $i=1; $i*$windowsize <= $limit; ++$i){
+ my $totalbases=0;
+ my $threshold = $i*$windowsize;
+ for(my $j=0; $j<@names; ++$j){
+ if(length($reads{$names[$j]}) >= $threshold){
+ $totalbases += length($reads{$names[$j]});
+ }
+ else{
+ last;
+ }
+ }
+ if($totalbases >= $coverage*$estimated_genome_size){
+ $maxlength = $threshold;
+ }
+ else{
+ last;
+ }
+ }
+
+ if($maxlength+$windowsize > $limit){
+ printf STDERR ("WARNING: maybe strange input.\n");
+ }
+
+ for(my $j=0; $j<@names; ++$j){
+ if(length($reads{$names[$j]}) >= $maxlength){
+ if(!$opt_fastq){
+ printf(">%s\n",$names[$j]);
+ printf("%s\n",$reads{$names[$j]});
+ }
+ else{
+ printf("\@%s\n",$names[$j]);
+ printf("%s\n",$reads{$names[$j]});
+ printf("%s\n",$others{$names[$j]});
+ printf("%s\n",$qvs{$names[$j]});
+ }
+ }
+ else{
+ last;
+ }
+ }
+
+ printf STDERR ("%d\n",$maxlength);
+}
+else{
+ my $total_length = 0;
+ my $threshold_length = $estimated_genome_size * $coverage;
+ my $prev_length = 1000000000000000;
+ my $shortest_read_length=1;
+ my $f_printed=0;
+ for(my $j=0; $j<@names; ++$j){
+ my $current_rl = length($reads{$names[$j]});
+ # sanity check: @names must be in non-increasing length order
+ if($current_rl > $prev_length){
+ printf STDERR ("not sorted by length: %d %d\n", $prev_length, $current_rl);
+ exit 1;
+ }
+ $prev_length = $current_rl;
+ if(length($reads{$names[$j]}) >= $shortest_read_length){
+ if(!$opt_fastq){
+ printf(">%s\n",$names[$j]);
+ printf("%s\n",$reads{$names[$j]});
+ }
+ else{
+ printf("\@%s\n",$names[$j]);
+ printf("%s\n",$reads{$names[$j]});
+ printf("%s\n",$others{$names[$j]});
+ printf("%s\n",$qvs{$names[$j]});
+ }
+ $total_length += $current_rl;
+ if($total_length >= $threshold_length){
+ # coverage reached: from here on, only ties with this length print
+ $shortest_read_length=$current_rl;
+ if(!$f_printed){
+ printf STDERR ("%d\n",$current_rl);
+ $f_printed = 1;
+ }
+ #exit 0;
+ }
+ }
+ else{
+ exit 0;
+ }
+ }
+}
+
+# light(): two-pass, low-memory variant. Pass 1 records only read lengths
+# and derives the cutoff length ($shortest_read_length) at which cumulative
+# length reaches coverage x genome size; pass 2 re-reads the file and
+# prints the records meeting the cutoff. Requires a seekable file (not '-').
+sub light(){
+ open my $fh, "<$ARGV[0]" or die "cannot open $ARGV[0]: $!\n";
+
+ my @lengths = ();
+ {
+ # pass 1: collect the length of each record
+ my $buf = <$fh>;
+ while(1){
+ chomp $buf;
+ if($buf =~ /^$header/){
+ $buf =~ s/^$header//;
+ my $name = $buf;
+ # confirm $name was not added
+ if(exists($reads{$name})){
+ printf STDERR ("WARNING: the record %s is ambiguous.\n",$name);
+ }
+ $buf=<$fh>;
+ chomp $buf;
+ my $tmp_read = $buf;
+ my $n_lines = 1;
+ while($buf = <$fh>){
+ chomp $buf;
+ if($opt_fastq){
+ if($buf =~ /^\+/){
+ #$reads{$name} = $tmp_read;
+ push @lengths, length($tmp_read);
+ last;
+ }
+ }
+ else{
+ if($buf =~ /^>/){
+ #$reads{$name} = $tmp_read;
+ push @lengths, length($tmp_read);
+ last;
+ }
+ }
+ ++$n_lines;
+ $tmp_read .= $buf;
+ }
+ if(eof($fh) and !$opt_fastq){
+ #$reads{$name} = $tmp_read;
+ push @lengths, length($tmp_read);
+ last;
+ }
+
+ if($opt_fastq){
+ #$others{$name} = $buf;# '+'
+ my $tmp_qv = "";
+ for(my $i=0; $i<$n_lines; ++$i){
+ $buf = <$fh>;
+ chomp $buf;
+ $tmp_qv .= $buf;
+ }
+ #$qvs{$name} = $tmp_qv;
+ if(eof($fh)){
+ last;
+ }
+ else{
+ $buf = <$fh>;
+ }
+ }
+ }
+ else{
+ printf STDERR ("strange input (light 1). $buf\n");
+ exit 1;
+ }
+ }
+ }
+ # derive the cutoff length from the sorted length list
+ @lengths = sort {$b <=> $a} @lengths;
+ my $shortest_read_length = 1;
+ my $total_length = 0;
+ my $threshold_length = $estimated_genome_size * $coverage;
+ my $prev_length = 1000000000000000;
+ for(my $i=0; $i<@lengths; ++$i){
+ my $current_rl = $lengths[$i];
+ if($current_rl > $prev_length){
+ printf STDERR ("not sorted by length: %d %d\n", $prev_length, $current_rl);
+ exit 1;
+ }
+ $prev_length = $current_rl;
+ $total_length += $current_rl;
+ if($total_length >= $threshold_length){
+ $shortest_read_length = $current_rl;
+ last;
+ }
+ }
+ # pass 2: rewind and print records that meet the cutoff
+ if(!seek $fh,0,0){
+ die "failed to seek in $0\n";
+ }
+ my $buf = <$fh>;
+ while(1){
+ my $name;
+ my $bases;
+ my $opts;
+ my $qvs;
+
+ chomp $buf;
+ if($buf =~ /^$header/){
+ $buf =~ s/^$header//;
+ $name = $buf;
+ $buf=<$fh>;
+ chomp $buf;
+ my $tmp_read = $buf;
+ my $n_lines = 1;
+ while($buf = <$fh>){
+ chomp $buf;
+ if($opt_fastq){
+ if($buf =~ /^\+/){
+ $bases = $tmp_read;
+ last;
+ }
+ }
+ else{
+ if($buf =~ /^>/){
+ $bases = $tmp_read;
+ last;
+ }
+ }
+ ++$n_lines;
+ $tmp_read .= $buf;
+ }
+ if(eof($fh) and !$opt_fastq){
+ $bases = $tmp_read;
+# &flush();
+# last;
+ }
+
+ if($opt_fastq){
+ $opts = $buf; # '+'
+ my $tmp_qv = "";
+ for(my $i=0; $i<$n_lines; ++$i){
+ $buf = <$fh>;
+ chomp $buf;
+ $tmp_qv .= $buf;
+ }
+ $qvs = $tmp_qv;
+ if(eof($fh)){
+# &flush();
+# last;
+ }
+ else{
+ $buf = <$fh>;
+ }
+ }
+ }
+ else{
+ printf STDERR ("strange input (light 2). $buf\n");
+ exit;
+ }
+ if(length($bases) >= $shortest_read_length){
+ if($opt_fastq){
+ printf("\@%s\n",$name);
+ printf("%s\n",$bases);
+ printf("%s\n",$opts);
+ printf("%s\n",$qvs);
+ }
+ else{
+ printf(">%s\n",$name);
+ printf("%s\n",$bases);
+ }
+ }
+ if(eof($fh)){
+ last;
+ }
+ }
+ close $fh;
+ printf STDERR ("%d\n",$shortest_read_length);
+}
+
diff --git a/m52bfmt7.c b/m52bfmt7.c
new file mode 100644
index 0000000..663f2ab
--- /dev/null
+++ b/m52bfmt7.c
@@ -0,0 +1,151 @@
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+
+/* m52bfmt7: convert BLASR -m 5 alignments (one per line on stdin or a
+ * file) into the blast "-outfmt 7"-like column layout used elsewhere in
+ * the sprai pipeline. -s swaps query and subject names. */
+
+int LBUF = 1024*1024*1024;
+int LNAME = 1024;
+
+typedef struct blast{
+ int sstart,send,qstart,qend;
+ double bitscore,evalue,pident;
+}blast_t;
+
+
+void print_bfmt7(blast_t * b){
+ return;
+}
+
+/* report an allocation failure and terminate (never returns) */
+void mem(char * var_name){
+ fprintf(stderr,"cannot allocate memory: %s\n",var_name);
+ exit(1);
+}
+
+int opt_swap_query_and_subject=0;
+
+int main(int argc, char ** argv)
+{
+
+ int hitnum=0;
+ {
+ int result;
+ while((result=getopt(argc,argv,"s")) != -1){
+ switch(result){
+ case 's':
+ opt_swap_query_and_subject=1;
+ ++hitnum;
+ break;
+ case '?':
+ printf("humei\n");
+ break;
+ default:
+ break;
+ }
+ }
+ }
+
+ if(argc != 2+hitnum){
+ fprintf(stderr, "USAGE: <this> <in.m5 | - >\n");
+ fprintf(stderr, "\t-s: swap query and subject names\n");
+ return 1;
+ }
+ char * in_m5 = argv[1+hitnum];
+
+ FILE * fp;
+ if(in_m5[0] == '-'){
+ fp = stdin;
+ }
+ else{
+ fp = fopen(in_m5,"r");
+ }
+ if(fp == NULL){
+ fprintf(stderr,"cannot open the file %s\n", in_m5);
+ abort();
+ }
+
+ /* mem() exits, so no code is needed after each call (the original had
+ * unreachable duplicate fprintf/exit lines after mem(); removed). */
+ char * buf = (char*)malloc(sizeof(char)*LBUF);
+ if(buf == NULL){
+ mem("buf");
+ }
+ char * qseq = (char*)malloc(sizeof(char)*LBUF);
+ if(qseq == NULL){
+ mem("qseq");
+ }
+ char * sseq = (char*)malloc(sizeof(char)*LBUF);
+ if(sseq == NULL){
+ mem("sseq");
+ }
+ char * ali = (char*)malloc(sizeof(char)*LBUF);
+ if(ali == NULL){
+ mem("ali");
+ }
+ char * qname = (char*)malloc(sizeof(char)*LNAME);
+ if(qname==NULL){
+ mem("qname");
+ }
+ char * sname = (char*)malloc(sizeof(char)*LNAME);
+ if(sname==NULL){
+ mem("sname");
+ }
+ int qlen,qstt,qend,slen,sstt,send,score,nMatch,nMismatch,nIns,nDel,mapqv;
+ /* BUGFIX: qstrand/sstrand were single chars filled through sscanf "%s",
+ * which stores the token PLUS a terminating NUL (>= 2 bytes) and thus
+ * corrupted adjacent stack memory (undefined behavior). They are now
+ * small buffers, scanned with a width limit, and only [0] is tested. */
+ char qstrand[4],sstrand[4];
+
+ int scanfret;
+
+ while(fgets(buf,LBUF,fp) != NULL){
+ // m5:
+ // qname qlen qstt(0-origin) qend(1-origin) qstrand sname slen sstt(0-origin) send(1-origin) sstrand score nMatch nMismatch nIns nDel mapqv qseq ali sseq
+ // [stt,end)
+ scanfret = sscanf(buf,"%s %d %d %d %3s %s %d %d %d %3s %d %d %d %d %d %d %s %s %s", qname,&qlen,&qstt,&qend,qstrand,sname,&slen,&sstt,&send,sstrand,&score,&nMatch,&nMismatch,&nIns,&nDel,&mapqv,qseq,ali,sseq);
+ if(scanfret != 19){
+ fprintf(stderr, "sth strange: scanfret %d\n",scanfret);
+ exit(1);
+ }
+ // -outfmt '7 qseqid sstart send sacc qstart qend bitscore evalue pident qseq sseq'
+ double dummy_evalue=0.0;
+ int qnamelen = strlen(qname);
+ {
+ /* strip the trailing "/..." subread suffix from the query name */
+ int i;
+ for(i=qnamelen-1; i>=0; --i){
+ if(qname[i] == '/'){
+ qname[i] = '\0';
+ break;
+ }
+ }
+ }
+ if(sstrand[0] == '-'){// qstrand is always '+'
+ /* reverse-strand hit: swap subject coords back to forward order */
+ int tmp = send;
+ send = sstt+1;// 1-originize
+ sstt = tmp-1;// 0-originize
+ }
+ printf("%s %d %d %s %d %d %d %f %f %s %s\n",qname,sstt+1,send,sname,qstt+1,qend,score,dummy_evalue,(double)(nMatch)/(double)(nMatch+nMismatch+nIns+nDel),qseq,sseq);
+ }
+ if(fp != stdin){
+ fclose(fp);
+ }
+ free(buf);
+ free(qseq);
+ free(sseq);
+ free(ali);
+ free(qname);
+ free(sname);
+ return 0;
+}
diff --git a/makefile b/makefile
new file mode 100644
index 0000000..f6a175b
--- /dev/null
+++ b/makefile
@@ -0,0 +1,56 @@
+APPNAME = 'sprai'
+VERSION = '0.9.9.12'
+
+PREFIX=$(PWD)
+COMPILED= \
+bfmt72s \
+nss2v_v3 \
+myrealigner \
+m52bfmt7 \
+
+
+SCRIPTS= \
+ca_ikki_v5.pl \
+ezez4qsub_vx1.pl \
+ezez_vx1.pl \
+dumbbell_filter.pl \
+fa2fq.pl \
+fq2fa.pl \
+fq2idfq.pl \
+fqfilt.pl \
+get_top_20x_fa.pl \
+partition_fa.pl \
+get_target_fasta_records.pl \
+dfq2fq_v2.pl \
+extract_fq.pl \
+bfmtx2m4.pl \
+check_redundancy.pl \
+check_circularity.pl \
+#mira_ikki.pl \
+#sprai_dagcon_v3.py \
+#sprai_dagcon.cfg \
+#re2cons.pl \
+#ezez4qsub_v9.pl \
+#ezez_v8.pl \
+#ezez4makefile_v4.pl \
+
+
+all: $(COMPILED)
+
+bfmt72s: bfmt72s.c
+ $(CC) -Wall -O3 -g -o $@ $<
+
+nss2v_v3: nss2v_v3.c
+ $(CC) -Wall -O3 -g -o $@ $<
+
+myrealigner: myrealigner.c
+ $(CC) -Wall -O3 -g -o $@ $^
+
+m52bfmt7: m52bfmt7.c
+ $(CC) -Wall -O3 -g -o $@ $<
+
+
+install: $(COMPILED) $(SCRIPTS)
+# BUGFIX: was 'chmod 766', which removes the execute bit for group/other
+# and leaves the installed tools world-writable; executables need 755.
+ chmod 755 $^
+ cp -p $^ $(PREFIX)/bin/
+
diff --git a/myrealigner.c b/myrealigner.c
new file mode 100644
index 0000000..88622a9
--- /dev/null
+++ b/myrealigner.c
@@ -0,0 +1,1332 @@
+// J Comput Biol. 1997 Fall;4(3):369-83.
+// ReAligner: a program for refining DNA sequence multi-alignments.
+// Anson EL1, Myers EW.
+//
+// we modified Eric & Myers ReAligner, allowed by Myers on 10/22/2012
+// written by Takamasa Imai
+// E-mail: imai at cb.k.u-tokyo.ac.jp
+
+// Tunable parameters of the banded realignment.
+double MIX = 0.5;
+int BandSize = 8;
+int minimum_ballots = 3;
+int maximum_ballots = 11;
+
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <unistd.h>
+#include "col2fqcell.h"
+
+#define FoS 16
+
+// Capacity limits; exceeding them aborts with a message telling the user
+// to rebuild with a larger value.
+int MaxDepth = FoS*32;
+//int MaxDepth = 512;
+//int MaxDepth = 64;// for large
+//int MaxFragSize = 32*4*1024*1024;// for large
+int MaxFragSize = 131072;
+int MaxFragNum = 1024*1024;// max # of reads
+int NumIts = 0;
+int NumCons = 0;
+
+int DoneFlag = 0;
+
+int Rows;
+#define BUFSIZE 4096
+// Shared line/scratch buffers allocated at startup (see main, not shown here).
+char * buffer;
+char * sbuf;
+char * qbuf;
+char * chrname;
+int chr_is=0;
+int comment_is=0;
+
+// Command-line mode flags.
+int opt_fastq=0;
+int opt_qvs=0;
+int opt_consensus=0;
+int opt_vertical=0;
+
+
+// Scratch arrays used when emitting the consensus/alignment.
+base_t * buf4print;
+char * buf4printSeq;
+char * buf4printDepth;
+char * buf4printQual;
+char * base_exists;
+char * buf4printComment;
+
+double distinguishable=0.70;
+
+// One aligned cell: a base (+qv) doubly linked along its read (prev/next)
+// and along its column (up/down).
+typedef struct fragEl{
+ base_t el;// typedef struct base_t{char base; char qv;}base_t;
+// u
+// p.n
+// d
+ struct fragEl *prev;
+ struct fragEl *next;
+ struct fragEl *up;
+ struct fragEl *down;
+}fragEl;
+
+// One alignment column: per-symbol counts, depth, and the sentinel head
+// ('frags') of the vertical cell list; columns form a doubly linked list.
+typedef struct colStruc{
+ int colInf[6];// Information, not Infimum // colInf[.] <- the number of '.'
+ int colDepth;
+ double preCalc[6];
+// p.n
+// f
+ struct colStruc *prev;
+ struct colStruc *next;
+ fragEl frags;
+}colStruc;
+
+// One read ("fragment"): its cell list (ofrag..lastEl), encoded/ascii
+// copies of its bases, and its first/last columns in the alignment.
+typedef struct{
+ fragEl *ofrag;
+ fragEl *lastEl;
+ base_t *efrag;//encoded
+ base_t *afrag;//ascii
+ int len;
+ colStruc *scol;
+ colStruc *ecol;
+ int row;
+}frag;
+
+frag *Frags;// reads
+colStruc *FirstCol;
+colStruc *LastCol;
+
+// Arena of fragEl cells: FreeElList points at a preallocated array and
+// el_index is the next unused slot (see getEl/freeEl below).
+fragEl *FreeElList=NULL;
+unsigned long long el_index=0ull;
+unsigned long long elsize;
+
+// Return the next unused fragEl from the preallocated arena, reset to an
+// empty cell (base -1, qv ' ', all links NULL). The reallocation code
+// below the exit(1) is intentionally dead: the arena is sized up front
+// and exhausting it is treated as a fatal internal error.
+fragEl *getEl(){
+ if(el_index >= elsize){
+ fprintf(stderr,"never come here (getEl() in myrealigner)\n");
+ exit(1);
+ fprintf(stderr, "no el is left\n");
+ fprintf(stderr,"reallocate: el\n");
+ // realloc
+ unsigned long long tmpsize = elsize * 2ull;
+ if(tmpsize > elsize){
+ elsize = tmpsize;
+ }
+ else{
+ fprintf(stderr, "elsize overflow\n");
+ abort();
+ }
+ fragEl * tmp = (fragEl*)malloc(sizeof(fragEl)*elsize);
+ if(tmp == NULL){
+ fprintf(stderr, "cannot REallocate memory: fragEl\n");
+ abort();
+ }
+ else{
+ FreeElList = tmp;
+ }
+ }
+ if(&FreeElList[el_index] == NULL){
+ fprintf(stderr, "FreeElList is NULL\n");
+ abort();
+ }
+ FreeElList[el_index].el.base = -1;
+ FreeElList[el_index].el.qv = ' ';
+ FreeElList[el_index].prev = NULL;
+ FreeElList[el_index].next = NULL;
+ FreeElList[el_index].up = NULL;
+ FreeElList[el_index].down = NULL;
+ return &FreeElList[el_index++];
+}
+
+// Thread a cell onto FreeElList. NOTE(review): getEl() allocates by index
+// from the arena and never consults this list, so freed cells are not
+// actually reused -- this looks like a leftover of an earlier free-list
+// design; confirm before relying on it.
+int freeEl(fragEl * ptr){
+ ptr->next = FreeElList;
+ FreeElList = ptr;
+ return 0;
+}
+
+colStruc *FreeColList=NULL;
+int col_index=0;
+unsigned long long colsize;
+
+// Return the next unused column from the preallocated arena, zeroed and
+// with its vertical sentinel ('frags') self-linked. As in getEl(), the
+// reallocation code after exit(1) is intentionally dead.
+colStruc *getCol(){
+ if(col_index >= colsize){
+ fprintf(stderr,"never come here (getCol() in myrealigner)\n");
+ exit(1);
+ fprintf(stderr,"no col is left\n");
+ // realloc
+ fprintf(stderr,"reallocate: col\n");
+ unsigned long long tmpsize = colsize * 2ull;
+ if(tmpsize > colsize){
+ colsize = tmpsize;
+ }
+ else{
+ fprintf(stderr, "colsize overflow\n");
+ abort();
+ }
+ colStruc * tmp = (colStruc*)malloc(sizeof(colStruc)*colsize);
+ if(tmp == NULL){
+ fprintf(stderr, "cannot REallocate memory: colStruc\n");
+ abort();
+ }
+ else{
+ FreeColList = tmp;
+ }
+ }
+ int i;
+ if(&FreeColList[col_index] == NULL){
+ fprintf(stderr, "FreeColList is NULL\n");
+ abort();
+ }
+ for (i=0; i < 6; ++i)
+ FreeColList[col_index].colInf[i] = 0;
+ FreeColList[col_index].colDepth = 0;
+ FreeColList[col_index].preCalc[5] = 0.0;
+ FreeColList[col_index].next = NULL;
+ FreeColList[col_index].prev = NULL;
+ FreeColList[col_index].frags.up = &(FreeColList[col_index].frags);
+ FreeColList[col_index].frags.down = &(FreeColList[col_index].frags);
+ FreeColList[col_index].frags.prev = (fragEl*) &(FreeColList[col_index]);
+ return &(FreeColList[col_index++]);
+}
+
+// Thread a column onto FreeColList. NOTE(review): same caveat as freeEl()
+// -- getCol() allocates by index and never reads this list.
+int freeCol(colStruc * ptr){
+ ptr->next = FreeColList;
+ FreeColList = ptr;
+ return 0;
+}
+
+int encode[128];
+int * row;
+int maxNumFrags=-1;
+FILE * input;
+
+// Read one vertically-formatted multi-alignment unit from 'input' and build
+// the column/fragment linked structure (FirstCol..LastCol, Frags[], Rows).
+// Input lines: '%name' (unit header), '#comment', then one line per column
+// where character r belongs to read-row r (with optional tab-separated qv
+// string when opt_qvs). A zero-depth column ends the unit. Each read is
+// padded with BandSize empty cells on both sides for the banded realigner.
+// Returns the number of reads loaded; sets DoneFlag at EOF.
+int readFrags(){
+ char *s;
+ int i, r, numFrags;
+ colStruc *curCol;
+ fragEl *elPtr, *telPtr;
+
+ for (i=0; i < MaxDepth; ++i)
+ row[i] = -1;
+ //buffer[BUFSIZE-1] = '\0';
+ numFrags = 0;
+ // telPtr: shared BandSize-long chain of empty cells appended to every
+ // read's tail
+ elPtr = telPtr = getEl();
+ elPtr->el.base = -1;
+ elPtr->el.qv = ' ';
+ for(i=1; i < BandSize; ++i){
+ // ...epeqer- -> ...epeqer--------
+ // bandsize
+ // t t
+ // e e
+ // l l
+ // p p
+ // t t
+ // r r
+ elPtr->next = getEl();
+ elPtr = elPtr->next;
+ elPtr->el.base = -1;
+ elPtr->el.qv = ' ';
+ }
+
+ FirstCol = curCol = getCol();
+
+ Rows = -1;
+ while((s=fgets(sbuf, 2*MaxFragSize-1, input)) != NULL){
+ i = strlen(sbuf);
+ if(i == 0){
+ fprintf(stderr, "strange input format\n");
+ //return;
+ }
+ else if(sbuf[i-1] == '\n'){
+ sbuf[--i] = '\0';
+ }
+ else{
+ fprintf(stderr,"Each input line must not be more than %d chars\n",BUFSIZE-1);
+ abort();
+ }
+ if(sbuf[0] == '%'){
+ // unit header: remember the chromosome/read name (once per unit)
+// if(opt_fastq != 1){
+// printf("%s\n",sbuf);
+// }
+// else{
+ if(chr_is==0){
+ strcpy(chrname, &sbuf[1]);
+ chr_is=1;
+ }
+ else{
+ fprintf(stderr, "soutei gai 100\n");
+ exit(1);
+ }
+ //printf("@%s\n",&sbuf[1]);
+// }
+ continue;
+ }
+ else if(sbuf[0] == '#'){
+ // unit comment line (kept for re-emission in print_dfq)
+ if(chr_is && comment_is == 0){
+ strcpy(buf4printComment,&sbuf[1]);
+ comment_is=1;
+ }
+ else{
+ fprintf(stderr, "soutei gai 101\n");
+ exit(1);
+ }
+ continue;
+ }
+ if(opt_qvs)
+ {
+ // split "<bases>\t<qvs>" into buffer/qbuf; both must be equal length
+ if(sbuf[0] == '\0'){
+ buffer[0] = qbuf[0] = '\0';
+ }
+ else{
+ int i;
+ for(i=0; sbuf[i] != '\t'; ++i){
+ buffer[i] = sbuf[i];
+ }
+ buffer[i] = '\0';
+ int j;
+ for(j=0,++i; sbuf[i] != '\0'; ++j,++i){
+ qbuf[j] = sbuf[i];
+ }
+ qbuf[j] = '\0';
+ if(strlen(buffer) != strlen(qbuf)){
+ fprintf(stderr, "the input format is broken\n");
+ fprintf(stderr, "#%s#\n",sbuf);
+ fprintf(stderr, "#%s#\n",buffer);
+ fprintf(stderr, "#%s#\n",qbuf);
+ abort();
+ }
+ if((int)strlen(buffer)>MaxDepth){
+ fprintf(stderr,"too thick: depth %d\n",MaxDepth);
+ fprintf(stderr,"%s\n",sbuf);
+ fprintf(stderr,"%s\n",buffer);
+ fprintf(stderr,"%s\n",qbuf);
+ abort();
+ }
+ }
+ }
+ else{
+ strcpy(buffer,sbuf);
+ //strcpy(qbuf,sbuf);
+ int i;
+ for(i=0; buffer[i] != '\0'; ++i){
+ qbuf[i] = '5';// dummy value
+ }
+ if((int)strlen(buffer)>MaxDepth){
+ fprintf(stderr,"too thick: depth %d\n",MaxDepth);
+ fprintf(stderr,"%s\n",sbuf);
+ fprintf(stderr,"%s\n",buffer);
+ fprintf(stderr,"%s\n",qbuf);
+ abort();
+ }
+ //fprintf(stdout, "#%s#\n",buffer);
+ //fprintf(stdout, "#%s#\n",qbuf);
+ }
+
+ // one input line == one new alignment column
+ r = 0;
+ curCol->next = getCol();
+ curCol->next->prev = curCol;
+ curCol = curCol->next;
+ int j;
+ for (j=0; buffer[j] != '\0'; ++j){
+ if(buffer[j] != ' '){
+ if(r > Rows)
+ Rows = r;
+ if(r >= MaxDepth){
+ fprintf(stderr, "too deep. Change the MaxDepth value in your realigner.c\n");
+ abort();
+ }
+ if(row[r] == -1){
+ // a new read starts in this row: set it up with BandSize
+ // leading empty cells
+ row[r] = numFrags;// id in the unit
+ Frags[numFrags].scol = curCol;// read's starting column?
+ Frags[numFrags].row = r;
+ if(numFrags >= maxNumFrags){
+ Frags[numFrags].efrag = (base_t *) malloc(MaxFragSize*sizeof(base_t));
+ if(Frags[numFrags].efrag == NULL){
+ fprintf(stderr, "cannot allocate memory: Frags[numFrags].efrag\n");
+ abort();
+ }
+ Frags[numFrags].afrag = (base_t *) malloc(MaxFragSize*sizeof(base_t));
+ if(Frags[numFrags].afrag == NULL){
+ fprintf(stderr, "cannot allocate memory: Frags[numFrags].afrag\n");
+ abort();
+ }
+ }
+ Frags[numFrags].len = 0;
+ elPtr = getEl();
+ for(i=0; i < BandSize; ++i){
+ // e1e2e3... -> --------e1e2e3...
+ // bandsize b a n d s i z e
+ // t p
+ // p t
+ // t r
+ // r
+ // in the loop 'for (j=0, tptr=ptr=pf->ofrag; j < BandSize; ++j)...' in reAlign
+ elPtr->el.base = -1;
+ elPtr->el.qv = ' ';
+ elPtr->next = getEl();
+ elPtr->next->prev = elPtr;
+ elPtr = elPtr->next;
+ }
+ Frags[numFrags].ofrag = Frags[numFrags].lastEl = elPtr;
+ ++numFrags;
+ if (numFrags == MaxFragNum){
+ fprintf(stderr, "too many frags. Change the MaxFragNum value in your realigner.c\n");
+ abort();
+ }
+ }
+ else
+ elPtr = Frags[row[r]].lastEl;
+ if((i = encode[(int)buffer[j]]) < 0){
+ fprintf(stderr,"Illegal char in input line %d\n",r);
+ exit(0);
+ }
+ // record the cell in its column (vertical list) and its read
+ ++curCol->colInf[i];
+ ++curCol->colDepth;
+ elPtr->el.base = i;
+ elPtr->el.qv = qbuf[j];
+ elPtr->up = &(curCol->frags);
+ elPtr->down = curCol->frags.down;
+ curCol->frags.down = elPtr;
+ elPtr->down->up = elPtr;
+ elPtr->next = Frags[row[r]].lastEl = getEl();
+ Frags[row[r]].lastEl->prev = elPtr;
+ if(i != 0){// not '-'
+ Frags[row[r]].afrag[Frags[row[r]].len].base = buffer[j];
+ Frags[row[r]].afrag[Frags[row[r]].len].qv = qbuf[j];
+ Frags[row[r]].efrag[Frags[row[r]].len].base = i;
+ Frags[row[r]].efrag[Frags[row[r]].len].qv = qbuf[j];
+ Frags[row[r]].len++;
+ if(Frags[row[r]].len >= MaxFragSize){
+ fprintf(stderr, "too long frag. Change the MaxFragSize value in your realigner.c\n");
+ abort();
+ }
+ }
+ }
+ else if(row[r] != -1){
+ // blank in an active row: the read in this row has ended
+ Frags[row[r]].lastEl->el.base = -1;
+ Frags[row[r]].lastEl->el.qv = ' ';
+ Frags[row[r]].lastEl->next = telPtr;// terminal pointer (empty)
+ Frags[row[r]].lastEl = Frags[row[r]].lastEl->prev;
+ Frags[row[r]].ecol = curCol->prev;
+ row[r] = -1;
+ }
+ ++r;
+ }
+ while (r <= Rows){
+ // padding
+ if(row[r] != -1){
+ Frags[row[r]].lastEl->el.base = -1;
+ Frags[row[r]].lastEl->el.qv = ' ';
+ Frags[row[r]].lastEl->next = telPtr;
+ Frags[row[r]].lastEl = Frags[row[r]].lastEl->prev;
+ Frags[row[r]].ecol = curCol->prev;
+ row[r] = -1;
+ }
+ ++r;
+ }
+ if(curCol->colDepth == 0){
+ //fprintf(stderr, "1 unit loaded\n");
+ break;
+ }
+ }
+ if(s == NULL)
+ DoneFlag = 1;
+ curCol->next = LastCol = getCol();
+ LastCol->prev = curCol;
+ ++Rows;
+ if(Rows>=MaxDepth*2){
+ fprintf(stderr, "souteigai 2 (2) chr:%s\n",chrname);
+ //abort();
+ exit(1);
+ }
+ return numFrags;// num of the reads in this unit
+}
+
+fragEl **curEl;// per-row cursor into each fragment's element list while printing (printAlign)
+base_t **curChar;// per-row cursor into each fragment's raw base/qv array (afrag) while printing
+
+// Emit one record in "dfq" (fastq-like) form:
+//   @chr / seq / "+\t"depth"\t"comment / qual
+// base_exists[i]==1 marks columns where the base read contributed a base;
+// the trailing loop only warns about interior columns with no base.
+// NOTE(review): the from/to trimming below is computed but no longer used by
+// the active output path (the trimming printf block is commented out).
+void print_dfq(char *chr, char *seq, char *depth, char *qual, char *base_exists, char *comment){
+ int i;
+ int limit = strlen(seq);
+ //int limit = strlen(base_exists);// 0='\0'
+ int from=0;
+ int to=limit-1;
+ // first column backed by a real base of the base read
+ for(i=0; i<limit; ++i){
+ if(base_exists[i]==1){
+ from = i;
+ break;
+ }
+ }
+ // last column backed by a real base of the base read
+ for(i=limit-1; i>=0; --i){
+ if(base_exists[i]==1){
+ to = i;
+ break;
+ }
+ }
+ /*
+ if(to>=from){
+ char tmp;
+ printf("@%s\n",chr);
+
+ tmp = seq[to+1];
+ seq[to+1] = '\0';
+ printf("%s\n",&seq[from]);
+ seq[to+1] = tmp;
+
+ tmp = depth[to+1];
+ depth[to+1] = '\0';
+ printf("+\t%s\n",&depth[from]);
+ depth[to+1] = tmp;
+
+ tmp = qual[to+1];
+ qual[to+1] = '\0';
+ printf("%s\n",&qual[from]);
+ qual[to+1] = tmp;
+ }
+ else{
+ fprintf(stderr, "buggy %s %d %d %d\n",chr, from, to, limit);
+ abort();
+ }
+ */
+ printf("@%s\n",chr);
+ printf("%s\n",seq);
+ printf("+\t%s\t%s\n",depth,comment);
+ //printf("+\t%s\n",depth);
+ printf("%s\n",qual);
+ // warn on interior gaps in the base read's coverage
+ for(i=from+1; i<to; ++i){
+ if(base_exists[i]!=1){
+ fprintf(stderr, "WARNING: base does not exist: %d %s\n", i+1, chr);
+ }
+ }
+ return;
+}
+
+// Print the realigned unit. First removes columns that contain only gaps,
+// then walks columns left to right, collecting one output cell per active
+// row into buf4print, and emits either the vertical/consensus text form or
+// (with -f) a consensus dfq record via print_dfq.
+// Uses the module-level cursors curEl/curChar and globals FirstCol/LastCol,
+// Frags, Rows, chrname, buf4print* and the opt_* flags.
+void printAlign(int numFrags){
+ int row, i;
+ colStruc *col, *ecol;
+ fragEl *ptr;
+
+ col = FirstCol;
+ while(col != LastCol){
+ // chop '-' only columns
+ if(col->colDepth == col->colInf[0]){
+ if(col != FirstCol)
+ col->prev->next = col->next;
+ else
+ FirstCol = col->next;
+ col->next->prev = col->prev;
+ ptr=col->frags.down;
+ // unlink every element of this column from its fragment's chain
+ for(i=0; i < col->colDepth; ++i){
+ ptr->prev->next = ptr->next;
+ ptr->next->prev = ptr->prev;
+ ptr = ptr->down;
+ }
+ ecol = col;
+ col = ecol->next;
+ }
+ else{
+ col->frags.el.base = 0;// initialize
+ col = col->next;
+ }
+ }
+ // mark the columns where at least one fragment starts
+ for (i=0; i < numFrags; ++i)
+ Frags[i].scol->frags.el.base = 1;// TODO
+ for(i=0; i < Rows; ++i){
+ curEl[i] = NULL;
+ curChar[i] = NULL;
+ }
+ col = FirstCol;
+ int be_idx=0;// write index into base_exists
+ int b4p_idx=0;// write index into buf4printSeq/Depth/Qual
+ if(opt_fastq != 1){
+ printf("%%%s\n",chrname);
+ printf("#%s\n",buf4printComment);
+ }
+ while(col != LastCol){
+ // activate fragments whose first column is this one
+ if(col->frags.el.base){
+ for(i=0; i < numFrags; ++i){
+ if(col == Frags[i].scol){
+ if(curEl[Frags[i].row] == NULL){
+ curEl[Frags[i].row] = Frags[i].ofrag;
+ curChar[Frags[i].row] = Frags[i].afrag;
+ }
+ else{
+ // the assigned row is still busy: open an extra row
+ curEl[Rows] = Frags[i].ofrag;
+ curChar[Rows] = Frags[i].afrag;
+ ++Rows;
+ if(Rows>=MaxDepth*2){
+ fprintf(stderr, "souteigai 2 (1) chr %s\n",chrname);
+ exit(1);
+ //abort();
+ }
+ }
+ }
+ }
+ }
+ int pi=0;
+ // build one vertical column of output cells, one per row
+ for(row=0; row < Rows; ++row){
+ if(curEl[row] == NULL){
+ buf4print[pi].base = ' ';
+ buf4print[pi].qv = ' ';
+ ++pi;
+ }
+ else if(curEl[row]->el.base == -1){
+ // fragment terminator reached: free the row
+ buf4print[pi].base = ' ';
+ buf4print[pi].qv = ' ';
+ ++pi;
+ curEl[row] = NULL;
+ curChar[row] = NULL;
+ }
+ else{
+ if(curEl[row]->el.base == 0){
+ buf4print[pi].base = '-';
+ buf4print[pi].qv = curEl[row]->el.qv;
+ ++pi;
+ }
+ else{
+ // real base: take it from the raw character array
+ buf4print[pi].base = (*curChar[row]).base;
+ if(buf4print[pi].base == '\0'){
+ fprintf(stderr, "base 0\n");
+ abort();
+ }
+ buf4print[pi].qv = (*curChar[row]).qv;
+ ++pi;
+ ++curChar[row];
+ }
+ curEl[row] = curEl[row]->next;
+ }
+ }
+ buf4print[pi].base='\0';
+ buf4print[pi].qv='\0';
+ if(pi == 0){
+ fprintf(stderr, "pi 0\n");
+ abort();
+ }
+
+ if(opt_fastq != 1){
+ //printf("%s\n",buf4print);
+ if(opt_consensus==1){
+ char consensus,tmp2,tmp3;
+ col2fqcell(buf4print,&consensus,&tmp2,&tmp3,maximum_ballots,minimum_ballots,distinguishable);
+ putchar(consensus);
+ putchar('\t');
+ }
+ int tmp;
+ for(tmp = 0; tmp<pi; ++tmp){
+ putchar(buf4print[tmp].base);
+ }
+ if(!opt_vertical){
+ putchar('\t');
+ for(tmp = 0; tmp<pi; ++tmp){
+ putchar(buf4print[tmp].qv);
+ }
+ }
+ putchar('\n');
+ }
+ else{
+ // -f: fold this column into the consensus sequence buffers
+ col2fqcell(buf4print, buf4printSeq+b4p_idx, buf4printDepth+b4p_idx, buf4printQual+b4p_idx, maximum_ballots, minimum_ballots,distinguishable);
+ if(buf4print[0].base != ' '){
+ base_exists[be_idx]=1;
+ }
+ else{
+ base_exists[be_idx]=0;
+ }
+ if(buf4printSeq[b4p_idx] != ' '){
+ ++b4p_idx;
+ }
+ else{
+ // The base of the base read was '-', and col2fqcell did not elect.
+ // We ignore this '-'.
+ }
+ ++be_idx;
+ }
+ col = col->next;
+ }
+ buf4printSeq[b4p_idx] = '\0';
+ buf4printDepth[b4p_idx] = '\0';
+ buf4printQual[b4p_idx] = '\0';
+ base_exists[be_idx] = '\0';
+
+ if(opt_fastq != 1){
+ printf("\n");
+ }
+ else{
+ print_dfq(chrname,buf4printSeq,buf4printDepth,buf4printQual,base_exists,buf4printComment);
+ }
+ chr_is=0;
+ comment_is=0;
+ return;
+}
+
+// Backtrack codes stored in bmat by reAlign.
+#define DEL 1
+#define SUB 0
+#define INS 2
+
+double *mat;// one row of the banded DP matrix (reused across rows)
+char *bmat;// backtrack matrix, one code per banded cell
+int *bmatPtr;// bmatPtr[i]: offset of row i inside bmat
+unsigned long long BmatSize;// capacity of bmat in cells
+int *shift;// shift[i]: band offset between DP row i-1 and row i
+
+/* reAlign realigns the fnum fragment against the alignment of the rest of the fragments */
+// fnum: fragment number = id of the fragment (given in readFrags)
+// Strips the fragment out of the column structure, runs a banded dynamic
+// program (band half-width BandSize) of the fragment's encoded bases (efrag)
+// against the column profiles, then traces the minimum-cost path back and
+// re-threads the fragment's elements (SUB/DEL into existing columns, INS by
+// creating a new column). Returns 0.
+int reAlign (int fnum){
+ int i, j, m, n;
+ char *cptr;
+ base_t * fel;
+ double min, dval, sval, ival;
+ double *lval, *tval;
+ frag *pf;// pointer of fragment? presenting fragment?
+ int mlen, max, mark;
+ colStruc *col, *minCol, *tcol, *mstart, *mstop;
+ fragEl *ptr, *tptr;
+ fragEl *fptr;
+
+ pf = &Frags[fnum];
+ mark = 0;
+
+ /* Strip fragment from structure */
+ col = pf->scol;// starting column
+ for(ptr=pf->ofrag; ptr->el.base != -1; ptr=ptr->next){// starting element to last element
+ ptr->up->down = ptr->down;
+ ptr->down->up = ptr->up;
+ --(col->colDepth);
+ --(col->colInf[(int)ptr->el.base]);
+ col = col->next;
+ }
+
+ mstart = pf->scol;
+ mlen = 1+2*BandSize;// current band width in DP cells
+ // set start column
+ for(n=0; n < BandSize; ++n){// go back to the n previous colum
+ if(mstart == FirstCol){// if you reach FirstColumn, then add a new FirstCol
+ FirstCol = mstart->prev = getCol();
+ FirstCol->next = mstart;
+ }
+ mstart = mstart->prev;
+ }
+ // done
+
+ // set terminals of a starting element
+ // ptr: right, tpter: left
+ for(j=0, tptr=ptr=pf->ofrag; j < BandSize; ++j){
+ // move to next base, not '-'
+ ptr = ptr->next;
+ while(ptr->el.base == 0){// '-'
+ ++mlen;
+ ptr = ptr->next;
+ }
+ tptr = tptr->prev;// there is no insertion, because pf->ofrag is the terminal of the read
+ }
+ // done
+ // mstart ~ tptr
+ // but mstart does not have el, so it needs tptr
+
+ // set score matrix to 0.0
+ // mat[0] = MaxFragSize
+ // mat[0] is always MaxFragSize
+ for(j=1; j <= mlen; ++j)
+ mat[j] = 0.0;
+
+ // set stop column. stop column is column next to last column
+ for(mstop=pf->ecol->next,j=0; mstop!=LastCol && j<BandSize; ++j,mstop=mstop->next);
+
+ // precalculate deltas of the dynamic programming matrix
+ for(col = mstart; col != mstop; col = col->next){
+ m = col->colDepth - col->colInf[5];// depth ignoring 'N's
+ if(m != 0){
+ max = col->colInf[0];
+ for(i=1; i < 5; ++i)
+ if(max < col->colInf[i])
+ max = col->colInf[i];
+ min = m;
+ // cost of aligning symbol i to this column: blend of frequency
+ // term (MIX) and majority term (1-MIX)
+ for(i=0; i < 5; ++i){
+ col->preCalc[i] = MIX*(1.0-(double)col->colInf[i]/min);
+ if(col->colInf[i] != max)
+ col->preCalc[i] += (1.0-MIX);
+ }
+ }
+ else{
+ for(i=0; i < 5; ++i)
+ col->preCalc[i] = 0.0;
+ }
+ }
+ // done
+
+ fel = pf->efrag;// (a pointer to) Fragment's ELement? // encoded character array // this array does not include '-'
+ for(i = 1; i <= pf->len; ++i, ++fel){// pf->len is the length of pf->efrag, not including '-' length
+ // one '-' lengthens mlen one
+ ptr = ptr->next;
+ while(ptr->el.base == 0){// '-'
+ ++mlen;
+ mat[mlen] = MaxFragSize;// out of the band // infinity penalty
+ ptr = ptr->next;
+ }
+ mat[mlen+1] = MaxFragSize;// out of the band // infinity penalty
+ shift[i] = 1;// xxxxxxxxxxx <- i iteration
+ // xxxxxxxxxxx <- i+1 iteration
+ // |--|
+ // `-shift
+ //
+ // before above,
+ // insertions are thought as mismatches like below
+ //
+ // ss -> s-s .
+ // ss -> s-s .
+ // ss -> s-s .
+ // q \ -> \ .
+ // q | -> \ .
+ // q \ -> \ .
+
+ // one '-' shortens mlen one
+ // mstart (col = mstart) is important
+ while(tptr->el.base == 0){// '-'
+ --mlen;
+ ++(shift[i]);// length tptr moved when tptr->el reached next base, not '-'
+ tptr = tptr->next;
+ mstart = mstart->next;
+ }
+ tptr = tptr->next;// preparing for the next loop
+ col = mstart;
+ mstart = mstart->next;// preparing for the next loop
+ bmatPtr[i] = mark;// the boundary of bmat
+ cptr = &bmat[mark];// backtrack matrix
+ mark += mlen;// preparing for the next loop
+ if((unsigned long long)mark > BmatSize){
+ fprintf(stderr, "too large backtrack matrix. Change the BmatSize value in your realigner\n");
+ abort();
+ }
+ // fill the dynamic programming matrix and get the path (cptr)
+ tval = mat;
+ lval = &mat[shift[i]];
+ // xxtxxxlxxxx <- i iteration
+ // xxtxxxlxxxx <- i+1 iteration
+ // |--|
+ // `-shift
+ for(j=1; j <= mlen && col != LastCol; ++j, col=col->next){
+ // (*tval++) = (*(tval++)). pointer moves, not value
+ dval = (*tval++) + col->preCalc[0];// Frags[fnum][.] = '-', a deletion for the consensus
+ sval = (*lval++) + col->preCalc[(int)(fel->base)];// a match or substitution
+ if (fel->base != 5)
+ ival = *lval+1.0;// *lval+(1-MIX)*1+MIX*1
+ else
+ ival = *lval;// 'N' means it has no penalty
+ if(sval <= dval && sval <= ival){
+ mat[j] = sval;
+ *cptr = SUB;
+ }
+ else if(dval <= ival){
+ mat[j] = dval;
+ *cptr = DEL;
+ }
+ else{
+ mat[j] = ival;
+ *cptr = INS;
+ }
+ ++cptr;
+ }
+ }
+
+ // find the cell of the last DP row to start the traceback from
+ cptr = &bmat[bmatPtr[pf->len]];// cptr is 0-origin
+ for(n=1,col=mstart->prev;col!=pf->ecol && n<=mlen; ++n,col=col->next);// n>bandwidth, because tptr+bandwidth <= pf->ecol
+
+ min = mat[n];
+ minCol = col;
+ cptr = &bmat[bmatPtr[pf->len]];
+ j = n+1;
+ col = minCol->next;
+ tcol = minCol->prev;// temporary column? terminal column?
+ for(i=n-1; i > 0; --i){
+ if(j <= mlen && col != LastCol){
+ if(mat[j] < min || (mat[j] == min && cptr[n-1]==DEL)){// score is the strict minimum or (minumum & not a terminal deletion
+ n = j;
+ min = mat[j];
+ minCol = col;
+ }
+ ++j;
+ col = col->next;
+ }
+ // else{ empty mat[j];}
+ if (mat[i] < min || (mat[i] == min && cptr[n-1]==DEL)){
+ n = i;
+ min = mat[i];
+ minCol = tcol;
+ }
+ tcol = tcol->prev;
+ }
+ // now, mat[n] is the strict minimum or not strict minimum but not a terminal deletion
+
+ // let's traceback!
+ ptr = pf->lastEl;
+ mlen = j = n-1;// j is an offset from bmatPtr[pf->len]
+ i = pf->len;
+ fel = &(pf->efrag[pf->len-1]);
+ col = minCol;
+ while(i > 0){
+ if(bmat[bmatPtr[i]+j] == SUB){
+ // consume one fragment base into the current column
+ ptr->el.base = m = fel->base;
+ ptr->el.qv = fel->qv;
+ --fel;
+ ++(col->colDepth);
+ ++(col->colInf[m]);
+ ptr->up = &(col->frags);
+ ptr->down = col->frags.down;
+ ptr->up->down = ptr;
+ ptr->down->up = ptr;
+ col = col->prev;
+ j = j+shift[i]-1;
+ --i;
+ }
+ else if(bmat[bmatPtr[i]+j] == DEL){
+ // emit a gap in the fragment; interpolate its qv from neighbours
+ ptr->el.base = 0;
+ if(ptr->prev->el.qv >= '!'){
+ if(ptr->next->el.qv >= '!'){
+ ptr->el.qv = (ptr->prev->el.qv+ptr->next->el.qv)/2;//((a-k)+(b-k))/2+k = (a+b)/2
+ }
+ else{
+ ptr->el.qv = ptr->prev->el.qv;
+ }
+ }
+ else if(ptr->next->el.qv >= '!'){
+ ptr->el.qv = ptr->next->el.qv;
+ }
+ else{
+ fprintf(stderr, "sth buggy 200\n");
+ abort();
+ }
+ ++(col->colDepth);
+ ++(col->colInf[0]);
+ ptr->up = &(col->frags);
+ ptr->down = col->frags.down;
+ ptr->up->down = ptr;
+ ptr->down->up = ptr;
+ col = col->prev;
+ --j;
+ }
+ else{
+ // INS: create a brand-new column right after col for this base
+ tcol = getCol();
+ tcol->prev = col;
+ tcol->next = col->next;
+ col->next->prev = tcol;
+ col->next = tcol;
+ ++(tcol->colDepth);
+ ptr->el.base = m = fel->base;
+ ptr->el.qv = fel->qv;
+ --fel;
+ ++(tcol->colInf[m]);
+ ptr->down = ptr->up = &(tcol->frags);// frags is the fragEl head of the column
+ tcol->frags.down = tcol->frags.up = ptr;
+ tcol->frags.prev = (fragEl *) tcol;
+ fptr = col->frags.down;
+ // every other fragment spanning this position gets a gap element
+ for(n=0; n < col->colDepth; ++n){
+ if(fptr->next->el.base != -1){
+ ++(tcol->colDepth);
+ ++(tcol->colInf[0]);
+ tptr = getEl();
+ tptr->prev = fptr;
+ tptr->next = fptr->next;
+ tptr->next->prev = tptr;
+ tptr->prev->next = tptr;
+ tptr->el.base = 0;
+ tptr->el.qv = (fptr->el.qv+fptr->next->el.qv)/2;
+ tptr->up = &(tcol->frags);
+ tptr->down = tcol->frags.down;
+ tptr->up->down = tptr;
+ tptr->down->up = tptr;
+ }
+ fptr = fptr->down;
+ }
+ j = j+shift[i];
+ --i;
+ }
+ if(ptr == pf->ofrag && i > 0){
+ // ran out of pre-allocated elements at the head: prepend one
+ pf->ofrag = getEl();
+ pf->ofrag->prev = ptr->prev;
+ ptr->prev->next = pf->ofrag;
+ ptr->prev = pf->ofrag;
+ pf->ofrag->next = ptr;
+ }
+ ptr = ptr->prev;
+ }
+ pf->ofrag = ptr->next;
+ // mark any leftover head elements as terminators
+ while(ptr->el.base != -1){
+ ptr->el.base = -1;
+ ptr->el.qv = ' ';
+ ptr = ptr->prev;
+ }
+ if(col != NULL)
+ pf->scol = col->next;
+ else
+ pf->scol = FirstCol;
+ if (bmat[bmatPtr[pf->len]+mlen] == INS)// when last column is an insertion, the inserted column is the last column(pf->ecol)
+ pf->ecol = (colStruc *) pf->lastEl->down->prev;
+ else
+ pf->ecol = minCol;
+
+ return 0;
+}
+
+int opt_not_realign=0;
+
+// Print the usage summary to stderr and terminate with exit status 1.
+void useErr(char *name)
+{
+ static const char *fixed_lines[] = {
+ "\t[-w (bandwidth): (default: 8)]\n",
+ "\t[-m (mix): (1.0-mix)*delta_a+mix*delta_c (default: 0.5)]\n",
+ "\t[-q: use quality values if exist in the input]\n",
+ "\t[-f: outputs in fastq]\n",
+ "\t[-b (the minimum of ballots): decides whether to elect or not (default: 3)]\n",
+ "\t[-B (the maximum of ballots): scales quality values (default: 11)]\n",
+ "\t[-c: outputs consensus bases in vertical like format]\n",
+ "\t[-v: outputs only realigned bases in vertical format]\n",
+ "\t[-n: does NOT realign but take consensus]\n",
+ "\t[-d (0.5-1.0 default:0.7): when voting, if(1st/(1st+2nd) >= d) then change a base else does not]\n",
+ };
+ int k;
+ fprintf(stderr,"usage: %s <in.vertical>\n",name);
+ fprintf(stderr,"\t[-l (max read length): (default: %d)]\n",MaxFragSize);
+ for(k = 0; k < (int)(sizeof(fixed_lines)/sizeof(fixed_lines[0])); ++k)
+ fputs(fixed_lines[k], stderr);
+ exit(1);
+}
+
+// Entry point.
+// Parses command-line options, allocates every global working buffer, then
+// loops: read one alignment unit (readFrags), iteratively realign all reads
+// (reAlign) until the column mismatch score stops improving, and print the
+// unit (printAlign). Finally releases all buffers.
+// Fixes: the -b/-B range error messages wrongly named the -v option; the
+// chrname allocation-failure message said "buffer"; FreeElList/FreeColList
+// were never freed.
+int main(int argc, char **argv){
+ int hitnum=0;// number of argv cells consumed by options
+ {
+ int r;
+ while((r=getopt(argc,argv,"w:m:fqb:B:cnd:vl:")) != -1){
+ switch(r){
+ case 'l':
+ MaxFragSize = atoi(optarg);
+ if(MaxFragSize < 1){
+ fprintf(stderr,"Illegal read length: %d\n",MaxFragSize);
+ useErr(argv[0]);
+ }
+ hitnum+=2;
+ break;
+ case 'w':
+ BandSize = atoi(optarg);
+ if(BandSize < 1){
+ fprintf(stderr,"Illegal band size\n");
+ useErr(argv[0]);
+ }
+ hitnum+=2;
+ break;
+ case 'm':
+ MIX=atof(optarg);
+ if(MIX < 0.0 || MIX > 1.0){
+ fprintf(stderr, "0.0<=mix<=1.0\n");
+ useErr(argv[0]);
+ }
+ hitnum+=2;
+ break;
+ case 'f':
+ opt_fastq=1;
+ hitnum+=1;
+ break;
+ case 'q':
+ opt_qvs=1;
+ hitnum+=1;
+ break;
+ case 'b':
+ minimum_ballots=atoi(optarg);
+ if(minimum_ballots < 1 || minimum_ballots > 1024){
+ fprintf(stderr, "1<=-b(minimum_ballots)<=1024\n");// fix: message wrongly said -v
+ useErr(argv[0]);
+ }
+ hitnum+=2;
+ break;
+ case 'B':
+ maximum_ballots=atoi(optarg);
+ if(maximum_ballots < 1 || maximum_ballots > 1024){
+ fprintf(stderr, "1<=-B(maximum_ballots)<=1024\n");// fix: message wrongly said -v
+ useErr(argv[0]);
+ }
+ hitnum+=2;
+ break;
+ case 'c':
+ opt_consensus=1;
+ hitnum+=1;
+ break;
+ case 'n':
+ opt_not_realign=1;
+ hitnum+=1;
+ break;
+ case 'd':
+ distinguishable = atof(optarg);
+ if(distinguishable < 0.5 || distinguishable > 1.0){
+ fprintf(stderr, "0.5<=distinguishable<=1.0\n");
+ useErr(argv[0]);
+ }
+ hitnum+=2;
+ break;
+ case 'v':
+ opt_vertical=1;
+ opt_fastq=0;
+ hitnum+=1;
+ break;
+ default:
+ useErr(argv[0]);
+ break;
+ }
+ }
+ }
+ if(argc != 2+hitnum){
+ useErr(argv[0]);
+ }
+ // "-" means read the vertical alignment from stdin
+ if(strcmp(argv[1+hitnum],"-") == 0){
+ input = stdin;
+ }
+ else{
+ input = fopen(argv[1+hitnum],"r");
+ if(input == NULL){
+ fprintf(stderr, "cannot open %s\n",argv[1+hitnum]);
+ abort();
+ }
+ }
+// if(opt_not_realign == 1 && opt_fastq != 1){
+// fprintf(stderr, "-n option must be given with -f option\n");
+// return 1;
+// }
+// if(opt_qvs && maximum_ballots<1){
+// fprintf(stderr, "give a value to the -B option\n");
+// useErr(argv[0]);
+// }
+ int i;
+ int numFrags, flag;
+ int score, max, oldScore, n;
+ colStruc *col;
+ fragEl *ep;
+ // allocate all working buffers up front; sizes depend on -l/-w
+ buffer = (char*)malloc(sizeof(char)*BUFSIZE);
+ if(buffer == NULL){
+ fprintf(stderr, "cannot allocate memory: buffer\n");
+ abort();
+ }
+ qbuf = (char*)malloc(sizeof(char)*BUFSIZE);
+ if(qbuf == NULL){
+ fprintf(stderr, "cannot allocate memory: qbuf\n");
+ abort();
+ }
+ sbuf = (char*)malloc(sizeof(char)*2*MaxFragSize);
+ if(sbuf == NULL){
+ fprintf(stderr, "cannot allocate memory: sbuf\n");
+ abort();
+ }
+ chrname = (char*)malloc(sizeof(char)*BUFSIZE);
+ if(chrname == NULL){
+ fprintf(stderr, "cannot allocate memory: chrname\n");// fix: message said "buffer"
+ abort();
+ }
+ buf4print = (base_t*)malloc(sizeof(base_t)*MaxDepth);
+ if(buf4print == NULL){
+ fprintf(stderr, "cannot allocate memory: buf4print\n");
+ abort();
+ }
+ buf4printSeq = (char*)malloc(sizeof(char)*MaxFragSize);
+ if(buf4printSeq == NULL){
+ fprintf(stderr, "cannot allocate memory: buf4printSeq\n");
+ abort();
+ }
+ buf4printDepth = (char*)malloc(sizeof(char)*MaxFragSize);
+ if(buf4printDepth == NULL){
+ fprintf(stderr, "cannot allocate memory: buf4printDepth\n");
+ abort();
+ }
+ buf4printQual = (char*)malloc(sizeof(char)*MaxFragSize);
+ if(buf4printQual == NULL){
+ fprintf(stderr, "cannot allocate memory: buf4printQual\n");
+ abort();
+ }
+ buf4printComment = (char*)malloc(sizeof(char)*MaxFragSize);
+ if(buf4printComment == NULL){
+ fprintf(stderr, "cannot allocate memory: buf4printComment\n");
+ abort();
+ }
+ base_exists = (char*)malloc(sizeof(char)*MaxFragSize);
+ if(base_exists == NULL){
+ fprintf(stderr, "cannot allocate memory: base_exists\n");
+ abort();
+ }
+ row = (int *) malloc(MaxDepth*sizeof(int));
+ if(row == NULL){
+ fprintf(stderr, "cannot allocate memory: row\n");
+ abort();
+ }
+ /* initialize matrices needed for calculations */
+ BmatSize = (unsigned long long)MaxFragSize;
+ BmatSize *=(unsigned long long)(4*BandSize+2);
+ //printf("%llu\n",BmatSize);
+ bmat = (char *) malloc(BmatSize*sizeof(char));
+ if(bmat == NULL){
+ fprintf(stderr, "cannot allocate memory: bmat\n");
+ abort();
+ }
+ mat = (double *) malloc(MaxFragSize*sizeof(double));
+ if(mat == NULL){
+ fprintf(stderr, "cannot allocate memory: mat\n");
+ abort();
+ }
+ mat[0] = MaxFragSize;// sentinel: band boundary carries an "infinite" penalty
+ bmatPtr = (int *) malloc(MaxFragSize*sizeof(int));
+ if(bmatPtr == NULL){
+ fprintf(stderr, "cannot allocate memory: bmatPtr\n");
+ abort();
+ }
+ shift = (int *) malloc(MaxFragSize*sizeof(int));
+ if(shift == NULL){
+ fprintf(stderr, "cannot allocate memory: shift\n");
+ abort();
+ }
+ Frags = (frag *) malloc(MaxFragNum*sizeof(frag));
+ if(Frags == NULL){
+ fprintf(stderr, "cannot allocate memory: Frags\n");
+ abort();
+ }
+ curEl = (fragEl **) malloc(sizeof(fragEl *)*MaxDepth*2);
+ if(curEl == NULL){
+ fprintf(stderr, "cannot allocate memory: curEl\n");
+ abort();
+ }
+ curChar = (base_t **) malloc(sizeof(base_t *)*MaxDepth*2);
+ if(curChar == NULL){
+ fprintf(stderr, "cannot allocate memory: curChar\n");
+ abort();
+ }
+ {
+ unsigned long long MaxIns = 31;
+ elsize = (unsigned long long)(MaxFragSize*(1+MaxIns));
+ elsize *= (unsigned long long)(MaxDepth/FoS);
+
+ colsize = (unsigned long long)(MaxFragSize*(1+MaxIns));
+ }
+ FreeElList = (fragEl *) malloc(sizeof(fragEl)*elsize);
+ if(FreeElList == NULL){
+ fprintf(stderr, "cannot allocate memory: FreeElList\n");
+ abort();
+ }
+ FreeColList = (colStruc *) malloc(sizeof(colStruc)*colsize);
+ if(FreeColList == NULL){
+ fprintf(stderr, "cannot allocate memory: FreeColList\n");
+ abort();
+ }
+
+ // base encoding: '-'=0, A=1, C=2, G=3, T=4, everything else (incl. N)=5
+ for (i = 0; i < 128; i++){
+ encode[i] = 5;
+ }
+ encode['-'] = 0;
+ encode['a'] = encode['A'] = 1;
+ encode['c'] = encode['C'] = 2;
+ encode['g'] = encode['G'] = 3;
+ encode['t'] = encode['T'] = 4;
+ encode['n'] = encode['N'] = 5;
+
+ while(!DoneFlag){
+ buf4printComment[0] = '\0';
+ numFrags = readFrags();
+ if(numFrags == 0){
+ if(!DoneFlag)
+ fprintf(stderr, "no frag\n");
+ chr_is=0;
+ comment_is=0;
+ continue;
+ }
+ maxNumFrags = (numFrags > maxNumFrags) ? numFrags : maxNumFrags;
+
+ /* tack 2 blank columns on end of alignment to allow movement */
+ LastCol->next = getCol();
+ LastCol->next->prev = LastCol;
+ LastCol = LastCol->next;
+ LastCol->next = getCol();
+ LastCol->next->prev = LastCol;
+ LastCol = LastCol->next;
+
+ score = 0;
+ // get the sum of scores
+ for(col=FirstCol; col != LastCol; col=col->next){
+ if(col->colDepth > 0){
+ max = col->colInf[0];// num of deletions
+ for(i = 1; i < 5; ++i)
+ if(col->colInf[i] > max)
+ max = col->colInf[i];
+ score += (col->colDepth-max);
+ }
+ }
+
+ oldScore = score+1;
+ flag = 0;
+ if(opt_not_realign == 0){
+ // iterate until a full pass over all reads stops improving the score
+ while(oldScore > score){
+ oldScore = score;
+ ++flag;
+ for(i=0; i < numFrags; i++){
+ reAlign(i);// reAlign each read
+ }
+
+ // calculate the score again
+ score = 0;
+ n = 0;
+ for(col=FirstCol; n < BandSize; ++n,col=col->next){
+ if(col->colDepth > 0){
+ max = col->colInf[0];
+ for(i = 1; i < 5; ++i)
+ if(col->colInf[i] > max)
+ max = col->colInf[i];
+ score += (col->colDepth-max);
+ }
+ }
+ for( ; col != LastCol; col=col->next){
+ max = col->colInf[0];
+ if (col->colDepth == max) /* if column of blanks, remove */
+ {
+ col->prev->next = col->next;
+ col->next->prev = col->prev;
+ ep=col->frags.down;
+ for(i=0; i < col->colDepth; ++i){
+ ep->prev->next = ep->next;
+ ep->next->prev = ep->prev;
+ ep = ep->down;
+ }
+ }
+ else{
+ for(i = 1; i < 5; ++i)
+ if (col->colInf[i] > max)
+ max = col->colInf[i];
+ score += (col->colDepth-max);
+ }
+ }
+ }
+ }
+
+ printAlign(numFrags);
+ //fprintf(stderr,"After %d iterations\n",flag);
+ NumIts += flag;
+ ++NumCons;
+
+ el_index=0;
+ col_index=0;
+ }
+
+ free(bmat);
+ free(mat);
+ free(bmatPtr);
+ free(shift);
+ for(i=0; i < maxNumFrags; ++i){
+ free(Frags[i].afrag);
+ free(Frags[i].efrag);
+ }
+ free(Frags);
+
+ //fprintf(stderr,"%d %d\t", NumIts,NumCons);
+ //fprintf(stderr,"Total %d its in %d contigs for ave of %5.2f its/con\n", NumIts,NumCons,(double)NumIts/(double)NumCons);
+ free(buffer);
+ free(chrname);
+ free(buf4print);
+ free(buf4printSeq);
+ free(buf4printDepth);
+ free(buf4printQual);
+ free(buf4printComment);
+ free(base_exists);
+ free(row);
+ free(curEl);
+ free(curChar);
+ free(sbuf);
+ free(qbuf);
+ free(FreeElList);// fix: was never released
+ free(FreeColList);// fix: was never released
+ if(strcmp(argv[1+hitnum],"-") != 0){// fix: replaced empty then-branch
+ fclose(input);
+ }
+
+ return 0;
+}
+
diff --git a/nss2v_v3.c b/nss2v_v3.c
new file mode 100644
index 0000000..51b5d24
--- /dev/null
+++ b/nss2v_v3.c
@@ -0,0 +1,1098 @@
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+#include <getopt.h>
+#include <ctype.h> /* toupper */
+
+int LSEQ = 46;
+#define LAUX 32
+
+/* ~hit num */
+#define NROWS 512
+#define LISTSIZE (1024)
+#define FoS 16
+
+/* Allocate `size` bytes; on failure print a diagnostic naming `message`
+ * and terminate with exit status 1. Never returns NULL. */
+void*
+malloc_or_die(size_t size, const char* message)
+{
+  void *block = malloc(size);
+  if(block != NULL)
+    return block;
+  fputs("Fatal error!\n", stderr);
+  fputs("cannot allocate memory: ", stderr);
+  fputs(message, stderr);
+  fputs("\n", stderr);
+  exit(1);
+}
+/* Resize `buf` to `size` bytes; on failure print a diagnostic naming
+ * `message` and terminate with exit status 1. Never returns NULL. */
+void*
+realloc_or_die(void*buf, size_t size, const char* message)
+{
+  void *grown = realloc(buf, size);
+  if(grown == NULL){
+    fprintf(stderr, "Fatal error!\ncannot reallocate memory: %s\n", message);
+    exit(1);
+  }
+  return grown;
+}
+
+
+/* Allocate a zeroed array of `nmemb` elements of `size` bytes; on failure
+ * print a diagnostic naming `message` and terminate with exit status 1. */
+void*
+calloc_or_die(size_t nmemb, size_t size, const char* message)
+{
+  void *block = calloc(nmemb, size);
+  if(block != NULL)
+    return block;
+  fputs("Fatal error!\n", stderr);
+  fputs("cannot allocate memory: ", stderr);
+  fputs(message, stderr);
+  fputs("\n", stderr);
+  exit(1);
+}
+/* Zero the first `nrows` counters of `i_list`. */
+void
+reset_i_list(int *i_list, size_t nrows)
+{
+  if(nrows > 0)
+    memset(i_list, 0, nrows * sizeof(int));
+}
+
+const unsigned INTERVAL=16;/* >0ull */
+const int max_ins = 31;/* if a record has more than 31 insertions, this program will discard the record. */
+const int max_depth = FoS*32;/* maximum number of simultaneously stacked reads */
+
+/* half-open-ish coordinate pair describing a covered region */
+typedef struct regions_t{
+ unsigned long long stt;/* start coordinate */
+ unsigned long long end;/* end coordinate */
+}regions_t;
+
+/* one parsed CIGAR operation, e.g. {12,'M'} */
+typedef struct cigar_t{
+ int num;
+ char sym;
+}cigar_t;
+
+/* running CIGAR accumulator (symbol + repeat count) */
+typedef struct cmaux_t{
+ char sym;
+ int num;
+}cmaux_t;
+
+/* one SAM alignment record; buffers are (re)allocated in init_sam/realloc_sam */
+typedef struct sam_t{
+ char * qname;
+ int flag;
+ char * rname;
+ int pos;/* parsed as 1-origin, converted to 0-origin in get_sam_record */
+ int mapq;
+ char *cigar_string;/* raw CIGAR text */
+ struct cigar_t * cigar;/* parsed operations, filled by parse_cigar */
+ int cigar_length;/* number of valid entries in cigar */
+ char * rnext;
+ int pnext;
+ int tlen;/* overwritten with strlen(seq) in get_sam_record */
+ char * seq;
+ char * qual;
+ char as[LAUX];/* optional AS (alignment score) field text */
+ char ev[LAUX];/* optional e-value field text */
+ int bit_score;
+ double evalue;
+ char * buf;/* raw input line buffer */
+ size_t buffer_size;/* capacity of every char buffer above */
+}sam_t;
+
+/* one aligned base with its quality value */
+typedef struct base_t{
+ char base;
+ char qv;
+}base_t;
+
+/* element of a per-column downward-linked list of stacked bases */
+typedef struct el_t{
+ struct base_t el;
+ struct el_t * down;
+}el_t;
+/* bump allocator for el_t; overflow chains into ->ext */
+typedef struct el_pool_t{
+ size_t index;
+ size_t size; /* currently not exceeding 2^31 */
+ el_t * free_el_list;
+ struct el_pool_t *ext;
+}el_pool_t;
+
+/* Set up an empty element pool backed by a fresh array of `size` el_t. */
+el_pool_t*
+init_el_pool(el_pool_t * pool_ptr, size_t size)
+{
+  pool_ptr->free_el_list =
+    (el_t*)malloc_or_die(sizeof(el_t)*size, "free_el_list");
+  pool_ptr->index = 0;
+  pool_ptr->size = size;
+  pool_ptr->ext = NULL;
+  return pool_ptr;
+}
+
+/* Release the element storage of `pool_ptr` and of its whole extension
+ * chain; returns the total element capacity released. The pool struct
+ * pointed to by `pool_ptr` itself is owned by the caller and not freed. */
+int free_el_pool(el_pool_t * pool_ptr)
+{
+  int size;
+  if(pool_ptr == NULL)
+    return 0;
+  size = pool_ptr->size;
+  size += free_el_pool(pool_ptr->ext);
+  free(pool_ptr->ext);/* fix: the heap-allocated extension pool header (malloc'd in get_el) was leaked */
+  pool_ptr->ext = NULL;
+  free(pool_ptr->free_el_list);
+  return size;
+}
+
+/* Collapse the extension chain back into the primary pool: free all
+ * extensions, grow the primary array by their combined capacity, and
+ * rewind the bump index so the pool can be reused. */
+el_pool_t*
+flush_el_pool(el_pool_t * pool_ptr)
+{
+  int size;
+  size = free_el_pool(pool_ptr->ext);
+  free(pool_ptr->ext);/* fix: the extension pool header itself was leaked */
+  pool_ptr->ext = NULL;
+  if(size){
+    pool_ptr->size += size;
+    pool_ptr->free_el_list =
+      realloc_or_die(pool_ptr->free_el_list,
+                     sizeof(el_t)*(pool_ptr->size), "free_el_list");
+  }
+  pool_ptr->index = 0;
+  return pool_ptr;
+}
+
+/* one output column: a vertical stack of aligned bases */
+typedef struct column_t{
+ int score;
+ int s_depth;/* number of stacked elements in this column */
+ struct column_t * next;/* next inserted column at the same reference offset */
+ el_t * to_el;/* head of this column's element list */
+}column_t;
+
+/* growable array of columns indexed by reference position */
+typedef struct b_col_t{
+ size_t size;/* allocated capacity of array */
+ int end;/* highest initialized index */
+ column_t * array;
+}b_col_t;
+
+/* Grow basic_column so indices 0..end are valid, zero-initializing any
+ * slots that become newly visible, and record the new logical end. */
+b_col_t*
+init_expand_b_col(b_col_t * basic_column, size_t end)
+{
+  int i;
+  if(basic_column->size <= end){
+    basic_column->array = realloc_or_die(basic_column->array, sizeof(column_t)*(end+1), "expand_b_col");
+    basic_column->size = end + 1;/* fix: capacity was never recorded, causing redundant (and potentially shrinking) reallocs */
+  }
+  for(i=basic_column->end + 1; i <= end; ++i){
+    basic_column->array[i].to_el = NULL;
+    basic_column->array[i].next = NULL;
+    basic_column->array[i].score = 0;
+    basic_column->array[i].s_depth = 0;
+  }
+  basic_column->end = end;
+  return basic_column;
+}
+/* bump allocator for column_t; overflow chains into ->ext */
+typedef struct col_pool_t{
+ unsigned long index;/* next free slot */
+ unsigned long size;/* = (LSEQ*(max_ins+1)); */
+ column_t* free_col_list;/* backing array of columns */
+ struct col_pool_t * ext;/* overflow pool, allocated on demand */
+}col_pool_t;
+
+/* Reset one pool element to the blank state: ' ' base and qv, unlinked. */
+el_t*
+init_el(el_t* el)
+{
+  el->down = NULL;
+  el->el.base = ' ';
+  el->el.qv = ' ';
+  return el;
+}
+
+/* Set up an empty column pool backed by a fresh array of `size` columns. */
+col_pool_t*
+init_col_pool(col_pool_t* pool_ptr, size_t size)
+{
+  pool_ptr->free_col_list =
+    (column_t*)malloc_or_die(sizeof(column_t)*size, "free_col_list");
+  pool_ptr->index = 0;
+  pool_ptr->size = size;
+  pool_ptr->ext = NULL;
+  return pool_ptr;
+}
+/* Release the column storage of `pool_ptr` and of its whole extension
+ * chain; returns the total column capacity released. The pool struct
+ * pointed to by `pool_ptr` itself is owned by the caller and not freed. */
+int
+free_col_pool(col_pool_t* pool_ptr)
+{
+  int size;
+  if(pool_ptr == NULL)
+    return 0;
+  size = pool_ptr->size;
+  size += free_col_pool(pool_ptr->ext);
+  free(pool_ptr->ext);/* fix: the heap-allocated extension pool header (malloc'd in get_col) was leaked */
+  pool_ptr->ext = NULL;
+  free(pool_ptr->free_col_list);
+  return size;
+}
+/* Collapse the extension chain back into the primary column pool: free all
+ * extensions, grow the primary array by their combined capacity, and rewind
+ * the bump index so the pool can be reused. */
+col_pool_t*
+flush_col_pool(col_pool_t* pool_ptr)
+{
+  int size;
+  size = free_col_pool(pool_ptr->ext);
+  free(pool_ptr->ext);/* fix: the extension pool header itself was leaked */
+  pool_ptr->ext = NULL;
+  if(size){
+    pool_ptr->size += size;
+    pool_ptr->free_col_list =
+      realloc_or_die(pool_ptr->free_col_list,
+                     sizeof(column_t)*(pool_ptr->size), "free_col_list");
+  }
+  pool_ptr->index = 0;
+  return pool_ptr;
+}
+
+/* Hand out the next free element, walking (and growing by a 1.4 factor)
+ * the extension chain whenever the current pool is exhausted. */
+el_t *
+get_el(el_pool_t *el_pool)
+{
+  while(el_pool->index >= el_pool->size){
+    if(el_pool->ext == NULL){
+      el_pool->ext = (el_pool_t*)malloc_or_die(sizeof(el_pool_t),"el_pool_expand");
+      init_el_pool(el_pool->ext, el_pool->size * 1.4);
+    }
+    el_pool = el_pool->ext;
+  }
+  if(el_pool->free_el_list == NULL){
+    fprintf(stderr, "free_el_list is NULL\n");
+    exit(1);
+  }
+  return init_el(el_pool->free_el_list+el_pool->index++);
+}
+
+/* Return 1 iff `c` is an upper-case nucleotide code (A, C, G, T or N). */
+inline int is_nt_char(char c)
+{
+  switch(c){
+  case 'A': case 'C': case 'G': case 'T': case 'N':
+    return 1;
+  default:
+    return 0;
+  }
+}
+
+
+/* Reset a column to the empty state and attach one fresh blank element
+ * taken from `el_pool`. */
+column_t *
+init_col(column_t *col, el_pool_t *el_pool)
+{
+  col->to_el = get_el(el_pool);
+  col->next = NULL;
+  col->score = 0;
+  col->s_depth = 0;
+  return col;
+}
+/* Hand out the next free column, walking (and growing by a 1.4 factor)
+ * the extension chain whenever the current pool is exhausted. */
+column_t *
+get_col(col_pool_t *col_pool, el_pool_t*el_pool){
+  while(col_pool->index >= col_pool->size){
+    if(col_pool->ext == NULL){
+      col_pool->ext = (col_pool_t*)malloc_or_die(sizeof(col_pool_t),"col_pool_expand");
+      init_col_pool(col_pool->ext, col_pool->size * 1.4);
+    }
+    col_pool = col_pool->ext;
+  }
+  if(col_pool->free_col_list == NULL){
+    fprintf(stderr, "free_col_list is NULL\n");
+    abort();
+  }
+  return init_col(col_pool->free_col_list+col_pool->index++, el_pool);
+}
+
+/* Allocate an array of `nrows` region lists, each with `listsize` slots,
+ * dying with a diagnostic on allocation failure. */
+regions_t ** alloc_lists_or_die(size_t nrows, size_t listsize)
+{
+  int i;
+  char buf[32];
+  regions_t ** lists;
+  lists = (regions_t**)malloc_or_die(sizeof(regions_t*)*nrows, "lists");
+  for(i=0;i<nrows;++i){/* fix: loop bound was the macro NROWS, ignoring the nrows parameter (overflow/under-init when nrows != NROWS) */
+    sprintf(buf, "lists[%i]", i);
+    lists[i] = (regions_t*)malloc_or_die(sizeof(regions_t)*listsize, buf);
+  }
+  return lists;
+}
+/* Allocate every buffer of a sam_t with LBUF bytes, dying on failure.
+ * The cigar array is sized LBUF/2 entries to match realloc_sam and the
+ * parse_cigar loop bound (buffer_size/2). */
+void init_sam(sam_t * s, size_t LBUF){
+  s->buffer_size = LBUF;
+  s->qname = (char*)malloc_or_die(LBUF, "s->qname");
+  s->rname = (char*)malloc_or_die(LBUF, "s->rname");
+  s->cigar_string = (char*)malloc_or_die(LBUF, "s->cigar_string");/* fix: message wrongly said s->rname */
+  s->cigar = (cigar_t*)malloc_or_die(sizeof(cigar_t)*LBUF/2, "s->cigar");/* fix: was LSEQ entries, but parse_cigar writes up to buffer_size/2 */
+  s->rnext = (char*)malloc_or_die(LBUF, "s->rnext");
+  s->seq = (char*)malloc_or_die(LBUF, "s->seq");
+  s->qual = (char*)malloc_or_die(LBUF, "s->qual");
+  s->buf = (char*)malloc_or_die(LBUF, "s->buf");
+  return;
+}
+
+/* Grow every buffer of a sam_t to LBUF bytes (cigar to LBUF/2 entries),
+ * dying on failure, and record the new capacity. */
+void realloc_sam(sam_t * s, size_t LBUF){
+  s->buffer_size = LBUF;
+  s->qname = (char*)realloc_or_die(s->qname, LBUF, "s->qname");
+  s->rname = (char*)realloc_or_die(s->rname, LBUF, "s->rname");
+  s->cigar_string = (char*)realloc_or_die(s->cigar_string, LBUF, "s->cigar_string");/* fix: message wrongly said s->rname */
+  s->cigar = (cigar_t*)realloc_or_die(s->cigar, sizeof(cigar_t)*LBUF/2, "s->cigar");
+  s->rnext = (char*)realloc_or_die(s->rnext, LBUF, "s->rnext");
+  s->seq = (char*)realloc_or_die(s->seq, LBUF, "s->seq");
+  s->qual = (char*)realloc_or_die(s->qual, LBUF, "s->qual");
+  s->buf = (char*)realloc_or_die(s->buf, LBUF, "s->buf");
+  return;
+}
+
+/* Clear the string fields and the parsed-CIGAR length so the record
+ * struct can be reused for the next input line. */
+void reset_sam(sam_t * s){
+  s->cigar_length = 0;
+  s->qname[0] = '\0';
+  s->rname[0] = '\0';
+  s->rnext[0] = '\0';
+  s->seq[0] = '\0';
+  s->qual[0] = '\0';
+  return;
+}
+
+/* Release every buffer allocated by init_sam/realloc_sam.
+ * The sam_t struct itself is owned by the caller and not freed. */
+void free_sam(sam_t * s){
+  free(s->qname);
+  free(s->rname);
+  free(s->cigar_string);
+  free(s->cigar);
+  free(s->rnext);
+  free(s->seq);
+  free(s->qual);
+  free(s->buf);/* fix: buf (allocated in init_sam) was leaked */
+  return;
+}
+
+int opt_separate=0;// option flag: emit units separately (set from the command line elsewhere)
+
+void print_vertical(char *, b_col_t *, char* ,char *, el_pool_t*);// defined later in this file
+int ref_count = 0;// number of reference sequences seen so far
+
+// Parse t_sam->cigar_string ("<num><sym>..." pairs) into the t_sam->cigar
+// array, appending after any existing cigar_length entries. Aborts if the
+// string does not scan cleanly or is not fully consumed, and rejects
+// records whose CIGAR starts or ends with an 'H' (hard clip) or 'I'
+// (insertion) operation.
+void
+parse_cigar(sam_t*t_sam)
+{
+ int stt_cigar_index;
+ int end_cigar_index;
+ int offset=0;// read position inside cigar_string
+ int j;
+ int ret;
+ int tmpoffset;// characters consumed by one "%d%c" scan (%n)
+ int len = strlen(t_sam->cigar_string);
+ // j < buffer_size/2 bounds the number of ops a line of this size can hold
+ for(j=0; offset < len && j<t_sam->buffer_size/2; ++j){
+ ret = sscanf((t_sam->cigar_string+offset),"%d%c%n",&t_sam->cigar[t_sam->cigar_length].num,&t_sam->cigar[t_sam->cigar_length].sym,&tmpoffset);
+ if(ret != 2){
+ printf("%s\n",t_sam->cigar_string);
+ fprintf(stderr, "souteigai 1 ret: %d %s\n",ret, t_sam->cigar_string+offset);
+ fprintf(stderr, "offset: %d \n",offset);
+ abort();
+ }
+ offset += tmpoffset;
+ ++t_sam->cigar_length;
+ }
+ stt_cigar_index=0;
+ end_cigar_index=t_sam->cigar_length-1;
+ if(offset != len){
+ fprintf(stderr, "souteigai 2 %d %d\n", offset, len);
+ abort();
+ }
+ // boundary operations this downstream pipeline cannot represent
+ if(t_sam->cigar[stt_cigar_index].sym == 'H'){
+ fprintf(stderr,"err: heading H\n");
+ exit(1);
+ }
+ if(t_sam->cigar[end_cigar_index].sym == 'H'){
+ fprintf(stderr,"err: tailing H\n");
+ exit(1);
+ }
+ if(t_sam->cigar[stt_cigar_index].sym == 'I'){
+ fprintf(stderr,"err: heading I\n");
+ exit(1);
+ }
+ if(t_sam->cigar[end_cigar_index].sym == 'I'){
+ fprintf(stderr,"err: tailing I\n");
+ exit(1);
+ }
+}
+
+/* small -> capital */
+/* Upper-case the nucleotide sequence in place; any character outside
+ * {A,C,G,T,N} after conversion is a fatal input error. */
+void
+normalize_naseq(char * seq)
+{
+  char *p;
+  for(p = seq; *p != '\0'; ++p){
+    *p = toupper(*p);
+    if(!is_nt_char(*p)){
+      fputs("Fatal error!\n", stderr);
+      fputs("unexpected character:", stderr);
+      fprintf(stderr, "'%c' in\n%s\n",*p,seq);
+      abort();
+    }
+  }
+}
+
+/* Expand t_sam->seq into sbuf following the parsed CIGAR: 'M' and 'I'
+ * copy read bases, 'D' emits one '-' per deleted reference base.
+ * Returns sbuf, NUL-terminated; the caller guarantees sbuf is large
+ * enough for the expanded alignment. */
+char*
+gen_inserted_seq(char *sbuf, sam_t * t_sam)
+{
+ int read_pos = 0;
+ int out_pos = 0;
+ int i;
+ for(i = 0; i < t_sam->cigar_length; ++i){
+ int n = t_sam->cigar[i].num;
+ switch(t_sam->cigar[i].sym){
+ case 'D':
+ while(n-- > 0){
+ sbuf[out_pos++] = '-';
+ }
+ break;
+ case 'I': /* insertions and matches both consume read bases */
+ case 'M':
+ while(n-- > 0){
+ sbuf[out_pos++] = t_sam->seq[read_pos++];
+ }
+ break;
+ default:
+ fprintf(stderr, "strange cigar %c\n",t_sam->cigar[i].sym);
+ exit(1);
+ }
+ }
+ sbuf[out_pos] = '\0';
+ return sbuf;
+}
+
+/* Read one record from in_sam into t_sam.
+ * Returns 1 on success, 0 at EOF; aborts/exits on malformed records.
+ * Grows t_sam's buffers as needed to hold arbitrarily long lines, parses
+ * the 13 expected tab-separated fields (SAM-like, plus AS:i: and EV:Z:
+ * trailing tags), normalizes pos to 0-origin, substitutes qv '!' when
+ * qual is "*", then parses the CIGAR and upper-cases the sequence. */
+int
+get_sam_record(sam_t*t_sam, FILE* in_sam)
+{
+ char * retp;
+ int sretval;
+ int line_length;
+ char * buf = t_sam->buf;
+ size_t buffer_size = t_sam->buffer_size;
+ retp = fgets(buf, buffer_size, in_sam);
+ if(!retp) return 0;
+ line_length = strlen(buf);
+ if(line_length >= t_sam->buffer_size - 1){
+ /* The line is incompletely read if the buffer
+ * is fully occuppied and not ending with newline */
+ while(line_length >= buffer_size - 1 && buf[line_length - 1] != '\n'){
+ buf = (char*)realloc_or_die(buf, buffer_size*2, "buf");
+ /* at this point line_length == buffer_size-1, so exactly
+  * buffer_size+1 bytes remain free after doubling -- the fgets
+  * size below is tight but correct */
+ fgets(buf+line_length, buffer_size + 1, in_sam);
+ line_length = strlen(buf);
+ buffer_size *= 2;
+ }
+ t_sam->buf=buf;
+/* now that the length of the buffer was measured,
+ * the line should contain qseq and sseq of the same length.
+ * Thus the sequence should be less than half of the line length */
+ realloc_sam(t_sam, buffer_size);
+ }
+ reset_sam(t_sam);
+ t_sam->cigar_length=0;
+ /* NOTE: the %s conversions carry no field width; safe only because every
+  * destination buffer is kept at least as large as the whole line by
+  * realloc_sam above */
+ sretval = sscanf(t_sam->buf,"%s\t%d\t%s\t%d\t%d\t%s\t%s\t%d\t%d\t%s\t%s\t%s\t%s\n",
+ t_sam->qname,
+ &t_sam->flag,
+ t_sam->rname,
+ &t_sam->pos, /* 1-origin */
+ &t_sam->mapq,
+ t_sam->cigar_string,
+ t_sam->rnext,
+ &t_sam->pnext,
+ &t_sam->tlen,
+ t_sam->seq,
+ t_sam->qual,
+ t_sam->as,
+ t_sam->ev);
+ /* the parsed TLEN field is discarded: tlen is repurposed to hold the
+  * sequence length */
+ t_sam->tlen = strlen(t_sam->seq);
+ if(t_sam->pos > 0){
+ t_sam->pos--;/* 0-origin */
+ }else{
+ fprintf(stderr,"strange pos: %d . this must be > 0\n",t_sam->pos);
+ exit(1);
+ }
+ if(sretval != 13){/* XXX 13 may be changed */
+ fprintf(stderr, "souteigai sam: retval %d\n",sretval);
+ fprintf(stderr, "%s\n", t_sam->buf);
+ abort();
+ }
+ /* "*" means "no quality values": substitute the minimum qv character */
+ if(strcmp("*",t_sam->qual) == 0){
+ int len = strlen(t_sam->seq);
+ int j;
+ for(j=0; j<len; ++j){
+ t_sam->qual[j] = '!';
+ }
+ }
+ sretval = sscanf(t_sam->as, "AS:i:%d", &(t_sam->bit_score));
+ if(sretval != 1){
+ fprintf(stderr, "no alignment score ? : retval %d\n",sretval);
+ fprintf(stderr, "%s\n",t_sam->as);
+ fprintf(stderr, "%s\n",t_sam->qname);
+ fprintf(stderr, "%s\n",t_sam->rname);
+ abort();
+ }
+ sretval = sscanf(t_sam->ev, "EV:Z:%lf", &(t_sam->evalue));
+ if(sretval != 1){
+ fprintf(stderr, "no evalue ? : retval %d\n",sretval);
+ fprintf(stderr, "%s\n",t_sam->ev);
+ abort();
+ }
+ parse_cigar(t_sam);
+ normalize_naseq(t_sam->seq);
+ return 1;
+}
+/* Sum the reference-consuming CIGAR operations ('M' and 'D') of t_sam.
+ * Returns 0 -- meaning "skip this record" -- when any insertion exceeds
+ * the global max_ins threshold. */
+int
+calculate_region_length(sam_t* t_sam)
+{
+ int i;
+ int span = 0;
+ for(i = 0; i < t_sam->cigar_length; ++i){
+ const char sym = t_sam->cigar[i].sym;
+ const int num = t_sam->cigar[i].num;
+ if(sym == 'I'){
+ /* max_ins is a global variable */
+ if(num > max_ins){
+ fprintf(stderr, "WARNING: too large ins: %d . skipped\n",num);
+ return 0;
+ }
+ }else if(sym == 'M' || sym == 'D'){
+ span += num;
+ }
+ }
+ return span;
+}
+/*
+ * Entry point: read alignment records (SAM-like, sorted by reference name,
+ * then evalue, then bitScore) from <in.nss> or "-" (stdin) and emit, per
+ * reference, a vertical pileup of the reads packed into at most NROWS
+ * mutually non-overlapping rows (printed by print_vertical).
+ *
+ *   -s        : give every read its own row (no packing)
+ *   -v <int>  : valid voters (useful depth bound), 0 < v <= 1024
+ *   -r <int>  : maximum read length (default 32768)
+ *
+ * Each reference chunk must begin with a ref-ref self hit; chunks without
+ * one are discarded (see ref_count).
+ */
+int main(int argc, char ** argv)
+{
+ int hitnum=0;
+ int LBUF;
+ FILE * IN_SAM = NULL;
+ int valid_voters = (NROWS-1);
+ char *prevchr;
+ b_col_t basic_column;
+ int * i_list;
+ char *pbuf;
+ char *sbuf;
+ int max_end=0; /* only int variable end2 is assigned */
+ el_pool_t el_pool;
+ col_pool_t col_pool;
+ regions_t ** lists;
+ sam_t * t_sam;
+ /* option parsing; hitnum counts consumed argv slots */
+ {
+ int result;
+ int tmp;
+ while((result=getopt(argc,argv,"v:r:s")) != -1){
+ switch(result){
+ case 's':
+ opt_separate=1;
+ ++hitnum;
+ break;
+ case 'v':
+ valid_voters = atoi(optarg);
+ if(valid_voters <= 0){
+ fprintf(stderr, "valid_voters must be > 0: %s\n", optarg);
+ abort();
+ }
+ if(valid_voters > 1024){
+ fprintf(stderr, "valid_voters must be <= 1024: %s\n", optarg);
+ abort();
+ }
+ hitnum += 2;
+ break;
+ case 'r':
+ tmp = atoi(optarg);
+ if(tmp <= 0){
+ fprintf(stderr, "r(max_read_length) must be > 0: %s\n", optarg);
+ abort();
+ }
+ LSEQ = tmp;
+ if(tmp>=65536){
+ ++LSEQ;
+ }
+ hitnum += 2;
+ break;
+ case '?':
+ printf("humei\n");
+ exit(1);
+ break;
+ default:
+ break;
+ }
+ }
+ }
+
+ LBUF = 2*LSEQ;
+ if(argc != 2+hitnum)
+ {
+ fprintf(stderr, "USAGE: <this> <in.nss>\n");
+ fprintf(stderr, "\tinput MUST be sorted by 1st chromosome name (must), 2nd evalue and 3rd bitScore\n");
+ fprintf(stderr, "\t-v <integer>: valid voter.\n");
+ fprintf(stderr, "\t-r <integer>: max read length. default value is 32768\n");
+ return 1;
+ }
+ /* size the element/column pools from the maximum read length */
+ if(LSEQ <= 65536){
+ el_pool.size = LSEQ*(1+max_ins);
+ el_pool.size *= max_depth/FoS; /* 32 */
+ }else{
+ el_pool.size = LSEQ*100;
+ }
+ col_pool.size = LSEQ*(1+max_ins);
+
+ if(strcmp("-",argv[1+hitnum])==0){
+ IN_SAM = stdin;
+ }else{
+ IN_SAM = fopen(argv[1+hitnum],"r");
+ if(IN_SAM == NULL){
+ fprintf(stderr, "cannot open the file %s\n", argv[1+hitnum]);
+ abort();
+ }
+ }
+ prevchr = (char*)malloc_or_die(LBUF, "prevchr");
+ basic_column.array = (column_t*)malloc_or_die(sizeof(column_t)*LSEQ, "basic_column");
+ basic_column.size = LSEQ;
+ basic_column.end=-1;
+ init_el_pool(&el_pool, el_pool.size);
+ init_col_pool(&col_pool, col_pool.size);
+ lists = alloc_lists_or_die(NROWS, LISTSIZE);
+ i_list = (int*)calloc_or_die(NROWS, sizeof(int), "i_list");
+ pbuf = (char*)malloc_or_die(LSEQ, "pbuf");
+ sbuf = (char*)malloc_or_die(LSEQ*2, "sbuf");
+ t_sam = (sam_t*)malloc_or_die(sizeof(sam_t), "t_sam");
+ init_sam(t_sam, LBUF);
+
+ max_end=0;
+ prevchr[0] = '\0';
+ while(get_sam_record(t_sam, IN_SAM)){
+ /* keep pbuf/sbuf large enough for the longest line seen so far */
+ if(LSEQ < t_sam->buffer_size/2){
+ LSEQ=(t_sam->buffer_size+1)/2;
+ pbuf = (char*)realloc_or_die(pbuf, LSEQ, "pbuf");
+ sbuf = (char*)realloc_or_die(sbuf, LSEQ*2, "sbuf");
+ }
+ if(prevchr[0] == '\0'){
+ strcpy(prevchr,t_sam->rname);
+ }else if(strcmp(prevchr,t_sam->rname) != 0){
+ /* reference changed: flush the finished chunk and reset state */
+ if(ref_count>0){
+ print_vertical(prevchr, &basic_column, pbuf,sbuf, &el_pool);
+ }
+ ref_count = 0;
+
+ strcpy(prevchr, t_sam->rname);
+ reset_i_list(i_list, NROWS);
+ basic_column.end=-1;
+ max_end = 0;
+ flush_el_pool(&el_pool);
+ flush_col_pool(&col_pool);
+ }
+
+ /* look for a free region and set */
+ {
+ int stt;
+ int end;
+ int stt2;
+ int end2;
+ int f_discard;
+ int not_collide;
+ int length_of_region;
+ length_of_region = calculate_region_length(t_sam);
+ if(!length_of_region){ continue; }
+ stt = t_sam->pos;/* 0-originized */
+ end = stt + length_of_region - 1;
+ stt2 = stt;
+ end2 = end;
+
+ if(strcmp(t_sam->rname,t_sam->qname) == 0){/* this is a ref-ref hit */
+ int length = strlen(t_sam->seq);
+ if(length != t_sam->cigar[0].num){
+ /*discard this record */
+ continue;
+ }
+ if(basic_column.end < end){
+ /* initialize basic_column[] */
+ init_expand_b_col(&basic_column, end);
+ }
+ ++ref_count;
+ }else{
+ if(ref_count <= 0){
+ continue;/* against chunks without ref-ref hits. */
+ }
+ }
+
+ /* discard records whose whole span is already at full voter depth */
+ f_discard=1;
+ {
+ int s;
+ for(s=stt; s<=end; ++s){
+ basic_column.array[s].s_depth++;
+ if(basic_column.array[s].s_depth <= valid_voters){
+ f_discard=0;
+ }
+ }
+ }
+ if(f_discard){
+ continue;
+ }
+
+ gen_inserted_seq(sbuf, t_sam);
+
+ int row;
+ for(row=0; row<NROWS; ++row){
+ int f_free = 0;
+ not_collide = 1;
+ /* find a free region */
+ if(!opt_separate){
+ int l;
+ for(l=0; l<i_list[row] && l<LISTSIZE; ++l){/* check the rgn does not overlap any reads */
+ if(end2+INTERVAL < lists[row][l].stt || stt2 > lists[row][l].end+INTERVAL){
+ continue;
+ }else{
+ not_collide=0;
+ }
+ }
+ /* fix: was `<= LISTSIZE`, which let a full row accept one more
+  * record and write lists[row][LISTSIZE], one past the
+  * allocation made by alloc_lists_or_die(NROWS, LISTSIZE) */
+ if(not_collide==1 && i_list[row] < LISTSIZE){
+ f_free = 1;
+ }
+ }
+ if(opt_separate){
+ if(i_list[row] == 0){
+ f_free = 1;
+ }
+ }
+ if(f_free){
+ /* column[row] is free */
+ /* set */
+ {
+ int k,m;
+ int pos=stt;
+ int si = 0;
+ for(k=0; k<t_sam->cigar_length; ++k){
+ char sym = t_sam->cigar[k].sym;
+ if(sym == 'M' || sym == 'D'){
+ int t = pos+t_sam->cigar[k].num;
+ int l;
+ for(l=pos; l<t; ++l,++pos){
+ int score = 0;
+ el_t * tmp;
+ if(basic_column.array[l].to_el == NULL){
+ /* initialize */
+ basic_column.array[l].to_el = get_el(&el_pool);
+ basic_column.array[l].next = NULL;
+ basic_column.array[l].score = 0;
+ }
+ tmp = basic_column.array[l].to_el;
+ /* walk down to this row's element, counting filled cells */
+ for(m=0; m<row; ++m){
+ if(tmp->down == NULL){
+ tmp->down = get_el(&el_pool);
+ }
+ if(tmp->el.base != ' '){
+ ++score;
+ }
+ tmp = tmp->down;
+ }
+ if(sbuf[si] != ' '){
+ if(tmp->el.base != ' '){
+ fprintf(stderr,"bug %s %c %c\n",t_sam->rname,tmp->el.base,sbuf[si]);
+ }
+ tmp->el.base = sbuf[si];
+ ++score;
+ }else{
+ if(tmp->el.base != ' '){
+ ++score;
+ }
+ }
+ ++si;
+ while(tmp->down){
+ tmp = tmp->down;
+ if(tmp->el.base != ' '){
+ ++score;
+ }
+ }
+ basic_column.array[l].score = score;
+
+ /* pad the insertion columns between l-1 and l with '-'
+  * where both neighbours carry a base */
+ if(pos != stt && basic_column.array[l].to_el->el.base != ' ' && basic_column.array[l-1].to_el->el.base != ' ')
+ {
+ column_t * cur_col;
+ column_t * l_col;
+ column_t * r_col;
+ if(l<1){
+ fprintf(stderr,"pg bug l\n");
+ }
+ cur_col = basic_column.array[l-1].next;/* ins column */
+ l_col = &(basic_column.array[l-1]);
+ r_col = &(basic_column.array[l]);
+ for(; cur_col; cur_col=cur_col->next){
+ {
+ el_t * l_el = l_col->to_el;
+ el_t * r_el = r_col->to_el;
+ el_t * c_el = cur_col->to_el;
+ int depth = 0;
+ int score = 0;
+ while(l_el != NULL && r_el != NULL)
+ {
+ if(c_el->el.base == ' '){
+ if(l_el->el.base != ' ' && r_el->el.base != ' '){
+ c_el->el.base = '-';
+ ++score;
+ }
+ }else{
+ ++score;
+ }
+ ++depth;
+
+ l_el = l_el->down;
+ r_el = r_el->down;
+ if(depth > row && (l_el == NULL || r_el == NULL)){
+ break;
+ }
+ if(c_el->down == NULL){
+ c_el->down = get_el(&el_pool);
+ }
+ c_el = c_el->down;
+ }
+ while(depth <= row){
+ if(depth == row){
+ c_el->el.base = '-';
+ ++score;
+ break;
+ }else if(c_el->el.base == ' '){
+ }else{
+ ++score;
+ }
+ if(c_el->down == NULL){
+ c_el->down = get_el(&el_pool);
+ }
+ c_el = c_el->down;
+ ++depth;
+ }
+ cur_col->score = score;
+ }
+ }
+ }
+ }
+ }else if(sym == 'I'){
+ int p;
+ int n_ins;
+ column_t *l_col, *r_col, *cur_col;
+ if(pos<1){
+ fprintf(stderr,"strange pos %d\n",pos);
+ exit(1);
+ }
+ l_col = &(basic_column.array[pos-1]);
+ r_col = &(basic_column.array[pos]);
+ cur_col = l_col;
+
+ n_ins = t_sam->cigar[k].num;
+ for(p=0; p<n_ins; ++p){
+ if(cur_col->next == NULL){
+ cur_col->next = get_col(&col_pool, &el_pool);
+ }
+ cur_col = cur_col->next;
+
+ {
+ el_t * l_el = l_col->to_el;
+ el_t * r_el = r_col->to_el;
+ el_t * c_el = cur_col->to_el;
+ int depth = 0;
+ int score = 0;
+ while(l_el != NULL && r_el != NULL)
+ {
+ if(depth == row){
+ if(sbuf[si] != ' '){
+ if(c_el->el.base != ' '){
+ fprintf(stderr,"pg bug 433\n");
+ }
+ c_el->el.base = sbuf[si];
+ ++score;
+ }else{
+ if(c_el->el.base != ' '){
+ ++score;
+ }
+ }
+ ++si;
+ }else if(c_el->el.base == ' '){
+ if(l_el->el.base != ' ' && r_el->el.base != ' '){
+ c_el->el.base = '-';
+ ++score;
+ }
+ }else{
+ /* keep */
+ ++score;
+ }
+ ++depth;
+
+ l_el = l_el->down;
+ r_el = r_el->down;
+ if(depth > row && (l_el == NULL || r_el == NULL)){
+ break;
+ }
+ if(c_el->down == NULL){
+ c_el->down = get_el(&el_pool);
+ }
+ c_el = c_el->down;
+ }
+ while(depth <= row){
+ if(depth == row){
+ if(sbuf[si] != ' '){
+ c_el->el.base = sbuf[si];
+ ++score;
+ }
+ ++si;
+ break;
+ }else if(c_el->el.base == ' '){
+ }else{
+ ++score;
+ }
+ if(c_el->down == NULL){
+ c_el->down = get_el(&el_pool);
+ }
+ c_el = c_el->down;
+ ++depth;
+ }
+ cur_col->score = score;
+ }
+ }
+ }else{
+ fprintf(stderr,"strange sym %c\n",sym);
+ exit(1);
+ }
+ }
+ }
+ lists[row][i_list[row]].stt = stt2;
+ lists[row][i_list[row]].end = end2;
+ ++i_list[row];
+ if(max_end < end2){
+ max_end = end2;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+ /* flush the last reference chunk */
+ if(ref_count>0){
+ print_vertical(prevchr, &basic_column, pbuf,sbuf,&el_pool);
+ }
+ ref_count = 0;
+
+ free_sam(t_sam);
+ free(t_sam);
+ if(IN_SAM) fclose(IN_SAM);
+ free(prevchr);
+ free(basic_column.array);
+ free_col_pool(&col_pool);
+ free_el_pool(&el_pool);
+ {
+ int i;
+ for(i=0;i<NROWS;++i){
+ free(lists[i]);
+ }
+ free(lists);
+ }
+ free(i_list);
+ free(pbuf);
+ free(sbuf);
+
+ return EXIT_SUCCESS;
+}
+
+/* Decide whether a column deserves printing: returns 1 when the chain
+ * contains at least one real base (neither ' ' filler nor '-' gap),
+ * 0 otherwise. A NULL column head is a fatal inconsistency. */
+int print_check(el_t * tmp, char * chr, int stt, int i_basic_column){
+ el_t * cur;
+ if(tmp == NULL){
+ fprintf(stderr, "NULL column 1 %s %d %d\n",chr,stt,i_basic_column);
+ exit(1);
+ }
+ for(cur = tmp; cur != NULL; cur = cur->down){
+ if(cur->el.base != ' ' && cur->el.base != '-'){
+ return 1;/* will be printed */
+ }
+ }
+ return 0;
+}
+
+/* Copy one vertical column (the head element plus its ->down chain) into
+ * pbuf and print it. d < 0 prints the bases alone; d >= 0 appends a
+ * one-character depth code: '!' for depth 0, otherwise 33 + 1 + d/10 with
+ * d capped at 920, followed by the (capped) numeric depth. */
+void print_column(el_t * tmp, int d, char * pbuf){
+ int n = 0;
+ el_t * cur = tmp;
+ if(cur == NULL){
+ fprintf(stderr, "NULL column 2\n");
+ exit(1);
+ }
+ for(; cur != NULL; cur = cur->down){
+ pbuf[n++] = cur->el.base;
+ }
+ pbuf[n] = '\0';
+ if(d < 0){
+ printf("%s\n",pbuf);
+ }else if(d == 0){
+ printf("%s %c %3d\n",pbuf,(char)(0+33),d);
+ }else{
+ if(d >= 920){
+ d = 920;
+ }
+ printf("%s %c %3d\n",pbuf,(char)(d/10+1+33),d);
+ }
+}
+
+/* Emit the finished pileup for reference `chr`: a "%<name>" header line, a
+ * "#<depth codes>" line (same single-character encoding as print_column),
+ * then one line per alignment column -- basic columns interleaved with
+ * their insertion columns -- followed by a blank separator line.
+ * pbuf/sbuf are scratch buffers; columns that were never touched inside
+ * [stt,end] are back-filled with a depth-1 'N' sentinel. */
+void print_vertical(char * chr, b_col_t *basic_column,char * pbuf,char * sbuf, el_pool_t*el_pool){
+ /* done valid voters */
+ /* done terminal '-' */
+ /* done chop short frags */
+
+ int stt=basic_column->end+1;
+ int end=0;
+
+ int printed=0;
+ int i;
+ int si=0;
+ /* trim leading/trailing runs of empty columns: [stt,end] becomes the
+  * span between the first and last non-NULL column heads */
+ for(i=0; i<=basic_column->end; ++i){
+ el_t * tmp = basic_column->array[i].to_el;
+ if(tmp){
+ stt = i;
+ break;
+ }
+ }
+ for(i=basic_column->end; i>=stt; --i){
+ el_t * tmp = basic_column->array[i].to_el;
+ if(tmp){
+ end = i;
+ break;
+ }
+ }
+
+ /* build the depth-code line in sbuf, plugging interior gaps with 'N' */
+ for(i=stt; i<=end; ++i){
+ int d;
+ el_t * tmp = basic_column->array[i].to_el;
+ if(tmp == NULL){
+ basic_column->array[i].to_el = get_el(el_pool);
+ basic_column->array[i].to_el->el.base = 'N';
+ basic_column->array[i].next = NULL;
+ basic_column->array[i].score = 1;
+ basic_column->array[i].s_depth = 1;
+ }
+ d = basic_column->array[i].s_depth;
+ if(d==0){
+ sbuf[si] = (char)(0+33);
+ }else{
+ if(d>=920){
+ d = 920;
+ }
+ sbuf[si] = (char)(d/10+1+33);
+ }
+ ++si;
+ }
+ sbuf[si] = '\0';
+
+ for(i=stt; i<=end; ++i){
+ el_t* tmp = basic_column->array[i].to_el;
+ column_t* tc;
+ if(print_check(tmp,chr,stt,basic_column->end)){
+ if(!printed){
+ printf("%%%s\n",chr);
+ printf("#%s\n",sbuf);
+ }
+ print_column(tmp,-1,pbuf);
+ printed=1;
+ }
+ tc = basic_column->array[i].next;
+ while(tc){
+ el_t * tmp = tc->to_el;
+ if(print_check(tmp,chr,stt,basic_column->end)){
+ /* NOTE(review): if the first printed column is an insertion
+  * column, only the "%" header is emitted and the "#" depth
+  * line is skipped -- confirm this asymmetry is intended */
+ if(!printed){
+ printf("%%%s\n",chr);
+ }
+ print_column(tmp,-1,pbuf);
+ printed=1;
+ }
+ tc = tc->next;
+ }
+ }
+ if(printed)
+ printf("\n");
+}
+
diff --git a/partition_fa.pl b/partition_fa.pl
new file mode 100755
index 0000000..28611d9
--- /dev/null
+++ b/partition_fa.pl
@@ -0,0 +1,143 @@
+#!/usr/bin/perl
+# Split a FASTA (or, with -q, FASTQ) file round-robin into N partition
+# files named <prefix>_%04d.<fa|fq>. Record i goes to file (i mod N).
+# Options: -p <prefix> (default: current timestamp), -q (FASTQ input),
+# -1origin (number output files from 1 instead of 0).
+use strict;
+use warnings;
+use Getopt::Long;
+#use Compress::Zlib;
+
+#my $opt_n;
+#GetOptions("n" => \$opt_n);
+
+my $opt_prefix=`date +%Y%m%d%H%M%S`;
+chomp $opt_prefix;
+my $opt_gz;
+my $opt_1origin=0;
+my $opt_fq=0;
+
+#print $opt_prefix,"\n";
+#exit 1;
+GetOptions(
+ "p=s" => \$opt_prefix,
+ "q" => \$opt_fq,
+ "1origin"=>\$opt_1origin
+);
+
+my $ext="fa";
+if($opt_fq){
+ $ext = "fq";
+}
+
+#GetOptions("p=s" => \$opt_prefix, "z" => \$opt_gz);
+# gzip output support is disabled; $opt_gz is forced off below
+$opt_gz=0;
+
+if(@ARGV != 2){
+ printf STDERR ("USAGE: <this> <in.fa> <int (number of partitions)>\n");
+ printf STDERR ("\t[-p <prefix>: prefix to the outputs. default is yymmddhhmmss]\n");
+# NOTE(review): the -1origin usage line is missing its closing ']'
+ printf STDERR ("\t[-1origin: for file naming\n");
+ printf STDERR ("\t[-q : regard input as in fastq]\n");
+# printf STDERR ("\t[-z: output gz compressed file instead of uncompressed fasta file]\n");
+ #printf STDERR ("\t[-n (name: output only names , not sequences)\n");
+ exit 1;
+}
+
+#printf("%s\n", $ARGV[0]);
+#printf("%s\n", $ARGV[1]);
+#exit 1;
+
+my $in_fa = $ARGV[0];
+chomp $in_fa;
+#my $IN_FA;
+#if($in_fa eq '-'){
+# open $IN_FA,"<",STDIN or die "cannot open $in_fa : $!\n";
+#}
+#else{
+ open my $IN_FA,"<".$in_fa or die "cannot open $in_fa : $!\n";
+#}
+
+my $partitions = $ARGV[1];
+if($partitions !~ /^\+?\d+$/){
+ printf STDERR ("nth must be an integer\n");
+ exit 1;
+}
+
+# open all N output handles up front; records are dealt to them in turn
+my @out;
+for(my $i=0; $i<$partitions; ++$i){
+ if($opt_gz){
+# $out[$i] = gzopen(sprintf("%s_%04d.fa.gz",$opt_prefix,$i),"wb") or die "cannot open output file:$!\n";
+ }
+ else{
+ if($opt_1origin){
+ open $out[$i],">",sprintf("%s_%04d.$ext",$opt_prefix,$i+1) or die "cannot open output file:$!\n";
+ }
+ else{
+ open $out[$i],">",sprintf("%s_%04d.$ext",$opt_prefix,$i) or die "cannot open output file:$!\n";
+ }
+ }
+}
+
+my $counter=-1;
+
+# $name always holds the lookahead line (the next record's header)
+my $name = <$IN_FA>;
+
+while(!eof($IN_FA)){
+ ++$counter;
+ chomp $name;
+ my $tmp_name = $name;
+ my $bases="";
+ my $opts="";
+ my $qvs="";
+ if(!$opt_fq){
+ # FASTA: accumulate sequence lines until the next '>' header
+ while(!eof($IN_FA) && (($name = <$IN_FA>) !~ /^>/)){
+ chomp $name;#bases
+ $bases .= $name;
+ }
+ }
+ else{
+ # FASTQ: sequence lines up to the '+' separator, then quality lines
+ # up to the next '@' header.
+ # NOTE(review): a quality line whose first character is '@' (a legal
+ # quality value) would be mistaken for the next record header here
+ # -- confirm the inputs never wrap quality strings.
+ while(!eof($IN_FA) && (($name = <$IN_FA>) !~ /^\+/)){
+ chomp $name;#bases
+ $bases .= $name;
+ }
+ chomp $name;
+ $opts = $name;
+ while(!eof($IN_FA) && (($name = <$IN_FA>) !~ /^\@/)){
+ chomp $name;#qvs
+ $qvs .= $name;
+ }
+ if(length($bases) != length($qvs)){
+ printf STDERR ("|bases| != |qvs|: %d %d\n",length($bases),length($qvs));
+ exit(1);
+ }
+ }
+ #next if ($counter % $mod != $nth);
+ my $fh = $out[$counter % $partitions];
+ if($opt_gz){
+# $fh->gzwrite(sprintf("%s\n",$tmp_name));
+# $fh->gzwrite(sprintf("%s\n",$bases));
+ }
+ else{
+ printf $fh ("%s\n",$tmp_name);
+ printf $fh ("%s\n",$bases);
+ if($opt_fq){
+ printf $fh ("%s\n",$opts);
+ printf $fh ("%s\n",$qvs);
+ }
+ }
+
+ #printf $out[$counter % $partitions] ("%s\n",$tmp_name);
+ #printf $out[$counter % $partitions] ("%s\n",$bases);
+ #my $num=70;
+ #my $loop = length($bases);
+ #for(my $stt=0; $stt < $loop; $stt += $num){
+ # printf("%s\n",substr($bases,$stt,$num));
+ #}
+}
+
+close $IN_FA;
+for(my $i=0; $i<$partitions; ++$i){
+ if($opt_gz){
+# $out[$i]->gzclose;
+ }
+ else{
+ close $out[$i];
+ }
+}
+
+
diff --git a/pbasm.spec b/pbasm.spec
new file mode 100644
index 0000000..4f5a366
--- /dev/null
+++ b/pbasm.spec
@@ -0,0 +1,86 @@
+# Copyright (c) 2011-2013, Pacific Biosciences of California, Inc.
+#
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted (subject to the limitations in the
+# disclaimer below) provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+#
+# * Neither the name of Pacific Biosciences nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE
+# GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY PACIFIC
+# BIOSCIENCES AND ITS CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL PACIFIC BIOSCIENCES OR ITS
+# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+# SUCH DAMAGE.
+#
+unitigger = bogart
+#utgErrorRate = 0.015
+#utgErrorLimit = 4.5
+
+cnsErrorRate = 0.25
+cgwErrorRate = 0.25
+ovlErrorRate = 0.015
+
+frgMinLen = 1000
+ovlMinLen = 40
+
+merSize=14
+
+merylMemory = 16384
+merylThreads = 8
+
+ovlStoreMemory = 16384
+
+# grid info
+useGrid = 0
+scriptOnGrid = 0
+frgCorrOnGrid = 0
+ovlCorrOnGrid = 0
+
+sge = -S /bin/bash -V -q all.q
+#sge = -S /bin/bash -sync y -V -q all.q
+sgeScript = -pe threads 1
+sgeConsensus = -pe threads 1
+sgeOverlap = -pe threads 4
+sgeFragmentCorrection = -pe threads 4
+sgeOverlapCorrection = -pe threads 1
+
+#ovlHashBits = 22
+#ovlHashBlockLength = 46871347
+#ovlRefBlockSize = 537
+
+ovlHashBits = 25
+ovlThreads = 4
+ovlHashBlockLength = 50000000
+ovlRefBlockSize = 100000000
+
+ovlConcurrency = 6
+frgCorrThreads = 4
+frgCorrBatchSize = 100000
+ovlCorrBatchSize = 100000
+
+cnsMinFrags = 7500
+cnsConcurrency = 24
+
+# change sgeName on each run if you do not want this run to wait for jobs left over from a previous run
+sgeName = iroha
diff --git a/waf b/waf
new file mode 100755
index 0000000..16a745b
--- /dev/null
+++ b/waf
@@ -0,0 +1,164 @@
+#!/usr/bin/env python
+# encoding: ISO8859-1
+# Thomas Nagy, 2005-2012
+
+"""
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. The name of the author may not be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+"""
+
+import os, sys
+
+VERSION="1.7.9"
+REVISION="27bc0de640eec0db74b23f9d3a63c369"
+INSTALL=''
+C1='#&'
+C2='#%'
+cwd = os.getcwd()
+join = os.path.join
+
+
+WAF='waf'
+# b() normalizes string literals for binary file comparison: the identity
+# on Python 2 (where str is already bytes), redefined below to encode to
+# bytes on Python 3. The order of these definitions is significant.
+def b(x):
+ return x
+if sys.hexversion>0x300000f:
+ WAF='waf3'
+ def b(x):
+  return x.encode()
+
+def err(m):
+ print(('\033[91mError: %s\033[0m' % m))
+ sys.exit(1)
+
+def unpack_wafdir(dir):
+ # Extract the waflib library that is embedded in this very script as a
+ # bz2 tarball between the '#==>' / '#<==' markers (newlines/carriage
+ # returns escaped as C1/C2), unpacking it into dir/waflib.
+ # The bare excepts below are deliberate best-effort handling for
+ # platforms lacking bz2 support or Windows-only attribute calls.
+ f = open(sys.argv[0],'rb')
+ c = 'corrupt archive (%d)'
+ while 1:
+  line = f.readline()
+  if not line: err('run waf-light from a folder containing waflib')
+  if line == b('#==>\n'):
+   txt = f.readline()
+   if not txt: err(c % 1)
+   if f.readline() != b('#<==\n'): err(c % 2)
+   break
+ if not txt: err(c % 3)
+ # undo the newline/carriage-return escaping applied when embedding
+ txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r'))
+
+ import shutil, tarfile
+ try: shutil.rmtree(dir)
+ except OSError: pass
+ try:
+  for x in ['Tools', 'extras']:
+   os.makedirs(join(dir, 'waflib', x))
+ except OSError:
+  err("Cannot unpack waf lib into %s\nMove waf in a writable directory" % dir)
+
+ os.chdir(dir)
+ tmp = 't.bz2'
+ t = open(tmp,'wb')
+ try: t.write(txt)
+ finally: t.close()
+
+ try:
+  t = tarfile.open(tmp)
+ except:
+  # no in-process bz2: fall back to an external bunzip2 binary
+  try:
+   os.system('bunzip2 t.bz2')
+   t = tarfile.open('t')
+   tmp = 't'
+  except:
+   os.chdir(cwd)
+   try: shutil.rmtree(dir)
+   except OSError: pass
+   err("Waf cannot be unpacked, check that bzip2 support is present")
+
+ try:
+  for x in t: t.extract(x)
+ finally:
+  t.close()
+
+ # 493 == 0o755: make the unpacked tool directories traversable
+ for x in ['Tools', 'extras']:
+  os.chmod(join('waflib',x), 493)
+
+ if sys.hexversion<0x300000f:
+  # on Python 2, convert the (Python 3 oriented) sources back
+  sys.path = [join(dir, 'waflib')] + sys.path
+  import fixpy2
+  fixpy2.fixdir(dir)
+
+ os.unlink(tmp)
+ os.chdir(cwd)
+
+ # Windows: hide the unpacked directory; harmless no-op elsewhere
+ try: dir = unicode(dir, 'mbcs')
+ except: pass
+ try:
+  from ctypes import windll
+  windll.kernel32.SetFileAttributesW(dir, 2)
+ except:
+  pass
+
+def test(dir):
+ try:
+ os.stat(join(dir, 'waflib'))
+ return os.path.abspath(dir)
+ except OSError:
+ pass
+
+def find_lib():
+ # Locate the waflib directory, trying in order: $WAFDIR, next to a
+ # waf-light script, the system install locations, and a local hidden
+ # unpack directory -- unpacking the embedded archive as a last resort.
+ # Returns the directory to prepend to sys.path.
+ name = sys.argv[0]
+ base = os.path.dirname(os.path.abspath(name))
+
+ #devs use $WAFDIR
+ w=test(os.environ.get('WAFDIR', ''))
+ if w: return w
+
+ #waf-light
+ if name.endswith('waf-light'):
+  w = test(base)
+  if w: return w
+  err('waf-light requires waflib -> export WAFDIR=/folder')
+
+ dirname = '%s-%s-%s' % (WAF, VERSION, REVISION)
+ for i in [INSTALL,'/usr','/usr/local','/opt']:
+  w = test(i + '/lib/' + dirname)
+  if w: return w
+
+ #waf-local (dot-prefixed except on win32)
+ dir = join(base, (sys.platform != 'win32' and '.' or '') + dirname)
+ w = test(dir)
+ if w: return w
+
+ #unpack
+ unpack_wafdir(dir)
+ return dir
+
+# locate (or unpack) waflib and make it importable before anything else
+wafdir = find_lib()
+sys.path.insert(0, wafdir)
+
+if __name__ == '__main__':
+
+ from waflib import Scripting
+ Scripting.waf_entry_point(cwd, VERSION, wafdir)
+
diff --git a/waflib/Build.py b/waflib/Build.py
new file mode 100644
index 0000000..70330ef
--- /dev/null
+++ b/waflib/Build.py
@@ -0,0 +1,769 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys,errno,re,shutil
+try:
+ import cPickle
+except ImportError:
+ import pickle as cPickle
+from waflib import Runner,TaskGen,Utils,ConfigSet,Task,Logs,Options,Context,Errors
+import waflib.Node
+CACHE_DIR='c4che'
+CACHE_SUFFIX='_cache.py'
+# sentinel values marking install / uninstall runs of the build context
+INSTALL=1337
+UNINSTALL=-1337
+# BuildContext attributes persisted in the pickled cache (see store/restore)
+SAVED_ATTRS='root node_deps raw_deps task_sigs'.split()
+CFG_FILES='cfg_files'
+# task-generator posting strategies
+POST_AT_ONCE=0
+POST_LAZY=1
+POST_BOTH=2
+class BuildContext(Context.Context):
+ '''executes the build'''
+ cmd='build'
+ variant=''
+ def __init__(self,**kw):
+ super(BuildContext,self).__init__(**kw)
+ self.is_install=0
+ self.top_dir=kw.get('top_dir',Context.top_dir)
+ self.run_dir=kw.get('run_dir',Context.run_dir)
+ self.post_mode=POST_AT_ONCE
+ self.out_dir=kw.get('out_dir',Context.out_dir)
+ self.cache_dir=kw.get('cache_dir',None)
+ if not self.cache_dir:
+ self.cache_dir=self.out_dir+os.sep+CACHE_DIR
+ self.all_envs={}
+ self.task_sigs={}
+ self.node_deps={}
+ self.raw_deps={}
+ self.cache_dir_contents={}
+ self.task_gen_cache_names={}
+ self.launch_dir=Context.launch_dir
+ self.jobs=Options.options.jobs
+ self.targets=Options.options.targets
+ self.keep=Options.options.keep
+ self.cache_global=Options.cache_global
+ self.nocache=Options.options.nocache
+ self.progress_bar=Options.options.progress_bar
+ self.deps_man=Utils.defaultdict(list)
+ self.current_group=0
+ self.groups=[]
+ self.group_names={}
+ def get_variant_dir(self):
+ if not self.variant:
+ return self.out_dir
+ return os.path.join(self.out_dir,self.variant)
+ variant_dir=property(get_variant_dir,None)
+ def __call__(self,*k,**kw):
+ kw['bld']=self
+ ret=TaskGen.task_gen(*k,**kw)
+ self.task_gen_cache_names={}
+ self.add_to_group(ret,group=kw.get('group',None))
+ return ret
+ def rule(self,*k,**kw):
+ def f(rule):
+ ret=self(*k,**kw)
+ ret.rule=rule
+ return ret
+ return f
+ def __copy__(self):
+ raise Errors.WafError('build contexts are not supposed to be copied')
+ def install_files(self,*k,**kw):
+ pass
+ def install_as(self,*k,**kw):
+ pass
+ def symlink_as(self,*k,**kw):
+ pass
+ def load_envs(self):
+ node=self.root.find_node(self.cache_dir)
+ if not node:
+ raise Errors.WafError('The project was not configured: run "waf configure" first!')
+ lst=node.ant_glob('**/*%s'%CACHE_SUFFIX,quiet=True)
+ if not lst:
+ raise Errors.WafError('The cache directory is empty: reconfigure the project')
+ for x in lst:
+ name=x.path_from(node).replace(CACHE_SUFFIX,'').replace('\\','/')
+ env=ConfigSet.ConfigSet(x.abspath())
+ self.all_envs[name]=env
+ for f in env[CFG_FILES]:
+ newnode=self.root.find_resource(f)
+ try:
+ h=Utils.h_file(newnode.abspath())
+ except(IOError,AttributeError):
+ Logs.error('cannot find %r'%f)
+ h=Utils.SIG_NIL
+ newnode.sig=h
+ def init_dirs(self):
+ if not(os.path.isabs(self.top_dir)and os.path.isabs(self.out_dir)):
+ raise Errors.WafError('The project was not configured: run "waf configure" first!')
+ self.path=self.srcnode=self.root.find_dir(self.top_dir)
+ self.bldnode=self.root.make_node(self.variant_dir)
+ self.bldnode.mkdir()
+ def execute(self):
+ self.restore()
+ if not self.all_envs:
+ self.load_envs()
+ self.execute_build()
+ def execute_build(self):
+ Logs.info("Waf: Entering directory `%s'"%self.variant_dir)
+ self.recurse([self.run_dir])
+ self.pre_build()
+ self.timer=Utils.Timer()
+ if self.progress_bar:
+ sys.stderr.write(Logs.colors.cursor_off)
+ try:
+ self.compile()
+ finally:
+ if self.progress_bar==1:
+ c=len(self.returned_tasks)or 1
+ self.to_log(self.progress_line(c,c,Logs.colors.BLUE,Logs.colors.NORMAL))
+ print('')
+ sys.stdout.flush()
+ sys.stderr.write(Logs.colors.cursor_on)
+ Logs.info("Waf: Leaving directory `%s'"%self.variant_dir)
+ self.post_build()
+ def restore(self):
+ try:
+ env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py'))
+ except(IOError,OSError):
+ pass
+ else:
+ if env['version']<Context.HEXVERSION:
+ raise Errors.WafError('Version mismatch! reconfigure the project')
+ for t in env['tools']:
+ self.setup(**t)
+ f=None
+ try:
+ dbfn=os.path.join(self.variant_dir,Context.DBFILE)
+ try:
+ f=open(dbfn,'rb')
+ except(IOError,EOFError):
+ Logs.debug('build: Could not load the build cache %s (missing)'%dbfn)
+ else:
+ try:
+ waflib.Node.pickle_lock.acquire()
+ waflib.Node.Nod3=self.node_class
+ try:
+ data=cPickle.load(f)
+ except Exception ,e:
+ Logs.debug('build: Could not pickle the build cache %s: %r'%(dbfn,e))
+ else:
+ for x in SAVED_ATTRS:
+ setattr(self,x,data[x])
+ finally:
+ waflib.Node.pickle_lock.release()
+ finally:
+ if f:
+ f.close()
+ self.init_dirs()
+ def store(self):
+ data={}
+ for x in SAVED_ATTRS:
+ data[x]=getattr(self,x)
+ db=os.path.join(self.variant_dir,Context.DBFILE)
+ try:
+ waflib.Node.pickle_lock.acquire()
+ waflib.Node.Nod3=self.node_class
+ f=None
+ try:
+ f=open(db+'.tmp','wb')
+ cPickle.dump(data,f,-1)
+ finally:
+ if f:
+ f.close()
+ finally:
+ waflib.Node.pickle_lock.release()
+ try:
+ st=os.stat(db)
+ os.unlink(db)
+ if not Utils.is_win32:
+ os.chown(db+'.tmp',st.st_uid,st.st_gid)
+ except(AttributeError,OSError):
+ pass
+ os.rename(db+'.tmp',db)
+ def compile(self):
+ Logs.debug('build: compile()')
+ self.producer=Runner.Parallel(self,self.jobs)
+ self.producer.biter=self.get_build_iterator()
+ self.returned_tasks=[]
+ try:
+ self.producer.start()
+ except KeyboardInterrupt:
+ self.store()
+ raise
+ else:
+ if self.producer.dirty:
+ self.store()
+ if self.producer.error:
+ raise Errors.BuildError(self.producer.error)
+ def setup(self,tool,tooldir=None,funs=None):
+ if isinstance(tool,list):
+ for i in tool:self.setup(i,tooldir)
+ return
+ module=Context.load_tool(tool,tooldir)
+ if hasattr(module,"setup"):module.setup(self)
+ def get_env(self):
+ try:
+ return self.all_envs[self.variant]
+ except KeyError:
+ return self.all_envs['']
+ def set_env(self,val):
+ self.all_envs[self.variant]=val
+ env=property(get_env,set_env)
+ def add_manual_dependency(self,path,value):
+ if path is None:
+ raise ValueError('Invalid input')
+ if isinstance(path,waflib.Node.Node):
+ node=path
+ elif os.path.isabs(path):
+ node=self.root.find_resource(path)
+ else:
+ node=self.path.find_resource(path)
+ if isinstance(value,list):
+ self.deps_man[id(node)].extend(value)
+ else:
+ self.deps_man[id(node)].append(value)
+ def launch_node(self):
+ try:
+ return self.p_ln
+ except AttributeError:
+ self.p_ln=self.root.find_dir(self.launch_dir)
+ return self.p_ln
+ def hash_env_vars(self,env,vars_lst):
+ if not env.table:
+ env=env.parent
+ if not env:
+ return Utils.SIG_NIL
+ idx=str(id(env))+str(vars_lst)
+ try:
+ cache=self.cache_env
+ except AttributeError:
+ cache=self.cache_env={}
+ else:
+ try:
+ return self.cache_env[idx]
+ except KeyError:
+ pass
+ lst=[env[a]for a in vars_lst]
+ ret=Utils.h_list(lst)
+ Logs.debug('envhash: %s %r',Utils.to_hex(ret),lst)
+ cache[idx]=ret
+ return ret
+ def get_tgen_by_name(self,name):
+ cache=self.task_gen_cache_names
+ if not cache:
+ for g in self.groups:
+ for tg in g:
+ try:
+ cache[tg.name]=tg
+ except AttributeError:
+ pass
+ try:
+ return cache[name]
+ except KeyError:
+ raise Errors.WafError('Could not find a task generator for the name %r'%name)
+ def progress_line(self,state,total,col1,col2):
+ n=len(str(total))
+ Utils.rot_idx+=1
+ ind=Utils.rot_chr[Utils.rot_idx%4]
+ pc=(100.*state)/total
+ eta=str(self.timer)
+ fs="[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s]["%(n,n,ind)
+ left=fs%(state,total,col1,pc,col2)
+ right='][%s%s%s]'%(col1,eta,col2)
+ cols=Logs.get_term_cols()-len(left)-len(right)+2*len(col1)+2*len(col2)
+ if cols<7:cols=7
+ ratio=((cols*state)//total)-1
+ bar=('='*ratio+'>').ljust(cols)
+ msg=Utils.indicator%(left,bar,right)
+ return msg
+ def declare_chain(self,*k,**kw):
+ return TaskGen.declare_chain(*k,**kw)
+ def pre_build(self):
+ for m in getattr(self,'pre_funs',[]):
+ m(self)
+ def post_build(self):
+ for m in getattr(self,'post_funs',[]):
+ m(self)
+ def add_pre_fun(self,meth):
+ try:
+ self.pre_funs.append(meth)
+ except AttributeError:
+ self.pre_funs=[meth]
+ def add_post_fun(self,meth):
+ try:
+ self.post_funs.append(meth)
+ except AttributeError:
+ self.post_funs=[meth]
+ def get_group(self,x):
+ if not self.groups:
+ self.add_group()
+ if x is None:
+ return self.groups[self.current_group]
+ if x in self.group_names:
+ return self.group_names[x]
+ return self.groups[x]
+ def add_to_group(self,tgen,group=None):
+ assert(isinstance(tgen,TaskGen.task_gen)or isinstance(tgen,Task.TaskBase))
+ tgen.bld=self
+ self.get_group(group).append(tgen)
+ def get_group_name(self,g):
+ if not isinstance(g,list):
+ g=self.groups[g]
+ for x in self.group_names:
+ if id(self.group_names[x])==id(g):
+ return x
+ return''
+ def get_group_idx(self,tg):
+ se=id(tg)
+ for i in range(len(self.groups)):
+ for t in self.groups[i]:
+ if id(t)==se:
+ return i
+ return None
+ def add_group(self,name=None,move=True):
+ if name and name in self.group_names:
+ Logs.error('add_group: name %s already present'%name)
+ g=[]
+ self.group_names[name]=g
+ self.groups.append(g)
+ if move:
+ self.current_group=len(self.groups)-1
+ def set_group(self,idx):
+ if isinstance(idx,str):
+ g=self.group_names[idx]
+ for i in range(len(self.groups)):
+ if id(g)==id(self.groups[i]):
+ self.current_group=i
+ else:
+ self.current_group=idx
+ def total(self):
+ total=0
+ for group in self.groups:
+ for tg in group:
+ try:
+ total+=len(tg.tasks)
+ except AttributeError:
+ total+=1
+ return total
+ def get_targets(self):
+ to_post=[]
+ min_grp=0
+ for name in self.targets.split(','):
+ tg=self.get_tgen_by_name(name)
+ if not tg:
+ raise Errors.WafError('target %r does not exist'%name)
+ m=self.get_group_idx(tg)
+ if m>min_grp:
+ min_grp=m
+ to_post=[tg]
+ elif m==min_grp:
+ to_post.append(tg)
+ return(min_grp,to_post)
+ def get_all_task_gen(self):
+ lst=[]
+ for g in self.groups:
+ lst.extend(g)
+ return lst
+ def post_group(self):
+ if self.targets=='*':
+ for tg in self.groups[self.cur]:
+ try:
+ f=tg.post
+ except AttributeError:
+ pass
+ else:
+ f()
+ elif self.targets:
+ if self.cur<self._min_grp:
+ for tg in self.groups[self.cur]:
+ try:
+ f=tg.post
+ except AttributeError:
+ pass
+ else:
+ f()
+ else:
+ for tg in self._exact_tg:
+ tg.post()
+ else:
+ ln=self.launch_node()
+ if ln.is_child_of(self.bldnode):
+ Logs.warn('Building from the build directory, forcing --targets=*')
+ ln=self.srcnode
+ elif not ln.is_child_of(self.srcnode):
+ Logs.warn('CWD %s is not under %s, forcing --targets=* (run distclean?)'%(ln.abspath(),self.srcnode.abspath()))
+ ln=self.srcnode
+ for tg in self.groups[self.cur]:
+ try:
+ f=tg.post
+ except AttributeError:
+ pass
+ else:
+ if tg.path.is_child_of(ln):
+ f()
+ def get_tasks_group(self,idx):
+ tasks=[]
+ for tg in self.groups[idx]:
+ try:
+ tasks.extend(tg.tasks)
+ except AttributeError:
+ tasks.append(tg)
+ return tasks
+ def get_build_iterator(self):
+ self.cur=0
+ if self.targets and self.targets!='*':
+ (self._min_grp,self._exact_tg)=self.get_targets()
+ global lazy_post
+ if self.post_mode!=POST_LAZY:
+ while self.cur<len(self.groups):
+ self.post_group()
+ self.cur+=1
+ self.cur=0
+ while self.cur<len(self.groups):
+ if self.post_mode!=POST_AT_ONCE:
+ self.post_group()
+ tasks=self.get_tasks_group(self.cur)
+ Task.set_file_constraints(tasks)
+ Task.set_precedence_constraints(tasks)
+ self.cur_tasks=tasks
+ self.cur+=1
+ if not tasks:
+ continue
+ yield tasks
+ while 1:
+ yield[]
+class inst(Task.Task):
+ color='CYAN'
+ def uid(self):
+ lst=[self.dest,self.path]+self.source
+ return Utils.h_list(repr(lst))
+ def post(self):
+ buf=[]
+ for x in self.source:
+ if isinstance(x,waflib.Node.Node):
+ y=x
+ else:
+ y=self.path.find_resource(x)
+ if not y:
+ if Logs.verbose:
+ Logs.warn('Could not find %s immediately (may cause broken builds)'%x)
+ idx=self.generator.bld.get_group_idx(self)
+ for tg in self.generator.bld.groups[idx]:
+ if not isinstance(tg,inst)and id(tg)!=id(self):
+ tg.post()
+ y=self.path.find_resource(x)
+ if y:
+ break
+ else:
+ raise Errors.WafError('Could not find %r in %r'%(x,self.path))
+ buf.append(y)
+ self.inputs=buf
+ def runnable_status(self):
+ ret=super(inst,self).runnable_status()
+ if ret==Task.SKIP_ME:
+ return Task.RUN_ME
+ return ret
+ def __str__(self):
+ return''
+ def run(self):
+ return self.generator.exec_task()
+ def get_install_path(self,destdir=True):
+ dest=Utils.subst_vars(self.dest,self.env)
+ dest=dest.replace('/',os.sep)
+ if destdir and Options.options.destdir:
+ dest=os.path.join(Options.options.destdir,os.path.splitdrive(dest)[1].lstrip(os.sep))
+ return dest
+ def exec_install_files(self):
+ destpath=self.get_install_path()
+ if not destpath:
+ raise Errors.WafError('unknown installation path %r'%self.generator)
+ for x,y in zip(self.source,self.inputs):
+ if self.relative_trick:
+ destfile=os.path.join(destpath,y.path_from(self.path))
+ Utils.check_dir(os.path.dirname(destfile))
+ else:
+ destfile=os.path.join(destpath,y.name)
+ self.generator.bld.do_install(y.abspath(),destfile,self.chmod)
+ def exec_install_as(self):
+ destfile=self.get_install_path()
+ self.generator.bld.do_install(self.inputs[0].abspath(),destfile,self.chmod)
+ def exec_symlink_as(self):
+ destfile=self.get_install_path()
+ src=self.link
+ if self.relative_trick:
+ src=os.path.relpath(src,os.path.dirname(destfile))
+ self.generator.bld.do_link(src,destfile)
+class InstallContext(BuildContext):
+ '''installs the targets on the system'''
+ cmd='install'
+ def __init__(self,**kw):
+ super(InstallContext,self).__init__(**kw)
+ self.uninstall=[]
+ self.is_install=INSTALL
+ def do_install(self,src,tgt,chmod=Utils.O644):
+ d,_=os.path.split(tgt)
+ if not d:
+ raise Errors.WafError('Invalid installation given %r->%r'%(src,tgt))
+ Utils.check_dir(d)
+ srclbl=src.replace(self.srcnode.abspath()+os.sep,'')
+ if not Options.options.force:
+ try:
+ st1=os.stat(tgt)
+ st2=os.stat(src)
+ except OSError:
+ pass
+ else:
+ if st1.st_mtime+2>=st2.st_mtime and st1.st_size==st2.st_size:
+ if not self.progress_bar:
+ Logs.info('- install %s (from %s)'%(tgt,srclbl))
+ return False
+ if not self.progress_bar:
+ Logs.info('+ install %s (from %s)'%(tgt,srclbl))
+ try:
+ os.remove(tgt)
+ except OSError:
+ pass
+ try:
+ shutil.copy2(src,tgt)
+ os.chmod(tgt,chmod)
+ except IOError:
+ try:
+ os.stat(src)
+ except(OSError,IOError):
+ Logs.error('File %r does not exist'%src)
+ raise Errors.WafError('Could not install the file %r'%tgt)
+ def do_link(self,src,tgt):
+ d,_=os.path.split(tgt)
+ Utils.check_dir(d)
+ link=False
+ if not os.path.islink(tgt):
+ link=True
+ elif os.readlink(tgt)!=src:
+ link=True
+ if link:
+ try:os.remove(tgt)
+ except OSError:pass
+ if not self.progress_bar:
+ Logs.info('+ symlink %s (to %s)'%(tgt,src))
+ os.symlink(src,tgt)
+ else:
+ if not self.progress_bar:
+ Logs.info('- symlink %s (to %s)'%(tgt,src))
+ def run_task_now(self,tsk,postpone):
+ tsk.post()
+ if not postpone:
+ if tsk.runnable_status()==Task.ASK_LATER:
+ raise self.WafError('cannot post the task %r'%tsk)
+ tsk.run()
+ def install_files(self,dest,files,env=None,chmod=Utils.O644,relative_trick=False,cwd=None,add=True,postpone=True):
+ tsk=inst(env=env or self.env)
+ tsk.bld=self
+ tsk.path=cwd or self.path
+ tsk.chmod=chmod
+ if isinstance(files,waflib.Node.Node):
+ tsk.source=[files]
+ else:
+ tsk.source=Utils.to_list(files)
+ tsk.dest=dest
+ tsk.exec_task=tsk.exec_install_files
+ tsk.relative_trick=relative_trick
+ if add:self.add_to_group(tsk)
+ self.run_task_now(tsk,postpone)
+ return tsk
+ def install_as(self,dest,srcfile,env=None,chmod=Utils.O644,cwd=None,add=True,postpone=True):
+ tsk=inst(env=env or self.env)
+ tsk.bld=self
+ tsk.path=cwd or self.path
+ tsk.chmod=chmod
+ tsk.source=[srcfile]
+ tsk.dest=dest
+ tsk.exec_task=tsk.exec_install_as
+ if add:self.add_to_group(tsk)
+ self.run_task_now(tsk,postpone)
+ return tsk
+ def symlink_as(self,dest,src,env=None,cwd=None,add=True,postpone=True,relative_trick=False):
+ if Utils.is_win32:
+ return
+ tsk=inst(env=env or self.env)
+ tsk.bld=self
+ tsk.dest=dest
+ tsk.path=cwd or self.path
+ tsk.source=[]
+ tsk.link=src
+ tsk.relative_trick=relative_trick
+ tsk.exec_task=tsk.exec_symlink_as
+ if add:self.add_to_group(tsk)
+ self.run_task_now(tsk,postpone)
+ return tsk
+class UninstallContext(InstallContext):
+ '''removes the targets installed'''
+ cmd='uninstall'
+ def __init__(self,**kw):
+ super(UninstallContext,self).__init__(**kw)
+ self.is_install=UNINSTALL
+ def do_install(self,src,tgt,chmod=Utils.O644):
+ if not self.progress_bar:
+ Logs.info('- remove %s'%tgt)
+ self.uninstall.append(tgt)
+ try:
+ os.remove(tgt)
+ except OSError ,e:
+ if e.errno!=errno.ENOENT:
+ if not getattr(self,'uninstall_error',None):
+ self.uninstall_error=True
+ Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
+ if Logs.verbose>1:
+ Logs.warn('Could not remove %s (error code %r)'%(e.filename,e.errno))
+ while tgt:
+ tgt=os.path.dirname(tgt)
+ try:
+ os.rmdir(tgt)
+ except OSError:
+ break
+ def do_link(self,src,tgt):
+ try:
+ if not self.progress_bar:
+ Logs.info('- unlink %s'%tgt)
+ os.remove(tgt)
+ except OSError:
+ pass
+ while tgt:
+ tgt=os.path.dirname(tgt)
+ try:
+ os.rmdir(tgt)
+ except OSError:
+ break
+ def execute(self):
+ try:
+ def runnable_status(self):
+ return Task.SKIP_ME
+ setattr(Task.Task,'runnable_status_back',Task.Task.runnable_status)
+ setattr(Task.Task,'runnable_status',runnable_status)
+ super(UninstallContext,self).execute()
+ finally:
+ setattr(Task.Task,'runnable_status',Task.Task.runnable_status_back)
+class CleanContext(BuildContext):
+ '''cleans the project'''
+ cmd='clean'
+ def execute(self):
+ self.restore()
+ if not self.all_envs:
+ self.load_envs()
+ self.recurse([self.run_dir])
+ try:
+ self.clean()
+ finally:
+ self.store()
+ def clean(self):
+ Logs.debug('build: clean called')
+ if self.bldnode!=self.srcnode:
+ lst=[]
+ for e in self.all_envs.values():
+ lst.extend(self.root.find_or_declare(f)for f in e[CFG_FILES])
+ for n in self.bldnode.ant_glob('**/*',excl='.lock* *conf_check_*/** config.log c4che/*',quiet=True):
+ if n in lst:
+ continue
+ n.delete()
+ self.root.children={}
+ for v in'node_deps task_sigs raw_deps'.split():
+ setattr(self,v,{})
+class ListContext(BuildContext):
+ '''lists the targets to execute'''
+ cmd='list'
+ def execute(self):
+ self.restore()
+ if not self.all_envs:
+ self.load_envs()
+ self.recurse([self.run_dir])
+ self.pre_build()
+ self.timer=Utils.Timer()
+ for g in self.groups:
+ for tg in g:
+ try:
+ f=tg.post
+ except AttributeError:
+ pass
+ else:
+ f()
+ try:
+ self.get_tgen_by_name('')
+ except Exception:
+ pass
+ lst=list(self.task_gen_cache_names.keys())
+ lst.sort()
+ for k in lst:
+ Logs.pprint('GREEN',k)
+class StepContext(BuildContext):
+ '''executes tasks in a step-by-step fashion, for debugging'''
+ cmd='step'
+ def __init__(self,**kw):
+ super(StepContext,self).__init__(**kw)
+ self.files=Options.options.files
+ def compile(self):
+ if not self.files:
+ Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"')
+ BuildContext.compile(self)
+ return
+ targets=None
+ if self.targets and self.targets!='*':
+ targets=self.targets.split(',')
+ for g in self.groups:
+ for tg in g:
+ if targets and tg.name not in targets:
+ continue
+ try:
+ f=tg.post
+ except AttributeError:
+ pass
+ else:
+ f()
+ for pat in self.files.split(','):
+ matcher=self.get_matcher(pat)
+ for tg in g:
+ if isinstance(tg,Task.TaskBase):
+ lst=[tg]
+ else:
+ lst=tg.tasks
+ for tsk in lst:
+ do_exec=False
+ for node in getattr(tsk,'inputs',[]):
+ if matcher(node,output=False):
+ do_exec=True
+ break
+ for node in getattr(tsk,'outputs',[]):
+ if matcher(node,output=True):
+ do_exec=True
+ break
+ if do_exec:
+ ret=tsk.run()
+ Logs.info('%s -> exit %r'%(str(tsk),ret))
+ def get_matcher(self,pat):
+ inn=True
+ out=True
+ if pat.startswith('in:'):
+ out=False
+ pat=pat.replace('in:','')
+ elif pat.startswith('out:'):
+ inn=False
+ pat=pat.replace('out:','')
+ anode=self.root.find_node(pat)
+ pattern=None
+ if not anode:
+ if not pat.startswith('^'):
+ pat='^.+?%s'%pat
+ if not pat.endswith('$'):
+ pat='%s$'%pat
+ pattern=re.compile(pat)
+ def match(node,output):
+ if output==True and not out:
+ return False
+ if output==False and not inn:
+ return False
+ if anode:
+ return anode==node
+ else:
+ return pattern.match(node.abspath())
+ return match
+BuildContext.store=Utils.nogc(BuildContext.store)
+BuildContext.restore=Utils.nogc(BuildContext.restore)
diff --git a/waflib/ConfigSet.py b/waflib/ConfigSet.py
new file mode 100644
index 0000000..42ecde3
--- /dev/null
+++ b/waflib/ConfigSet.py
@@ -0,0 +1,152 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import copy,re,os
+from waflib import Logs,Utils
+re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M)
+class ConfigSet(object):
+ __slots__=('table','parent')
+ def __init__(self,filename=None):
+ self.table={}
+ if filename:
+ self.load(filename)
+ def __contains__(self,key):
+ if key in self.table:return True
+ try:return self.parent.__contains__(key)
+ except AttributeError:return False
+ def keys(self):
+ keys=set()
+ cur=self
+ while cur:
+ keys.update(cur.table.keys())
+ cur=getattr(cur,'parent',None)
+ keys=list(keys)
+ keys.sort()
+ return keys
+ def __str__(self):
+ return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in self.keys()])
+ def __getitem__(self,key):
+ try:
+ while 1:
+ x=self.table.get(key,None)
+ if not x is None:
+ return x
+ self=self.parent
+ except AttributeError:
+ return[]
+ def __setitem__(self,key,value):
+ self.table[key]=value
+ def __delitem__(self,key):
+ self[key]=[]
+ def __getattr__(self,name):
+ if name in self.__slots__:
+ return object.__getattr__(self,name)
+ else:
+ return self[name]
+ def __setattr__(self,name,value):
+ if name in self.__slots__:
+ object.__setattr__(self,name,value)
+ else:
+ self[name]=value
+ def __delattr__(self,name):
+ if name in self.__slots__:
+ object.__delattr__(self,name)
+ else:
+ del self[name]
+ def derive(self):
+ newenv=ConfigSet()
+ newenv.parent=self
+ return newenv
+ def detach(self):
+ tbl=self.get_merged_dict()
+ try:
+ delattr(self,'parent')
+ except AttributeError:
+ pass
+ else:
+ keys=tbl.keys()
+ for x in keys:
+ tbl[x]=copy.deepcopy(tbl[x])
+ self.table=tbl
+ def get_flat(self,key):
+ s=self[key]
+ if isinstance(s,str):return s
+ return' '.join(s)
+ def _get_list_value_for_modification(self,key):
+ try:
+ value=self.table[key]
+ except KeyError:
+ try:value=self.parent[key]
+ except AttributeError:value=[]
+ if isinstance(value,list):
+ value=value[:]
+ else:
+ value=[value]
+ else:
+ if not isinstance(value,list):
+ value=[value]
+ self.table[key]=value
+ return value
+ def append_value(self,var,val):
+ current_value=self._get_list_value_for_modification(var)
+ if isinstance(val,str):
+ val=[val]
+ current_value.extend(val)
+ def prepend_value(self,var,val):
+ if isinstance(val,str):
+ val=[val]
+ self.table[var]=val+self._get_list_value_for_modification(var)
+ def append_unique(self,var,val):
+ if isinstance(val,str):
+ val=[val]
+ current_value=self._get_list_value_for_modification(var)
+ for x in val:
+ if x not in current_value:
+ current_value.append(x)
+ def get_merged_dict(self):
+ table_list=[]
+ env=self
+ while 1:
+ table_list.insert(0,env.table)
+ try:env=env.parent
+ except AttributeError:break
+ merged_table={}
+ for table in table_list:
+ merged_table.update(table)
+ return merged_table
+ def store(self,filename):
+ try:
+ os.makedirs(os.path.split(filename)[0])
+ except OSError:
+ pass
+ f=None
+ try:
+ f=open(filename,'w')
+ merged_table=self.get_merged_dict()
+ keys=list(merged_table.keys())
+ keys.sort()
+ for k in keys:
+ if k!='undo_stack':
+ f.write('%s = %r\n'%(k,merged_table[k]))
+ finally:
+ if f:
+ f.close()
+ def load(self,filename):
+ tbl=self.table
+ code=Utils.readf(filename,m='rU')
+ for m in re_imp.finditer(code):
+ g=m.group
+ tbl[g(2)]=eval(g(3))
+ Logs.debug('env: %s'%str(self.table))
+ def update(self,d):
+ for k,v in d.items():
+ self[k]=v
+ def stash(self):
+ orig=self.table
+ tbl=self.table=self.table.copy()
+ for x in tbl.keys():
+ tbl[x]=copy.deepcopy(tbl[x])
+ self.undo_stack=self.undo_stack+[orig]
+ def revert(self):
+ self.table=self.undo_stack.pop(-1)
diff --git a/waflib/Configure.py b/waflib/Configure.py
new file mode 100644
index 0000000..e8148d5
--- /dev/null
+++ b/waflib/Configure.py
@@ -0,0 +1,317 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,shlex,sys,time
+from waflib import ConfigSet,Utils,Options,Logs,Context,Build,Errors
+try:
+ from urllib import request
+except ImportError:
+ from urllib import urlopen
+else:
+ urlopen=request.urlopen
+BREAK='break'
+CONTINUE='continue'
+WAF_CONFIG_LOG='config.log'
+autoconfig=False
+conf_template='''# project %(app)s configured on %(now)s by
+# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
+# using %(args)s
+#'''
+def download_check(node):
+ pass
+def download_tool(tool,force=False,ctx=None):
+ for x in Utils.to_list(Context.remote_repo):
+ for sub in Utils.to_list(Context.remote_locs):
+ url='/'.join((x,sub,tool+'.py'))
+ try:
+ web=urlopen(url)
+ try:
+ if web.getcode()!=200:
+ continue
+ except AttributeError:
+ pass
+ except Exception:
+ continue
+ else:
+ tmp=ctx.root.make_node(os.sep.join((Context.waf_dir,'waflib','extras',tool+'.py')))
+ tmp.write(web.read(),'wb')
+ Logs.warn('Downloaded %s from %s'%(tool,url))
+ download_check(tmp)
+ try:
+ module=Context.load_tool(tool)
+ except Exception:
+ Logs.warn('The tool %s from %s is unusable'%(tool,url))
+ try:
+ tmp.delete()
+ except Exception:
+ pass
+ continue
+ return module
+ raise Errors.WafError('Could not load the Waf tool')
+class ConfigurationContext(Context.Context):
+ '''configures the project'''
+ cmd='configure'
+ error_handlers=[]
+ def __init__(self,**kw):
+ super(ConfigurationContext,self).__init__(**kw)
+ self.environ=dict(os.environ)
+ self.all_envs={}
+ self.top_dir=None
+ self.out_dir=None
+ self.tools=[]
+ self.hash=0
+ self.files=[]
+ self.tool_cache=[]
+ self.setenv('')
+ def setenv(self,name,env=None):
+ if name not in self.all_envs or env:
+ if not env:
+ env=ConfigSet.ConfigSet()
+ self.prepare_env(env)
+ else:
+ env=env.derive()
+ self.all_envs[name]=env
+ self.variant=name
+ def get_env(self):
+ return self.all_envs[self.variant]
+ def set_env(self,val):
+ self.all_envs[self.variant]=val
+ env=property(get_env,set_env)
+ def init_dirs(self):
+ top=self.top_dir
+ if not top:
+ top=Options.options.top
+ if not top:
+ top=getattr(Context.g_module,Context.TOP,None)
+ if not top:
+ top=self.path.abspath()
+ top=os.path.abspath(top)
+ self.srcnode=(os.path.isabs(top)and self.root or self.path).find_dir(top)
+ assert(self.srcnode)
+ out=self.out_dir
+ if not out:
+ out=Options.options.out
+ if not out:
+ out=getattr(Context.g_module,Context.OUT,None)
+ if not out:
+ out=Options.lockfile.replace('.lock-waf_%s_'%sys.platform,'').replace('.lock-waf','')
+ self.bldnode=(os.path.isabs(out)and self.root or self.path).make_node(out)
+ self.bldnode.mkdir()
+ if not os.path.isdir(self.bldnode.abspath()):
+ conf.fatal('Could not create the build directory %s'%self.bldnode.abspath())
+ def execute(self):
+ self.init_dirs()
+ self.cachedir=self.bldnode.make_node(Build.CACHE_DIR)
+ self.cachedir.mkdir()
+ path=os.path.join(self.bldnode.abspath(),WAF_CONFIG_LOG)
+ self.logger=Logs.make_logger(path,'cfg')
+ app=getattr(Context.g_module,'APPNAME','')
+ if app:
+ ver=getattr(Context.g_module,'VERSION','')
+ if ver:
+ app="%s (%s)"%(app,ver)
+ now=time.ctime()
+ pyver=sys.hexversion
+ systype=sys.platform
+ args=" ".join(sys.argv)
+ wafver=Context.WAFVERSION
+ abi=Context.ABI
+ self.to_log(conf_template%vars())
+ self.msg('Setting top to',self.srcnode.abspath())
+ self.msg('Setting out to',self.bldnode.abspath())
+ if id(self.srcnode)==id(self.bldnode):
+ Logs.warn('Setting top == out (remember to use "update_outputs")')
+ elif id(self.path)!=id(self.srcnode):
+ if self.srcnode.is_child_of(self.path):
+ Logs.warn('Are you certain that you do not want to set top="." ?')
+ super(ConfigurationContext,self).execute()
+ self.store()
+ Context.top_dir=self.srcnode.abspath()
+ Context.out_dir=self.bldnode.abspath()
+ env=ConfigSet.ConfigSet()
+ env['argv']=sys.argv
+ env['options']=Options.options.__dict__
+ env.run_dir=Context.run_dir
+ env.top_dir=Context.top_dir
+ env.out_dir=Context.out_dir
+ env['hash']=self.hash
+ env['files']=self.files
+ env['environ']=dict(self.environ)
+ if not self.env.NO_LOCK_IN_RUN:
+ env.store(Context.run_dir+os.sep+Options.lockfile)
+ if not self.env.NO_LOCK_IN_TOP:
+ env.store(Context.top_dir+os.sep+Options.lockfile)
+ if not self.env.NO_LOCK_IN_OUT:
+ env.store(Context.out_dir+os.sep+Options.lockfile)
+ def prepare_env(self,env):
+ if not env.PREFIX:
+ if Options.options.prefix or Utils.is_win32:
+ env.PREFIX=os.path.abspath(os.path.expanduser(Options.options.prefix))
+ else:
+ env.PREFIX=''
+ if not env.BINDIR:
+ env.BINDIR=Utils.subst_vars('${PREFIX}/bin',env)
+ if not env.LIBDIR:
+ env.LIBDIR=Utils.subst_vars('${PREFIX}/lib',env)
+ def store(self):
+ n=self.cachedir.make_node('build.config.py')
+ n.write('version = 0x%x\ntools = %r\n'%(Context.HEXVERSION,self.tools))
+ if not self.all_envs:
+ self.fatal('nothing to store in the configuration context!')
+ for key in self.all_envs:
+ tmpenv=self.all_envs[key]
+ tmpenv.store(os.path.join(self.cachedir.abspath(),key+Build.CACHE_SUFFIX))
+ def load(self,input,tooldir=None,funs=None,download=True):
+ tools=Utils.to_list(input)
+ if tooldir:tooldir=Utils.to_list(tooldir)
+ for tool in tools:
+ mag=(tool,id(self.env),funs)
+ if mag in self.tool_cache:
+ self.to_log('(tool %s is already loaded, skipping)'%tool)
+ continue
+ self.tool_cache.append(mag)
+ module=None
+ try:
+ module=Context.load_tool(tool,tooldir)
+ except ImportError ,e:
+ if Options.options.download:
+ module=download_tool(tool,ctx=self)
+ if not module:
+ self.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s'%(tool,sys.path,e))
+ else:
+ self.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s'%(tool,sys.path,e))
+ except Exception ,e:
+ self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs))
+ self.to_log(Utils.ex_stack())
+ raise
+ if funs is not None:
+ self.eval_rules(funs)
+ else:
+ func=getattr(module,'configure',None)
+ if func:
+ if type(func)is type(Utils.readf):func(self)
+ else:self.eval_rules(func)
+ self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs})
+ def post_recurse(self,node):
+ super(ConfigurationContext,self).post_recurse(node)
+ self.hash=hash((self.hash,node.read('rb')))
+ self.files.append(node.abspath())
+ def eval_rules(self,rules):
+ self.rules=Utils.to_list(rules)
+ for x in self.rules:
+ f=getattr(self,x)
+ if not f:self.fatal("No such method '%s'."%x)
+ try:
+ f()
+ except Exception ,e:
+ ret=self.err_handler(x,e)
+ if ret==BREAK:
+ break
+ elif ret==CONTINUE:
+ continue
+ else:
+ raise
+ def err_handler(self,fun,error):
+ pass
+def conf(f):
+ def fun(*k,**kw):
+ mandatory=True
+ if'mandatory'in kw:
+ mandatory=kw['mandatory']
+ del kw['mandatory']
+ try:
+ return f(*k,**kw)
+ except Errors.ConfigurationError:
+ if mandatory:
+ raise
+ setattr(ConfigurationContext,f.__name__,fun)
+ setattr(Build.BuildContext,f.__name__,fun)
+ return f
+@conf
+def add_os_flags(self,var,dest=None):
+ try:self.env.append_value(dest or var,shlex.split(self.environ[var]))
+ except KeyError:pass
+@conf
+def cmd_to_list(self,cmd):
+ if isinstance(cmd,str)and cmd.find(' '):
+ try:
+ os.stat(cmd)
+ except OSError:
+ return shlex.split(cmd)
+ else:
+ return[cmd]
+ return cmd
+@conf
+def check_waf_version(self,mini='1.6.99',maxi='1.8.0'):
+ self.start_msg('Checking for waf version in %s-%s'%(str(mini),str(maxi)))
+ ver=Context.HEXVERSION
+ if Utils.num2ver(mini)>ver:
+ self.fatal('waf version should be at least %r (%r found)'%(Utils.num2ver(mini),ver))
+ if Utils.num2ver(maxi)<ver:
+ self.fatal('waf version should be at most %r (%r found)'%(Utils.num2ver(maxi),ver))
+ self.end_msg('ok')
+@conf
+def find_file(self,filename,path_list=[]):
+ for n in Utils.to_list(filename):
+ for d in Utils.to_list(path_list):
+ p=os.path.join(d,n)
+ if os.path.exists(p):
+ return p
+ self.fatal('Could not find %r'%filename)
+@conf
+def find_program(self,filename,**kw):
+ exts=kw.get('exts',Utils.is_win32 and'.exe,.com,.bat,.cmd'or',.sh,.pl,.py')
+ environ=kw.get('environ',os.environ)
+ ret=''
+ filename=Utils.to_list(filename)
+ var=kw.get('var','')
+ if not var:
+ var=filename[0].upper()
+ if self.env[var]:
+ ret=self.env[var]
+ elif var in environ:
+ ret=environ[var]
+ path_list=kw.get('path_list','')
+ if not ret:
+ if path_list:
+ path_list=Utils.to_list(path_list)
+ else:
+ path_list=environ.get('PATH','').split(os.pathsep)
+ if not isinstance(filename,list):
+ filename=[filename]
+ for a in exts.split(','):
+ if ret:
+ break
+ for b in filename:
+ if ret:
+ break
+ for c in path_list:
+ if ret:
+ break
+ x=os.path.expanduser(os.path.join(c,b+a))
+ if os.path.isfile(x):
+ ret=x
+ if not ret and Utils.winreg:
+ ret=Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER,filename)
+ if not ret and Utils.winreg:
+ ret=Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE,filename)
+ self.msg('Checking for program '+','.join(filename),ret or False)
+ self.to_log('find program=%r paths=%r var=%r -> %r'%(filename,path_list,var,ret))
+ if not ret:
+ self.fatal(kw.get('errmsg','')or'Could not find the program %s'%','.join(filename))
+ if var:
+ self.env[var]=ret
+ return ret
+@conf
+def find_perl_program(self,filename,path_list=[],var=None,environ=None,exts=''):
+ try:
+ app=self.find_program(filename,path_list=path_list,var=var,environ=environ,exts=exts)
+ except Exception:
+ self.find_program('perl',var='PERL')
+ app=self.find_file(filename,os.environ['PATH'].split(os.pathsep))
+ if not app:
+ raise
+ if var:
+ self.env[var]=Utils.to_list(self.env['PERL'])+[app]
+ self.msg('Checking for %r'%filename,app)
diff --git a/waflib/Context.py b/waflib/Context.py
new file mode 100644
index 0000000..494ece2
--- /dev/null
+++ b/waflib/Context.py
@@ -0,0 +1,319 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,imp,sys
+from waflib import Utils,Errors,Logs
+import waflib.Node
+HEXVERSION=0x1070900
+WAFVERSION="1.7.9"
+WAFREVISION="9e92489dbc008e4abae9c147b1d63b48296797c2"
+ABI=98
+DBFILE='.wafpickle-%s-%d-%d'%(sys.platform,sys.hexversion,ABI)
+APPNAME='APPNAME'
+VERSION='VERSION'
+TOP='top'
+OUT='out'
+WSCRIPT_FILE='wscript'
+launch_dir=''
+run_dir=''
+top_dir=''
+out_dir=''
+waf_dir=''
+local_repo=''
+remote_repo='http://waf.googlecode.com/git/'
+remote_locs=['waflib/extras','waflib/Tools']
+g_module=None
+STDOUT=1
+STDERR=-1
+BOTH=0
+classes=[]
+def create_context(cmd_name,*k,**kw):
+ global classes
+ for x in classes:
+ if x.cmd==cmd_name:
+ return x(*k,**kw)
+ ctx=Context(*k,**kw)
+ ctx.fun=cmd_name
+ return ctx
+class store_context(type):
+ def __init__(cls,name,bases,dict):
+ super(store_context,cls).__init__(name,bases,dict)
+ name=cls.__name__
+ if name=='ctx'or name=='Context':
+ return
+ try:
+ cls.cmd
+ except AttributeError:
+ raise Errors.WafError('Missing command for the context class %r (cmd)'%name)
+ if not getattr(cls,'fun',None):
+ cls.fun=cls.cmd
+ global classes
+ classes.insert(0,cls)
+ctx=store_context('ctx',(object,),{})
+class Context(ctx):
+ errors=Errors
+ tools={}
+ def __init__(self,**kw):
+ try:
+ rd=kw['run_dir']
+ except KeyError:
+ global run_dir
+ rd=run_dir
+ class node_class(waflib.Node.Node):
+ pass
+ self.node_class=node_class
+ self.node_class.__module__="waflib.Node"
+ self.node_class.__name__="Nod3"
+ self.node_class.ctx=self
+ self.root=self.node_class('',None)
+ self.cur_script=None
+ self.path=self.root.find_dir(rd)
+ self.stack_path=[]
+ self.exec_dict={'ctx':self,'conf':self,'bld':self,'opt':self}
+ self.logger=None
+ def __hash__(self):
+ return id(self)
+ def load(self,tool_list,*k,**kw):
+ tools=Utils.to_list(tool_list)
+ path=Utils.to_list(kw.get('tooldir',''))
+ for t in tools:
+ module=load_tool(t,path)
+ fun=getattr(module,kw.get('name',self.fun),None)
+ if fun:
+ fun(self)
+ def execute(self):
+ global g_module
+ self.recurse([os.path.dirname(g_module.root_path)])
+ def pre_recurse(self,node):
+ self.stack_path.append(self.cur_script)
+ self.cur_script=node
+ self.path=node.parent
+ def post_recurse(self,node):
+ self.cur_script=self.stack_path.pop()
+ if self.cur_script:
+ self.path=self.cur_script.parent
+ def recurse(self,dirs,name=None,mandatory=True,once=True):
+ try:
+ cache=self.recurse_cache
+ except AttributeError:
+ cache=self.recurse_cache={}
+ for d in Utils.to_list(dirs):
+ if not os.path.isabs(d):
+ d=os.path.join(self.path.abspath(),d)
+ WSCRIPT=os.path.join(d,WSCRIPT_FILE)
+ WSCRIPT_FUN=WSCRIPT+'_'+(name or self.fun)
+ node=self.root.find_node(WSCRIPT_FUN)
+ if node and(not once or node not in cache):
+ cache[node]=True
+ self.pre_recurse(node)
+ try:
+ function_code=node.read('rU')
+ exec(compile(function_code,node.abspath(),'exec'),self.exec_dict)
+ finally:
+ self.post_recurse(node)
+ elif not node:
+ node=self.root.find_node(WSCRIPT)
+ tup=(node,name or self.fun)
+ if node and(not once or tup not in cache):
+ cache[tup]=True
+ self.pre_recurse(node)
+ try:
+ wscript_module=load_module(node.abspath())
+ user_function=getattr(wscript_module,(name or self.fun),None)
+ if not user_function:
+ if not mandatory:
+ continue
+ raise Errors.WafError('No function %s defined in %s'%(name or self.fun,node.abspath()))
+ user_function(self)
+ finally:
+ self.post_recurse(node)
+ elif not node:
+ if not mandatory:
+ continue
+ raise Errors.WafError('No wscript file in directory %s'%d)
+ def exec_command(self,cmd,**kw):
+ subprocess=Utils.subprocess
+ kw['shell']=isinstance(cmd,str)
+ Logs.debug('runner: %r'%cmd)
+ Logs.debug('runner_env: kw=%s'%kw)
+ if self.logger:
+ self.logger.info(cmd)
+ if'stdout'not in kw:
+ kw['stdout']=subprocess.PIPE
+ if'stderr'not in kw:
+ kw['stderr']=subprocess.PIPE
+ try:
+ if kw['stdout']or kw['stderr']:
+ p=subprocess.Popen(cmd,**kw)
+ (out,err)=p.communicate()
+ ret=p.returncode
+ else:
+ out,err=(None,None)
+ ret=subprocess.Popen(cmd,**kw).wait()
+ except Exception ,e:
+ raise Errors.WafError('Execution failure: %s'%str(e),ex=e)
+ if out:
+ if not isinstance(out,str):
+ out=out.decode(sys.stdout.encoding or'iso8859-1')
+ if self.logger:
+ self.logger.debug('out: %s'%out)
+ else:
+ sys.stdout.write(out)
+ if err:
+ if not isinstance(err,str):
+ err=err.decode(sys.stdout.encoding or'iso8859-1')
+ if self.logger:
+ self.logger.error('err: %s'%err)
+ else:
+ sys.stderr.write(err)
+ return ret
+ def cmd_and_log(self,cmd,**kw):
+ subprocess=Utils.subprocess
+ kw['shell']=isinstance(cmd,str)
+ Logs.debug('runner: %r'%cmd)
+ if'quiet'in kw:
+ quiet=kw['quiet']
+ del kw['quiet']
+ else:
+ quiet=None
+ if'output'in kw:
+ to_ret=kw['output']
+ del kw['output']
+ else:
+ to_ret=STDOUT
+ kw['stdout']=kw['stderr']=subprocess.PIPE
+ if quiet is None:
+ self.to_log(cmd)
+ try:
+ p=subprocess.Popen(cmd,**kw)
+ (out,err)=p.communicate()
+ except Exception ,e:
+ raise Errors.WafError('Execution failure: %s'%str(e),ex=e)
+ if not isinstance(out,str):
+ out=out.decode(sys.stdout.encoding or'iso8859-1')
+ if not isinstance(err,str):
+ err=err.decode(sys.stdout.encoding or'iso8859-1')
+ if out and quiet!=STDOUT and quiet!=BOTH:
+ self.to_log('out: %s'%out)
+ if err and quiet!=STDERR and quiet!=BOTH:
+ self.to_log('err: %s'%err)
+ if p.returncode:
+ e=Errors.WafError('Command %r returned %r'%(cmd,p.returncode))
+ e.returncode=p.returncode
+ e.stderr=err
+ e.stdout=out
+ raise e
+ if to_ret==BOTH:
+ return(out,err)
+ elif to_ret==STDERR:
+ return err
+ return out
+ def fatal(self,msg,ex=None):
+ if self.logger:
+ self.logger.info('from %s: %s'%(self.path.abspath(),msg))
+ try:
+ msg='%s\n(complete log in %s)'%(msg,self.logger.handlers[0].baseFilename)
+ except Exception:
+ pass
+ raise self.errors.ConfigurationError(msg,ex=ex)
+ def to_log(self,msg):
+ if not msg:
+ return
+ if self.logger:
+ self.logger.info(msg)
+ else:
+ sys.stderr.write(str(msg))
+ sys.stderr.flush()
+ def msg(self,msg,result,color=None):
+ self.start_msg(msg)
+ if not isinstance(color,str):
+ color=result and'GREEN'or'YELLOW'
+ self.end_msg(result,color)
+ def start_msg(self,msg):
+ try:
+ if self.in_msg:
+ self.in_msg+=1
+ return
+ except AttributeError:
+ self.in_msg=0
+ self.in_msg+=1
+ try:
+ self.line_just=max(self.line_just,len(msg))
+ except AttributeError:
+ self.line_just=max(40,len(msg))
+ for x in(self.line_just*'-',msg):
+ self.to_log(x)
+ Logs.pprint('NORMAL',"%s :"%msg.ljust(self.line_just),sep='')
+ def end_msg(self,result,color=None):
+ self.in_msg-=1
+ if self.in_msg:
+ return
+ defcolor='GREEN'
+ if result==True:
+ msg='ok'
+ elif result==False:
+ msg='not found'
+ defcolor='YELLOW'
+ else:
+ msg=str(result)
+ self.to_log(msg)
+ Logs.pprint(color or defcolor,msg)
+ def load_special_tools(self,var,ban=[]):
+ global waf_dir
+ lst=self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
+ for x in lst:
+ if not x.name in ban:
+ load_tool(x.name.replace('.py',''))
+cache_modules={}
+def load_module(path):
+ try:
+ return cache_modules[path]
+ except KeyError:
+ pass
+ module=imp.new_module(WSCRIPT_FILE)
+ try:
+ code=Utils.readf(path,m='rU')
+ except(IOError,OSError):
+ raise Errors.WafError('Could not read the file %r'%path)
+ module_dir=os.path.dirname(path)
+ sys.path.insert(0,module_dir)
+ exec(compile(code,path,'exec'),module.__dict__)
+ sys.path.remove(module_dir)
+ cache_modules[path]=module
+ return module
+def load_tool(tool,tooldir=None):
+ if tool=='java':
+ tool='javaw'
+ elif tool=='compiler_cc':
+ tool='compiler_c'
+ else:
+ tool=tool.replace('++','xx')
+ if tooldir:
+ assert isinstance(tooldir,list)
+ sys.path=tooldir+sys.path
+ try:
+ __import__(tool)
+ ret=sys.modules[tool]
+ Context.tools[tool]=ret
+ return ret
+ finally:
+ for d in tooldir:
+ sys.path.remove(d)
+ else:
+ global waf_dir
+ try:
+ os.stat(os.path.join(waf_dir,'waflib','extras',tool+'.py'))
+ except OSError:
+ try:
+ os.stat(os.path.join(waf_dir,'waflib','Tools',tool+'.py'))
+ except OSError:
+ d=tool
+ else:
+ d='waflib.Tools.%s'%tool
+ else:
+ d='waflib.extras.%s'%tool
+ __import__(d)
+ ret=sys.modules[d]
+ Context.tools[tool]=ret
+ return ret
diff --git a/waflib/Errors.py b/waflib/Errors.py
new file mode 100644
index 0000000..aacc1a9
--- /dev/null
+++ b/waflib/Errors.py
@@ -0,0 +1,37 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import traceback,sys
+class WafError(Exception):
+ def __init__(self,msg='',ex=None):
+ self.msg=msg
+ assert not isinstance(msg,Exception)
+ self.stack=[]
+ if ex:
+ if not msg:
+ self.msg=str(ex)
+ if isinstance(ex,WafError):
+ self.stack=ex.stack
+ else:
+ self.stack=traceback.extract_tb(sys.exc_info()[2])
+ self.stack+=traceback.extract_stack()[:-1]
+ self.verbose_msg=''.join(traceback.format_list(self.stack))
+ def __str__(self):
+ return str(self.msg)
+class BuildError(WafError):
+ def __init__(self,error_tasks=[]):
+ self.tasks=error_tasks
+ WafError.__init__(self,self.format_error())
+ def format_error(self):
+ lst=['Build failed']
+ for tsk in self.tasks:
+ txt=tsk.format_error()
+ if txt:lst.append(txt)
+ return'\n'.join(lst)
+class ConfigurationError(WafError):
+ pass
+class TaskRescan(WafError):
+ pass
+class TaskNotReady(WafError):
+ pass
diff --git a/waflib/Logs.py b/waflib/Logs.py
new file mode 100644
index 0000000..d6d4ddd
--- /dev/null
+++ b/waflib/Logs.py
@@ -0,0 +1,176 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,re,traceback,sys
+try:
+ import threading
+except ImportError:
+ pass
+else:
+ wlock=threading.Lock()
+ class sync_stream(object):
+ def __init__(self,stream):
+ self.stream=stream
+ self.encoding=self.stream.encoding
+ def write(self,txt):
+ try:
+ wlock.acquire()
+ self.stream.write(txt)
+ self.stream.flush()
+ finally:
+ wlock.release()
+ def fileno(self):
+ return self.stream.fileno()
+ def flush(self):
+ self.stream.flush()
+ def isatty(self):
+ return self.stream.isatty()
+ _nocolor=os.environ.get('NOCOLOR','no')not in('no','0','false')
+ try:
+ if not _nocolor:
+ import waflib.ansiterm
+ except ImportError:
+ pass
+ if not os.environ.get('NOSYNC',False):
+ if id(sys.stdout)==id(sys.__stdout__):
+ sys.stdout=sync_stream(sys.stdout)
+ sys.stderr=sync_stream(sys.stderr)
+import logging
+LOG_FORMAT="%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
+HOUR_FORMAT="%H:%M:%S"
+zones=''
+verbose=0
+colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',}
+got_tty=not os.environ.get('TERM','dumb')in['dumb','emacs']
+if got_tty:
+ try:
+ got_tty=sys.stderr.isatty()and sys.stdout.isatty()
+ except AttributeError:
+ got_tty=False
+if(not got_tty and os.environ.get('TERM','dumb')!='msys')or _nocolor:
+ colors_lst['USE']=False
+def get_term_cols():
+ return 80
+try:
+ import struct,fcntl,termios
+except ImportError:
+ pass
+else:
+ if got_tty:
+ def get_term_cols_real():
+ dummy_lines,cols=struct.unpack("HHHH",fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[:2]
+ return cols
+ try:
+ get_term_cols_real()
+ except Exception:
+ pass
+ else:
+ get_term_cols=get_term_cols_real
+get_term_cols.__doc__="""
+ Get the console width in characters.
+
+ :return: the number of characters per line
+ :rtype: int
+ """
+def get_color(cl):
+ if not colors_lst['USE']:return''
+ return colors_lst.get(cl,'')
+class color_dict(object):
+ def __getattr__(self,a):
+ return get_color(a)
+ def __call__(self,a):
+ return get_color(a)
+colors=color_dict()
+re_log=re.compile(r'(\w+): (.*)',re.M)
+class log_filter(logging.Filter):
+ def __init__(self,name=None):
+ pass
+ def filter(self,rec):
+ rec.c1=colors.PINK
+ rec.c2=colors.NORMAL
+ rec.zone=rec.module
+ if rec.levelno>=logging.INFO:
+ if rec.levelno>=logging.ERROR:
+ rec.c1=colors.RED
+ elif rec.levelno>=logging.WARNING:
+ rec.c1=colors.YELLOW
+ else:
+ rec.c1=colors.GREEN
+ return True
+ m=re_log.match(rec.msg)
+ if m:
+ rec.zone=m.group(1)
+ rec.msg=m.group(2)
+ if zones:
+ return getattr(rec,'zone','')in zones or'*'in zones
+ elif not verbose>2:
+ return False
+ return True
+class formatter(logging.Formatter):
+ def __init__(self):
+ logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT)
+ def format(self,rec):
+ if rec.levelno>=logging.WARNING or rec.levelno==logging.INFO:
+ try:
+ msg=rec.msg.decode('utf-8')
+ except Exception:
+ msg=rec.msg
+ return'%s%s%s'%(rec.c1,msg,rec.c2)
+ return logging.Formatter.format(self,rec)
+log=None
+def debug(*k,**kw):
+ if verbose:
+ k=list(k)
+ k[0]=k[0].replace('\n',' ')
+ global log
+ log.debug(*k,**kw)
+def error(*k,**kw):
+ global log
+ log.error(*k,**kw)
+ if verbose>2:
+ st=traceback.extract_stack()
+ if st:
+ st=st[:-1]
+ buf=[]
+ for filename,lineno,name,line in st:
+ buf.append(' File "%s", line %d, in %s'%(filename,lineno,name))
+ if line:
+ buf.append(' %s'%line.strip())
+ if buf:log.error("\n".join(buf))
+def warn(*k,**kw):
+ global log
+ log.warn(*k,**kw)
+def info(*k,**kw):
+ global log
+ log.info(*k,**kw)
+def init_log():
+ global log
+ log=logging.getLogger('waflib')
+ log.handlers=[]
+ log.filters=[]
+ hdlr=logging.StreamHandler()
+ hdlr.setFormatter(formatter())
+ log.addHandler(hdlr)
+ log.addFilter(log_filter())
+ log.setLevel(logging.DEBUG)
+def make_logger(path,name):
+ logger=logging.getLogger(name)
+ hdlr=logging.FileHandler(path,'w')
+ formatter=logging.Formatter('%(message)s')
+ hdlr.setFormatter(formatter)
+ logger.addHandler(hdlr)
+ logger.setLevel(logging.DEBUG)
+ return logger
+def make_mem_logger(name,to_log,size=10000):
+ from logging.handlers import MemoryHandler
+ logger=logging.getLogger(name)
+ hdlr=MemoryHandler(size,target=to_log)
+ formatter=logging.Formatter('%(message)s')
+ hdlr.setFormatter(formatter)
+ logger.addHandler(hdlr)
+ logger.memhandler=hdlr
+ logger.setLevel(logging.DEBUG)
+ return logger
+def pprint(col,str,label='',sep='\n'):
+ sys.stderr.write("%s%s%s %s%s"%(colors(col),str,colors.NORMAL,label,sep))
diff --git a/waflib/Node.py b/waflib/Node.py
new file mode 100644
index 0000000..79d6bbf
--- /dev/null
+++ b/waflib/Node.py
@@ -0,0 +1,466 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,re,sys,shutil
+from waflib import Utils,Errors
+exclude_regs='''
+**/*~
+**/#*#
+**/.#*
+**/%*%
+**/._*
+**/CVS
+**/CVS/**
+**/.cvsignore
+**/SCCS
+**/SCCS/**
+**/vssver.scc
+**/.svn
+**/.svn/**
+**/BitKeeper
+**/.git
+**/.git/**
+**/.gitignore
+**/.bzr
+**/.bzrignore
+**/.bzr/**
+**/.hg
+**/.hg/**
+**/_MTN
+**/_MTN/**
+**/.arch-ids
+**/{arch}
+**/_darcs
+**/_darcs/**
+**/.DS_Store'''
+def split_path(path):
+ return path.split('/')
+def split_path_cygwin(path):
+ if path.startswith('//'):
+ ret=path.split('/')[2:]
+ ret[0]='/'+ret[0]
+ return ret
+ return path.split('/')
+re_sp=re.compile('[/\\\\]')
+def split_path_win32(path):
+ if path.startswith('\\\\'):
+ ret=re.split(re_sp,path)[2:]
+ ret[0]='\\'+ret[0]
+ return ret
+ return re.split(re_sp,path)
+if sys.platform=='cygwin':
+ split_path=split_path_cygwin
+elif Utils.is_win32:
+ split_path=split_path_win32
+class Node(object):
+ __slots__=('name','sig','children','parent','cache_abspath','cache_isdir','cache_sig')
+ def __init__(self,name,parent):
+ self.name=name
+ self.parent=parent
+ if parent:
+ if name in parent.children:
+ raise Errors.WafError('node %s exists in the parent files %r already'%(name,parent))
+ parent.children[name]=self
+ def __setstate__(self,data):
+ self.name=data[0]
+ self.parent=data[1]
+ if data[2]is not None:
+ self.children=data[2]
+ if data[3]is not None:
+ self.sig=data[3]
+ def __getstate__(self):
+ return(self.name,self.parent,getattr(self,'children',None),getattr(self,'sig',None))
+ def __str__(self):
+ return self.name
+ def __repr__(self):
+ return self.abspath()
+ def __hash__(self):
+ return id(self)
+ def __eq__(self,node):
+ return id(self)==id(node)
+ def __copy__(self):
+ raise Errors.WafError('nodes are not supposed to be copied')
+ def read(self,flags='r',encoding='ISO8859-1'):
+ return Utils.readf(self.abspath(),flags,encoding)
+ def write(self,data,flags='w',encoding='ISO8859-1'):
+ Utils.writef(self.abspath(),data,flags,encoding)
+ def chmod(self,val):
+ os.chmod(self.abspath(),val)
+ def delete(self):
+ try:
+ if getattr(self,'children',None):
+ shutil.rmtree(self.abspath())
+ else:
+ os.unlink(self.abspath())
+ except OSError:
+ pass
+ self.evict()
+ def evict(self):
+ del self.parent.children[self.name]
+ def suffix(self):
+ k=max(0,self.name.rfind('.'))
+ return self.name[k:]
+ def height(self):
+ d=self
+ val=-1
+ while d:
+ d=d.parent
+ val+=1
+ return val
+ def listdir(self):
+ lst=Utils.listdir(self.abspath())
+ lst.sort()
+ return lst
+ def mkdir(self):
+ if getattr(self,'cache_isdir',None):
+ return
+ try:
+ self.parent.mkdir()
+ except OSError:
+ pass
+ if self.name:
+ try:
+ os.makedirs(self.abspath())
+ except OSError:
+ pass
+ if not os.path.isdir(self.abspath()):
+ raise Errors.WafError('Could not create the directory %s'%self.abspath())
+ try:
+ self.children
+ except AttributeError:
+ self.children={}
+ self.cache_isdir=True
+ def find_node(self,lst):
+ if isinstance(lst,str):
+ lst=[x for x in split_path(lst)if x and x!='.']
+ cur=self
+ for x in lst:
+ if x=='..':
+ cur=cur.parent or cur
+ continue
+ try:
+ ch=cur.children
+ except AttributeError:
+ cur.children={}
+ else:
+ try:
+ cur=cur.children[x]
+ continue
+ except KeyError:
+ pass
+ cur=self.__class__(x,cur)
+ try:
+ os.stat(cur.abspath())
+ except OSError:
+ cur.evict()
+ return None
+ ret=cur
+ try:
+ os.stat(ret.abspath())
+ except OSError:
+ ret.evict()
+ return None
+ try:
+ while not getattr(cur.parent,'cache_isdir',None):
+ cur=cur.parent
+ cur.cache_isdir=True
+ except AttributeError:
+ pass
+ return ret
+ def make_node(self,lst):
+ if isinstance(lst,str):
+ lst=[x for x in split_path(lst)if x and x!='.']
+ cur=self
+ for x in lst:
+ if x=='..':
+ cur=cur.parent or cur
+ continue
+ if getattr(cur,'children',{}):
+ if x in cur.children:
+ cur=cur.children[x]
+ continue
+ else:
+ cur.children={}
+ cur=self.__class__(x,cur)
+ return cur
+ def search_node(self,lst):
+ if isinstance(lst,str):
+ lst=[x for x in split_path(lst)if x and x!='.']
+ cur=self
+ for x in lst:
+ if x=='..':
+ cur=cur.parent or cur
+ else:
+ try:
+ cur=cur.children[x]
+ except(AttributeError,KeyError):
+ return None
+ return cur
+ def path_from(self,node):
+ c1=self
+ c2=node
+ c1h=c1.height()
+ c2h=c2.height()
+ lst=[]
+ up=0
+ while c1h>c2h:
+ lst.append(c1.name)
+ c1=c1.parent
+ c1h-=1
+ while c2h>c1h:
+ up+=1
+ c2=c2.parent
+ c2h-=1
+ while id(c1)!=id(c2):
+ lst.append(c1.name)
+ up+=1
+ c1=c1.parent
+ c2=c2.parent
+ for i in range(up):
+ lst.append('..')
+ lst.reverse()
+ return os.sep.join(lst)or'.'
+ def abspath(self):
+ try:
+ return self.cache_abspath
+ except AttributeError:
+ pass
+ if os.sep=='/':
+ if not self.parent:
+ val=os.sep
+ elif not self.parent.name:
+ val=os.sep+self.name
+ else:
+ val=self.parent.abspath()+os.sep+self.name
+ else:
+ if not self.parent:
+ val=''
+ elif not self.parent.name:
+ val=self.name+os.sep
+ else:
+ val=self.parent.abspath().rstrip(os.sep)+os.sep+self.name
+ self.cache_abspath=val
+ return val
+ def is_child_of(self,node):
+ p=self
+ diff=self.height()-node.height()
+ while diff>0:
+ diff-=1
+ p=p.parent
+ return id(p)==id(node)
+ def ant_iter(self,accept=None,maxdepth=25,pats=[],dir=False,src=True,remove=True):
+ dircont=self.listdir()
+ dircont.sort()
+ try:
+ lst=set(self.children.keys())
+ except AttributeError:
+ self.children={}
+ else:
+ if remove:
+ for x in lst-set(dircont):
+ self.children[x].evict()
+ for name in dircont:
+ npats=accept(name,pats)
+ if npats and npats[0]:
+ accepted=[]in npats[0]
+ node=self.make_node([name])
+ isdir=os.path.isdir(node.abspath())
+ if accepted:
+ if isdir:
+ if dir:
+ yield node
+ else:
+ if src:
+ yield node
+ if getattr(node,'cache_isdir',None)or isdir:
+ node.cache_isdir=True
+ if maxdepth:
+ for k in node.ant_iter(accept=accept,maxdepth=maxdepth-1,pats=npats,dir=dir,src=src,remove=remove):
+ yield k
+ raise StopIteration
+ def ant_glob(self,*k,**kw):
+ src=kw.get('src',True)
+ dir=kw.get('dir',False)
+ excl=kw.get('excl',exclude_regs)
+ incl=k and k[0]or kw.get('incl','**')
+ reflags=kw.get('ignorecase',0)and re.I
+ def to_pat(s):
+ lst=Utils.to_list(s)
+ ret=[]
+ for x in lst:
+ x=x.replace('\\','/').replace('//','/')
+ if x.endswith('/'):
+ x+='**'
+ lst2=x.split('/')
+ accu=[]
+ for k in lst2:
+ if k=='**':
+ accu.append(k)
+ else:
+ k=k.replace('.','[.]').replace('*','.*').replace('?','.').replace('+','\\+')
+ k='^%s$'%k
+ try:
+ accu.append(re.compile(k,flags=reflags))
+ except Exception ,e:
+ raise Errors.WafError("Invalid pattern: %s"%k,e)
+ ret.append(accu)
+ return ret
+ def filtre(name,nn):
+ ret=[]
+ for lst in nn:
+ if not lst:
+ pass
+ elif lst[0]=='**':
+ ret.append(lst)
+ if len(lst)>1:
+ if lst[1].match(name):
+ ret.append(lst[2:])
+ else:
+ ret.append([])
+ elif lst[0].match(name):
+ ret.append(lst[1:])
+ return ret
+ def accept(name,pats):
+ nacc=filtre(name,pats[0])
+ nrej=filtre(name,pats[1])
+ if[]in nrej:
+ nacc=[]
+ return[nacc,nrej]
+ ret=[x for x in self.ant_iter(accept=accept,pats=[to_pat(incl),to_pat(excl)],maxdepth=25,dir=dir,src=src,remove=kw.get('remove',True))]
+ if kw.get('flat',False):
+ return' '.join([x.path_from(self)for x in ret])
+ return ret
+ def is_src(self):
+ cur=self
+ x=id(self.ctx.srcnode)
+ y=id(self.ctx.bldnode)
+ while cur.parent:
+ if id(cur)==y:
+ return False
+ if id(cur)==x:
+ return True
+ cur=cur.parent
+ return False
+ def is_bld(self):
+ cur=self
+ y=id(self.ctx.bldnode)
+ while cur.parent:
+ if id(cur)==y:
+ return True
+ cur=cur.parent
+ return False
+ def get_src(self):
+ cur=self
+ x=id(self.ctx.srcnode)
+ y=id(self.ctx.bldnode)
+ lst=[]
+ while cur.parent:
+ if id(cur)==y:
+ lst.reverse()
+ return self.ctx.srcnode.make_node(lst)
+ if id(cur)==x:
+ return self
+ lst.append(cur.name)
+ cur=cur.parent
+ return self
+ def get_bld(self):
+ cur=self
+ x=id(self.ctx.srcnode)
+ y=id(self.ctx.bldnode)
+ lst=[]
+ while cur.parent:
+ if id(cur)==y:
+ return self
+ if id(cur)==x:
+ lst.reverse()
+ return self.ctx.bldnode.make_node(lst)
+ lst.append(cur.name)
+ cur=cur.parent
+ lst.reverse()
+ if lst and Utils.is_win32 and len(lst[0])==2 and lst[0].endswith(':'):
+ lst[0]=lst[0][0]
+ return self.ctx.bldnode.make_node(['__root__']+lst)
+ def find_resource(self,lst):
+ if isinstance(lst,str):
+ lst=[x for x in split_path(lst)if x and x!='.']
+ node=self.get_bld().search_node(lst)
+ if not node:
+ self=self.get_src()
+ node=self.find_node(lst)
+ if node:
+ if os.path.isdir(node.abspath()):
+ return None
+ return node
+ def find_or_declare(self,lst):
+ if isinstance(lst,str):
+ lst=[x for x in split_path(lst)if x and x!='.']
+ node=self.get_bld().search_node(lst)
+ if node:
+ if not os.path.isfile(node.abspath()):
+ node.sig=None
+ node.parent.mkdir()
+ return node
+ self=self.get_src()
+ node=self.find_node(lst)
+ if node:
+ if not os.path.isfile(node.abspath()):
+ node.sig=None
+ node.parent.mkdir()
+ return node
+ node=self.get_bld().make_node(lst)
+ node.parent.mkdir()
+ return node
+ def find_dir(self,lst):
+ if isinstance(lst,str):
+ lst=[x for x in split_path(lst)if x and x!='.']
+ node=self.find_node(lst)
+ try:
+ if not os.path.isdir(node.abspath()):
+ return None
+ except(OSError,AttributeError):
+ return None
+ return node
+ def change_ext(self,ext,ext_in=None):
+ name=self.name
+ if ext_in is None:
+ k=name.rfind('.')
+ if k>=0:
+ name=name[:k]+ext
+ else:
+ name=name+ext
+ else:
+ name=name[:-len(ext_in)]+ext
+ return self.parent.find_or_declare([name])
+ def nice_path(self,env=None):
+ return self.path_from(self.ctx.launch_node())
+ def bldpath(self):
+ return self.path_from(self.ctx.bldnode)
+ def srcpath(self):
+ return self.path_from(self.ctx.srcnode)
+ def relpath(self):
+ cur=self
+ x=id(self.ctx.bldnode)
+ while cur.parent:
+ if id(cur)==x:
+ return self.bldpath()
+ cur=cur.parent
+ return self.srcpath()
+ def bld_dir(self):
+ return self.parent.bldpath()
+ def bld_base(self):
+ s=os.path.splitext(self.name)[0]
+ return self.bld_dir()+os.sep+s
+ def get_bld_sig(self):
+ try:
+ return self.cache_sig
+ except AttributeError:
+ pass
+ if not self.is_bld()or self.ctx.bldnode is self.ctx.srcnode:
+ self.sig=Utils.h_file(self.abspath())
+ self.cache_sig=ret=self.sig
+ return ret
+ search=search_node
+pickle_lock=Utils.threading.Lock()
+class Nod3(Node):
+ pass
diff --git a/waflib/Options.py b/waflib/Options.py
new file mode 100644
index 0000000..21f4254
--- /dev/null
+++ b/waflib/Options.py
@@ -0,0 +1,135 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,tempfile,optparse,sys,re
+from waflib import Logs,Utils,Context
+cmds='distclean configure build install clean uninstall check dist distcheck'.split()
+options={}
+commands=[]
+lockfile=os.environ.get('WAFLOCK','.lock-waf_%s_build'%sys.platform)
+try:cache_global=os.path.abspath(os.environ['WAFCACHE'])
+except KeyError:cache_global=''
+platform=Utils.unversioned_sys_platform()
+class opt_parser(optparse.OptionParser):
+ def __init__(self,ctx):
+ optparse.OptionParser.__init__(self,conflict_handler="resolve",version='waf %s (%s)'%(Context.WAFVERSION,Context.WAFREVISION))
+ self.formatter.width=Logs.get_term_cols()
+ p=self.add_option
+ self.ctx=ctx
+ jobs=ctx.jobs()
+ p('-j','--jobs',dest='jobs',default=jobs,type='int',help='amount of parallel jobs (%r)'%jobs)
+ p('-k','--keep',dest='keep',default=0,action='count',help='keep running happily even if errors are found')
+ p('-v','--verbose',dest='verbose',default=0,action='count',help='verbosity level -v -vv or -vvv [default: 0]')
+ p('--nocache',dest='nocache',default=False,action='store_true',help='ignore the WAFCACHE (if set)')
+ p('--zones',dest='zones',default='',action='store',help='debugging zones (task_gen, deps, tasks, etc)')
+ gr=optparse.OptionGroup(self,'configure options')
+ self.add_option_group(gr)
+ gr.add_option('-o','--out',action='store',default='',help='build dir for the project',dest='out')
+ gr.add_option('-t','--top',action='store',default='',help='src dir for the project',dest='top')
+ default_prefix=os.environ.get('PREFIX')
+ if not default_prefix:
+ if platform=='win32':
+ d=tempfile.gettempdir()
+ default_prefix=d[0].upper()+d[1:]
+ else:
+ default_prefix='/usr/local/'
+ gr.add_option('--prefix',dest='prefix',default=default_prefix,help='installation prefix [default: %r]'%default_prefix)
+ gr.add_option('--download',dest='download',default=False,action='store_true',help='try to download the tools if missing')
+ gr=optparse.OptionGroup(self,'build and install options')
+ self.add_option_group(gr)
+ gr.add_option('-p','--progress',dest='progress_bar',default=0,action='count',help='-p: progress bar; -pp: ide output')
+ gr.add_option('--targets',dest='targets',default='',action='store',help='task generators, e.g. "target1,target2"')
+ gr=optparse.OptionGroup(self,'step options')
+ self.add_option_group(gr)
+ gr.add_option('--files',dest='files',default='',action='store',help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
+ default_destdir=os.environ.get('DESTDIR','')
+ gr=optparse.OptionGroup(self,'install/uninstall options')
+ self.add_option_group(gr)
+ gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir')
+ gr.add_option('-f','--force',dest='force',default=False,action='store_true',help='force file installation')
+ gr.add_option('--distcheck-args',help='arguments to pass to distcheck',default=None,action='store')
+ def get_usage(self):
+ cmds_str={}
+ for cls in Context.classes:
+ if not cls.cmd or cls.cmd=='options':
+ continue
+ s=cls.__doc__ or''
+ cmds_str[cls.cmd]=s
+ if Context.g_module:
+ for(k,v)in Context.g_module.__dict__.items():
+ if k in['options','init','shutdown']:
+ continue
+ if type(v)is type(Context.create_context):
+ if v.__doc__ and not k.startswith('_'):
+ cmds_str[k]=v.__doc__
+ just=0
+ for k in cmds_str:
+ just=max(just,len(k))
+ lst=[' %s: %s'%(k.ljust(just),v)for(k,v)in cmds_str.items()]
+ lst.sort()
+ ret='\n'.join(lst)
+ return'''waf [commands] [options]
+
+Main commands (example: ./waf build -j4)
+%s
+'''%ret
+class OptionsContext(Context.Context):
+ cmd='options'
+ fun='options'
+ def __init__(self,**kw):
+ super(OptionsContext,self).__init__(**kw)
+ self.parser=opt_parser(self)
+ self.option_groups={}
+ def jobs(self):
+ count=int(os.environ.get('JOBS',0))
+ if count<1:
+ if'NUMBER_OF_PROCESSORS'in os.environ:
+ count=int(os.environ.get('NUMBER_OF_PROCESSORS',1))
+ else:
+ if hasattr(os,'sysconf_names'):
+ if'SC_NPROCESSORS_ONLN'in os.sysconf_names:
+ count=int(os.sysconf('SC_NPROCESSORS_ONLN'))
+ elif'SC_NPROCESSORS_CONF'in os.sysconf_names:
+ count=int(os.sysconf('SC_NPROCESSORS_CONF'))
+ if not count and os.name not in('nt','java'):
+ try:
+ tmp=self.cmd_and_log(['sysctl','-n','hw.ncpu'],quiet=0)
+ except Exception:
+ pass
+ else:
+ if re.match('^[0-9]+$',tmp):
+ count=int(tmp)
+ if count<1:
+ count=1
+ elif count>1024:
+ count=1024
+ return count
+ def add_option(self,*k,**kw):
+ return self.parser.add_option(*k,**kw)
+ def add_option_group(self,*k,**kw):
+ try:
+ gr=self.option_groups[k[0]]
+ except KeyError:
+ gr=self.parser.add_option_group(*k,**kw)
+ self.option_groups[k[0]]=gr
+ return gr
+ def get_option_group(self,opt_str):
+ try:
+ return self.option_groups[opt_str]
+ except KeyError:
+ for group in self.parser.option_groups:
+ if group.title==opt_str:
+ return group
+ return None
+ def parse_args(self,_args=None):
+ global options,commands
+ (options,leftover_args)=self.parser.parse_args(args=_args)
+ commands=leftover_args
+ if options.destdir:
+ options.destdir=os.path.abspath(os.path.expanduser(options.destdir))
+ if options.verbose>=1:
+ self.load('errcheck')
+ def execute(self):
+ super(OptionsContext,self).execute()
+ self.parse_args()
diff --git a/waflib/Runner.py b/waflib/Runner.py
new file mode 100644
index 0000000..15b6a27
--- /dev/null
+++ b/waflib/Runner.py
@@ -0,0 +1,197 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import random,atexit
+try:
+ from queue import Queue
+except ImportError:
+ from Queue import Queue
+from waflib import Utils,Task,Errors,Logs
+GAP=10
+class TaskConsumer(Utils.threading.Thread):
+ def __init__(self):
+ Utils.threading.Thread.__init__(self)
+ self.ready=Queue()
+ self.setDaemon(1)
+ self.start()
+ def run(self):
+ try:
+ self.loop()
+ except Exception:
+ pass
+ def loop(self):
+ while 1:
+ tsk=self.ready.get()
+ if not isinstance(tsk,Task.TaskBase):
+ tsk(self)
+ else:
+ tsk.process()
+pool=Queue()
+def get_pool():
+ try:
+ return pool.get(False)
+ except Exception:
+ return TaskConsumer()
+def put_pool(x):
+ pool.put(x)
+def _free_resources():
+ global pool
+ lst=[]
+ while pool.qsize():
+ lst.append(pool.get())
+ for x in lst:
+ x.ready.put(None)
+ for x in lst:
+ x.join()
+ pool=None
+atexit.register(_free_resources)
+class Parallel(object):
+ def __init__(self,bld,j=2):
+ self.numjobs=j
+ self.bld=bld
+ self.outstanding=[]
+ self.frozen=[]
+ self.out=Queue(0)
+ self.count=0
+ self.processed=1
+ self.stop=False
+ self.error=[]
+ self.biter=None
+ self.dirty=False
+ def get_next_task(self):
+ if not self.outstanding:
+ return None
+ return self.outstanding.pop(0)
+ def postpone(self,tsk):
+ if random.randint(0,1):
+ self.frozen.insert(0,tsk)
+ else:
+ self.frozen.append(tsk)
+ def refill_task_list(self):
+ while self.count>self.numjobs*GAP:
+ self.get_out()
+ while not self.outstanding:
+ if self.count:
+ self.get_out()
+ elif self.frozen:
+ try:
+ cond=self.deadlock==self.processed
+ except AttributeError:
+ pass
+ else:
+ if cond:
+ msg='check the build order for the tasks'
+ for tsk in self.frozen:
+ if not tsk.run_after:
+ msg='check the methods runnable_status'
+ break
+ lst=[]
+ for tsk in self.frozen:
+ lst.append('%s\t-> %r'%(repr(tsk),[id(x)for x in tsk.run_after]))
+ raise Errors.WafError('Deadlock detected: %s%s'%(msg,''.join(lst)))
+ self.deadlock=self.processed
+ if self.frozen:
+ self.outstanding+=self.frozen
+ self.frozen=[]
+ elif not self.count:
+ self.outstanding.extend(self.biter.next())
+ self.total=self.bld.total()
+ break
+ def add_more_tasks(self,tsk):
+ if getattr(tsk,'more_tasks',None):
+ self.outstanding+=tsk.more_tasks
+ self.total+=len(tsk.more_tasks)
+ def get_out(self):
+ tsk=self.out.get()
+ if not self.stop:
+ self.add_more_tasks(tsk)
+ self.count-=1
+ self.dirty=True
+ return tsk
+ def error_handler(self,tsk):
+ if not self.bld.keep:
+ self.stop=True
+ self.error.append(tsk)
+ def add_task(self,tsk):
+ try:
+ self.pool
+ except AttributeError:
+ self.init_task_pool()
+ self.ready.put(tsk)
+ def init_task_pool(self):
+ pool=self.pool=[get_pool()for i in range(self.numjobs)]
+ self.ready=Queue(0)
+ def setq(consumer):
+ consumer.ready=self.ready
+ for x in pool:
+ x.ready.put(setq)
+ return pool
+ def free_task_pool(self):
+ def setq(consumer):
+ consumer.ready=Queue(0)
+ self.out.put(self)
+ try:
+ pool=self.pool
+ except AttributeError:
+ pass
+ else:
+ for x in pool:
+ self.ready.put(setq)
+ for x in pool:
+ self.get_out()
+ for x in pool:
+ put_pool(x)
+ self.pool=[]
+ def start(self):
+ self.total=self.bld.total()
+ while not self.stop:
+ self.refill_task_list()
+ tsk=self.get_next_task()
+ if not tsk:
+ if self.count:
+ continue
+ else:
+ break
+ if tsk.hasrun:
+ self.processed+=1
+ continue
+ if self.stop:
+ break
+ try:
+ st=tsk.runnable_status()
+ except Exception:
+ self.processed+=1
+ tsk.err_msg=Utils.ex_stack()
+ if not self.stop and self.bld.keep:
+ tsk.hasrun=Task.SKIPPED
+ if self.bld.keep==1:
+ if Logs.verbose>1 or not self.error:
+ self.error.append(tsk)
+ self.stop=True
+ else:
+ if Logs.verbose>1:
+ self.error.append(tsk)
+ continue
+ tsk.hasrun=Task.EXCEPTION
+ self.error_handler(tsk)
+ continue
+ if st==Task.ASK_LATER:
+ self.postpone(tsk)
+ elif st==Task.SKIP_ME:
+ self.processed+=1
+ tsk.hasrun=Task.SKIPPED
+ self.add_more_tasks(tsk)
+ else:
+ tsk.position=(self.processed,self.total)
+ self.count+=1
+ tsk.master=self
+ self.processed+=1
+ if self.numjobs==1:
+ tsk.process()
+ else:
+ self.add_task(tsk)
+ while self.error and self.count:
+ self.get_out()
+ assert(self.count==0 or self.stop)
+ self.free_task_pool()
diff --git a/waflib/Scripting.py b/waflib/Scripting.py
new file mode 100644
index 0000000..c33ab32
--- /dev/null
+++ b/waflib/Scripting.py
@@ -0,0 +1,373 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,shlex,shutil,traceback,errno,sys,stat
+from waflib import Utils,Configure,Logs,Options,ConfigSet,Context,Errors,Build,Node
+build_dir_override=None
+no_climb_commands=['configure']
+default_cmd="build"
+def waf_entry_point(current_directory,version,wafdir):
+ Logs.init_log()
+ if Context.WAFVERSION!=version:
+ Logs.error('Waf script %r and library %r do not match (directory %r)'%(version,Context.WAFVERSION,wafdir))
+ sys.exit(1)
+ if'--version'in sys.argv:
+ Context.run_dir=current_directory
+ ctx=Context.create_context('options')
+ ctx.curdir=current_directory
+ ctx.parse_args()
+ sys.exit(0)
+ Context.waf_dir=wafdir
+ Context.launch_dir=current_directory
+ no_climb=os.environ.get('NOCLIMB',None)
+ if not no_climb:
+ for k in no_climb_commands:
+ if k in sys.argv:
+ no_climb=True
+ break
+ cur=current_directory
+ while cur:
+ lst=os.listdir(cur)
+ if Options.lockfile in lst:
+ env=ConfigSet.ConfigSet()
+ try:
+ env.load(os.path.join(cur,Options.lockfile))
+ ino=os.stat(cur)[stat.ST_INO]
+ except Exception:
+ pass
+ else:
+ for x in[env.run_dir,env.top_dir,env.out_dir]:
+ if Utils.is_win32:
+ if cur==x:
+ load=True
+ break
+ else:
+ try:
+ ino2=os.stat(x)[stat.ST_INO]
+ except OSError:
+ pass
+ else:
+ if ino==ino2:
+ load=True
+ break
+ else:
+ Logs.warn('invalid lock file in %s'%cur)
+ load=False
+ if load:
+ Context.run_dir=env.run_dir
+ Context.top_dir=env.top_dir
+ Context.out_dir=env.out_dir
+ break
+ if not Context.run_dir:
+ if Context.WSCRIPT_FILE in lst:
+ Context.run_dir=cur
+ next=os.path.dirname(cur)
+ if next==cur:
+ break
+ cur=next
+ if no_climb:
+ break
+ if not Context.run_dir:
+ if'-h'in sys.argv or'--help'in sys.argv:
+ Logs.warn('No wscript file found: the help message may be incomplete')
+ Context.run_dir=current_directory
+ ctx=Context.create_context('options')
+ ctx.curdir=current_directory
+ ctx.parse_args()
+ sys.exit(0)
+ Logs.error('Waf: Run from a directory containing a file named %r'%Context.WSCRIPT_FILE)
+ sys.exit(1)
+ try:
+ os.chdir(Context.run_dir)
+ except OSError:
+ Logs.error('Waf: The folder %r is unreadable'%Context.run_dir)
+ sys.exit(1)
+ try:
+ set_main_module(Context.run_dir+os.sep+Context.WSCRIPT_FILE)
+ except Errors.WafError ,e:
+ Logs.pprint('RED',e.verbose_msg)
+ Logs.error(str(e))
+ sys.exit(1)
+ except Exception ,e:
+ Logs.error('Waf: The wscript in %r is unreadable'%Context.run_dir,e)
+ traceback.print_exc(file=sys.stdout)
+ sys.exit(2)
+ try:
+ run_commands()
+ except Errors.WafError ,e:
+ if Logs.verbose>1:
+ Logs.pprint('RED',e.verbose_msg)
+ Logs.error(e.msg)
+ sys.exit(1)
+ except SystemExit:
+ raise
+ except Exception ,e:
+ traceback.print_exc(file=sys.stdout)
+ sys.exit(2)
+ except KeyboardInterrupt:
+ Logs.pprint('RED','Interrupted')
+ sys.exit(68)
+def set_main_module(file_path):
+ Context.g_module=Context.load_module(file_path)
+ Context.g_module.root_path=file_path
+ def set_def(obj):
+ name=obj.__name__
+ if not name in Context.g_module.__dict__:
+ setattr(Context.g_module,name,obj)
+ for k in[update,dist,distclean,distcheck,update]:
+ set_def(k)
+ if not'init'in Context.g_module.__dict__:
+ Context.g_module.init=Utils.nada
+ if not'shutdown'in Context.g_module.__dict__:
+ Context.g_module.shutdown=Utils.nada
+ if not'options'in Context.g_module.__dict__:
+ Context.g_module.options=Utils.nada
+def parse_options():
+ Context.create_context('options').execute()
+ if not Options.commands:
+ Options.commands=[default_cmd]
+ Options.commands=[x for x in Options.commands if x!='options']
+ Logs.verbose=Options.options.verbose
+ Logs.init_log()
+ if Options.options.zones:
+ Logs.zones=Options.options.zones.split(',')
+ if not Logs.verbose:
+ Logs.verbose=1
+ elif Logs.verbose>0:
+ Logs.zones=['runner']
+ if Logs.verbose>2:
+ Logs.zones=['*']
+def run_command(cmd_name):
+ ctx=Context.create_context(cmd_name)
+ ctx.log_timer=Utils.Timer()
+ ctx.options=Options.options
+ ctx.cmd=cmd_name
+ ctx.execute()
+ return ctx
+def run_commands():
+ parse_options()
+ run_command('init')
+ while Options.commands:
+ cmd_name=Options.commands.pop(0)
+ ctx=run_command(cmd_name)
+ Logs.info('%r finished successfully (%s)'%(cmd_name,str(ctx.log_timer)))
+ run_command('shutdown')
+def _can_distclean(name):
+ for k in'.o .moc .exe'.split():
+ if name.endswith(k):
+ return True
+ return False
+def distclean_dir(dirname):
+ for(root,dirs,files)in os.walk(dirname):
+ for f in files:
+ if _can_distclean(f):
+ fname=root+os.sep+f
+ try:
+ os.unlink(fname)
+ except OSError:
+ Logs.warn('Could not remove %r'%fname)
+ for x in[Context.DBFILE,'config.log']:
+ try:
+ os.unlink(x)
+ except OSError:
+ pass
+ try:
+ shutil.rmtree('c4che')
+ except OSError:
+ pass
def distclean(ctx):
	'''removes the build directory'''
	# Fix: the original used the Python-2-only syntax ``except OSError ,e:``;
	# ``except OSError as e:`` is equivalent and also valid on Python 3.
	lst=os.listdir('.')
	for f in lst:
		if f==Options.lockfile:
			try:
				proj=ConfigSet.ConfigSet(f)
			except IOError:
				Logs.warn('Could not read %r'%f)
				continue
			if proj['out_dir']!=proj['top_dir']:
				# out-of-source build: remove the whole build directory
				try:
					shutil.rmtree(proj['out_dir'])
				except IOError:
					pass
				except OSError as e:
					# a missing directory is fine, anything else is reported
					if e.errno!=errno.ENOENT:
						Logs.warn('project %r cannot be removed'%proj[Context.OUT])
			else:
				# in-source build: only remove known byproducts
				distclean_dir(proj['out_dir'])
			# remove the lockfiles left in the project directories
			for k in(proj['out_dir'],proj['top_dir'],proj['run_dir']):
				try:
					os.remove(os.path.join(k,Options.lockfile))
				except OSError as e:
					if e.errno!=errno.ENOENT:
						Logs.warn('file %r cannot be removed'%f)
		# also remove the unpacked waf directories when no command is pending
		if f.startswith('.waf')and not Options.commands:
			shutil.rmtree(f,ignore_errors=True)
class Dist(Context.Context):
	'''creates an archive containing the project source code'''
	cmd='dist'
	fun='dist'
	# Default archive format; may be 'tar.bz2', 'tar.gz' or 'zip'.
	algo='tar.bz2'
	# Optional mapping algo -> file extension override.
	ext_algo={}
	def execute(self):
		"""Execute the wscript 'dist' function, then create the archive."""
		self.recurse([os.path.dirname(Context.g_module.root_path)])
		self.archive()
	def archive(self):
		"""Create the source archive from self.get_files() in self.base_path."""
		import tarfile
		arch_name=self.get_arch_name()
		try:
			self.base_path
		except AttributeError:
			self.base_path=self.path
		node=self.base_path.make_node(arch_name)
		try:
			# remove a stale archive from a previous run, if any
			node.delete()
		except Exception:
			pass
		files=self.get_files()
		if self.algo.startswith('tar.'):
			tar=tarfile.open(arch_name,'w:'+self.algo.replace('tar.',''))
			for x in files:
				self.add_tar_file(x,tar)
			tar.close()
		elif self.algo=='zip':
			import zipfile
			zip=zipfile.ZipFile(arch_name,'w',compression=zipfile.ZIP_DEFLATED)
			for x in files:
				archive_name=self.get_base_name()+'/'+x.path_from(self.base_path)
				zip.write(x.abspath(),archive_name,zipfile.ZIP_DEFLATED)
			zip.close()
		else:
			self.fatal('Valid algo types are tar.bz2, tar.gz or zip')
		try:
			from hashlib import sha1 as sha
		except ImportError:
			# fallback for very old python versions without hashlib
			from sha import sha
		try:
			digest=" (sha=%r)"%sha(node.read()).hexdigest()
		except Exception:
			digest=''
		Logs.info('New archive created: %s%s'%(self.arch_name,digest))
	def get_tar_path(self,node):
		"""Return the filesystem path used when adding *node* to the tar file."""
		return node.abspath()
	def add_tar_file(self,x,tar):
		"""Add node *x* to the open *tar* archive with ownership normalized to
		root so the archive content is independent of the packaging user."""
		p=self.get_tar_path(x)
		tinfo=tar.gettarinfo(name=p,arcname=self.get_tar_prefix()+'/'+x.path_from(self.base_path))
		tinfo.uid=0
		tinfo.gid=0
		tinfo.uname='root'
		tinfo.gname='root'
		fu=None
		try:
			fu=open(p,'rb')
			tar.addfile(tinfo,fileobj=fu)
		finally:
			if fu:
				fu.close()
	def get_tar_prefix(self):
		"""Return the folder prefix used inside the tar archive (default: base name)."""
		try:
			return self.tar_prefix
		except AttributeError:
			return self.get_base_name()
	def get_arch_name(self):
		"""Return (and cache) the archive file name, e.g. 'appname-1.0.tar.bz2'."""
		try:
			self.arch_name
		except AttributeError:
			self.arch_name=self.get_base_name()+'.'+self.ext_algo.get(self.algo,self.algo)
		return self.arch_name
	def get_base_name(self):
		"""Return (and cache) 'appname-version' read from the wscript module."""
		try:
			self.base_name
		except AttributeError:
			appname=getattr(Context.g_module,Context.APPNAME,'noname')
			version=getattr(Context.g_module,Context.VERSION,'1.0')
			self.base_name=appname+'-'+version
		return self.base_name
	def get_excl(self):
		"""Return (and cache) the ant_glob exclusion patterns, adding the
		build directory when it lies below the base path."""
		try:
			return self.excl
		except AttributeError:
			self.excl=Node.exclude_regs+' **/waf-1.7.* **/.waf-1.7* **/waf3-1.7.* **/.waf3-1.7* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
			nd=self.root.find_node(Context.out_dir)
			if nd:
				self.excl+=' '+nd.path_from(self.base_path)
			return self.excl
	def get_files(self):
		"""Return the nodes to archive: self.files when set, else a glob scan."""
		try:
			files=self.files
		except AttributeError:
			files=self.base_path.ant_glob('**/*',excl=self.get_excl())
		return files
def dist(ctx):
	'''makes a tarball for redistributing the sources'''
	# placeholder command hook: the actual work is done by the Dist context class
	pass
class DistCheck(Dist):
	"""Create the archive like 'dist', then verify that the project can be
	configured, installed and uninstalled from the extracted sources."""
	fun='distcheck'
	cmd='distcheck'
	def execute(self):
		"""Execute the wscript 'distcheck' function, archive, then verify."""
		self.recurse([os.path.dirname(Context.g_module.root_path)])
		self.archive()
		self.check()
	def check(self):
		"""Extract the archive and run configure/install/uninstall inside it."""
		import tempfile,tarfile
		t=None
		try:
			t=tarfile.open(self.get_arch_name())
			for x in t:
				t.extract(x)
		finally:
			if t:
				t.close()
		cfg=[]
		if Options.options.distcheck_args:
			cfg=shlex.split(Options.options.distcheck_args)
		else:
			# reuse the flags given on the current command line
			cfg=[x for x in sys.argv if x.startswith('-')]
		instdir=tempfile.mkdtemp('.inst',self.get_base_name())
		ret=Utils.subprocess.Popen([sys.argv[0],'configure','install','uninstall','--destdir='+instdir]+cfg,cwd=self.get_base_name()).wait()
		if ret:
			raise Errors.WafError('distcheck failed with code %i'%ret)
		if os.path.exists(instdir):
			# uninstall must not leave anything behind
			raise Errors.WafError('distcheck succeeded, but files were left in %s'%instdir)
		shutil.rmtree(self.get_base_name())
def distcheck(ctx):
	'''checks if the project compiles (tarball from 'dist')'''
	# placeholder command hook: the actual work is done by DistCheck
	pass
def update(ctx):
    '''updates the plugins from the *waflib/extras* directory'''
    # explicit file list from the command line, else every extras tool
    tools=Options.options.files.split(',')
    if not tools:
        tools=[f for f in Utils.listdir(Context.waf_dir+'/waflib/extras')if f.endswith('.py')]
    for fname in tools:
        toolname=fname.replace('.py','')
        try:
            Configure.download_tool(toolname,force=True,ctx=ctx)
        except Errors.WafError:
            Logs.error('Could not find the tool %s in the remote repository'%fname)
def autoconfigure(execute_method):
	"""Decorator for BuildContext.execute: when Configure.autoconfig is set,
	re-run 'configure' first if the lockfile is missing, the project was
	moved, or any configuration input file changed."""
	def execute(self):
		if not Configure.autoconfig:
			return execute_method(self)
		env=ConfigSet.ConfigSet()
		do_config=False
		try:
			env.load(os.path.join(Context.top_dir,Options.lockfile))
		except Exception:
			# no usable lockfile: the project was never configured
			Logs.warn('Configuring the project')
			do_config=True
		else:
			if env.run_dir!=Context.run_dir:
				# the project directory changed since the last configuration
				do_config=True
			else:
				# hash the configuration input files and compare with the stored hash
				h=0
				for f in env['files']:
					h=hash((h,Utils.readf(f,'rb')))
				do_config=h!=env.hash
		if do_config:
			# queue 'configure' ahead of the current command and bail out
			Options.commands.insert(0,self.cmd)
			Options.commands.insert(0,'configure')
			return
		return execute_method(self)
	return execute
+Build.BuildContext.execute=autoconfigure(Build.BuildContext.execute)
diff --git a/waflib/Task.py b/waflib/Task.py
new file mode 100644
index 0000000..1b54a54
--- /dev/null
+++ b/waflib/Task.py
@@ -0,0 +1,677 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,shutil,re,tempfile
+from waflib import Utils,Logs,Errors
# Task execution states stored in 'hasrun':
NOT_RUN=0    # the task has not been executed yet
MISSING=1    # the task did not produce its declared output files
CRASHED=2    # the command returned a non-zero exit status
EXCEPTION=3  # an exception was raised while running the task
SKIPPED=8    # the task was up to date and was skipped
SUCCESS=9    # the task executed successfully
# Values returned by runnable_status():
ASK_LATER=-1 # ask again later (dependencies not ready yet)
SKIP_ME=-2   # the task is up to date, skip it
RUN_ME=-3    # the task must be executed
# Template for commands executed through a shell; the two %-placeholders are
# filled in by compile_fun_shell (command string and parameter tuple).
COMPILE_TEMPLATE_SHELL='''
def f(tsk):
	env = tsk.env
	gen = tsk.generator
	bld = gen.bld
	wd = getattr(tsk, 'cwd', None)
	p = env.get_flat
	tsk.last_cmd = cmd = \'\'\' %s \'\'\' % s
	return tsk.exec_command(cmd, cwd=wd, env=env.env or None)
'''
# Template for commands executed without a shell; compile_fun_noshell
# generates the statements (the %s) that append arguments to 'lst'.
COMPILE_TEMPLATE_NOSHELL='''
def f(tsk):
	env = tsk.env
	gen = tsk.generator
	bld = gen.bld
	wd = getattr(tsk, 'cwd', None)
	def to_list(xx):
		if isinstance(xx, str): return [xx]
		return xx
	tsk.last_cmd = lst = []
	%s
	lst = [x for x in lst if x]
	return tsk.exec_command(lst, cwd=wd, env=env.env or None)
'''
def cache_outputs(cls):
    """Decorate a task class so results are fetched from / stored into the
    global build cache (bld.cache_global) around run() and post_run()."""
    original_run=cls.run
    original_post_run=cls.post_run
    def run(self):
        bld=self.generator.bld
        # try the file cache first; 0 means success without executing
        if bld.cache_global and not bld.nocache and self.can_retrieve_cache():
            return 0
        return original_run(self)
    def post_run(self):
        bld=self.generator.bld
        result=original_post_run(self)
        if bld.cache_global and not bld.nocache:
            self.put_files_cache()
        return result
    cls.run=run
    cls.post_run=post_run
    return cls
+classes={}
class store_task_type(type):
	"""Metaclass for task classes: compiles 'run_str' rule strings into run
	methods, computes the code hash 'hcode', enables output caching and
	registers each class in the global 'classes' map (or in a class-provided
	'register' map)."""
	def __init__(cls,name,bases,dict):
		super(store_task_type,cls).__init__(name,bases,dict)
		name=cls.__name__
		if name.endswith('_task'):
			# classes named 'foo_task' are registered under 'foo'
			name=name.replace('_task','')
		if name!='evil'and name!='TaskBase':
			global classes
			if getattr(cls,'run_str',None):
				# compile the rule string into a run function and record the
				# env variables the command depends on
				(f,dvars)=compile_fun(cls.run_str,cls.shell)
				cls.hcode=cls.run_str
				cls.run_str=None
				cls.run=f
				cls.vars=list(set(cls.vars+dvars))
				cls.vars.sort()
			elif getattr(cls,'run',None)and not'hcode'in cls.__dict__:
				# python run function: hash the function body instead
				cls.hcode=Utils.h_fun(cls.run)
			if not getattr(cls,'nocache',None):
				cls=cache_outputs(cls)
			getattr(cls,'register',classes)[name]=cls
+evil=store_task_type('evil',(object,),{})
class TaskBase(evil):
	"""Base class for all tasks: a minimal schedulable unit of work.

	Subclasses usually provide a run() method; instances are driven by the
	runner through process(), and ordering between task classes is derived
	from the class-level constraint attributes below."""
	# Console color used when displaying the task execution.
	color='GREEN'
	# Extension/ordering constraints consumed by set_precedence_constraints.
	ext_in=[]
	ext_out=[]
	before=[]
	after=[]
	# Hash of the task code (rule string or function); part of the signature.
	hcode=''
	def __init__(self,*k,**kw):
		"""Initialize the execution state; 'generator' may be passed as a
		keyword argument, otherwise the task is its own generator."""
		self.hasrun=NOT_RUN
		try:
			self.generator=kw['generator']
		except KeyError:
			self.generator=self
	def __repr__(self):
		"for debugging purposes"
		return'\n\t{task %r: %s %s}'%(self.__class__.__name__,id(self),str(getattr(self,'fun','')))
	def __str__(self):
		"string to display to the user"
		if hasattr(self,'fun'):
			return'executing: %s\n'%self.fun.__name__
		return self.__class__.__name__+'\n'
	def __hash__(self):
		# identity-based hash: each task instance is unique
		return id(self)
	def exec_command(self,cmd,**kw):
		"""Execute *cmd* through the build context, defaulting the working
		directory to the variant directory."""
		bld=self.generator.bld
		try:
			if not kw.get('cwd',None):
				kw['cwd']=bld.cwd
		except AttributeError:
			bld.cwd=kw['cwd']=bld.variant_dir
		return bld.exec_command(cmd,**kw)
	def runnable_status(self):
		"""Return RUN_ME/SKIP_ME/ASK_LATER; the base class always runs."""
		return RUN_ME
	def process(self):
		"""Scheduler driver: run the task, record the outcome in 'hasrun' and
		hand the task back to the master queue."""
		m=self.master
		if m.stop:
			# the build is being stopped; just return the task unprocessed
			m.out.put(self)
			return
		try:
			# invalidate any previously stored signature for this task
			del self.generator.bld.task_sigs[self.uid()]
		except KeyError:
			pass
		try:
			self.generator.bld.returned_tasks.append(self)
			self.log_display(self.generator.bld)
			ret=self.run()
		except Exception:
			self.err_msg=Utils.ex_stack()
			self.hasrun=EXCEPTION
			m.error_handler(self)
			m.out.put(self)
			return
		if ret:
			# the command returned a non-zero exit status
			self.err_code=ret
			self.hasrun=CRASHED
		else:
			try:
				self.post_run()
			except Errors.WafError:
				pass
			except Exception:
				self.err_msg=Utils.ex_stack()
				self.hasrun=EXCEPTION
			else:
				self.hasrun=SUCCESS
		if self.hasrun!=SUCCESS:
			m.error_handler(self)
		m.out.put(self)
	def run(self):
		"""Execute the task; return a non-zero value to indicate failure."""
		if hasattr(self,'fun'):
			return self.fun(self)
		return 0
	def post_run(self):
		"hook executed after a successful run"
		pass
	def log_display(self,bld):
		"write the execution status line to the build logger"
		bld.to_log(self.display())
	def display(self):
		"""Return the console line for this task according to the progress
		bar mode (0: counter, 1: progress bar, 2: parseable output)."""
		col1=Logs.colors(self.color)
		col2=Logs.colors.NORMAL
		master=self.master
		def cur():
			# number of tasks processed so far, minus the ones still queued
			tmp=-1
			if hasattr(master,'ready'):
				tmp-=master.ready.qsize()
			return master.processed+tmp
		if self.generator.bld.progress_bar==1:
			return self.generator.bld.progress_line(cur(),master.total,col1,col2)
		if self.generator.bld.progress_bar==2:
			ela=str(self.generator.bld.timer)
			try:
				ins=','.join([n.name for n in self.inputs])
			except AttributeError:
				ins=''
			try:
				outs=','.join([n.name for n in self.outputs])
			except AttributeError:
				outs=''
			return'|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n'%(master.total,cur(),ins,outs,ela)
		s=str(self)
		if not s:
			return None
		total=master.total
		n=len(str(total))
		# pad the counter to the width of the total, e.g. [ 12/100]
		fs='[%%%dd/%%%dd] %%s%%s%%s'%(n,n)
		return fs%(cur(),total,col1,s,col2)
	def attr(self,att,default=None):
		"""Return an instance attribute, falling back to the class attribute."""
		ret=getattr(self,att,self)
		if ret is self:return getattr(self.__class__,att,default)
		return ret
	def hash_constraints(self):
		"""Hash the ordering constraints; used to group equivalent task classes."""
		cls=self.__class__
		tup=(str(cls.before),str(cls.after),str(cls.ext_in),str(cls.ext_out),cls.__name__,cls.hcode)
		h=hash(tup)
		return h
	def format_error(self):
		"""Return a message explaining why the task failed, based on 'hasrun'."""
		msg=getattr(self,'last_cmd','')
		name=getattr(self.generator,'name','')
		if getattr(self,"err_msg",None):
			return self.err_msg
		elif not self.hasrun:
			return'task in %r was not executed for some reason: %r'%(name,self)
		elif self.hasrun==CRASHED:
			try:
				return' -> task in %r failed (exit status %r): %r\n%r'%(name,self.err_code,self,msg)
			except AttributeError:
				return' -> task in %r failed: %r\n%r'%(name,self,msg)
		elif self.hasrun==MISSING:
			return' -> missing files in %r: %r\n%r'%(name,self,msg)
		else:
			return'invalid status for task in %r: %r'%(name,self.hasrun)
	def colon(self,var1,var2):
		"""Expand a ${VAR1:VAR2} substitution: pair the VAR1 value with each
		element of VAR2 (e.g. '-I%s' applied to each include path)."""
		tmp=self.env[var1]
		if isinstance(var2,str):
			it=self.env[var2]
		else:
			it=var2
		if isinstance(tmp,str):
			# template form: apply the format string to each element
			return[tmp%x for x in it]
		else:
			if Logs.verbose and not tmp and it:
				Logs.warn('Missing env variable %r for task %r (generator %r)'%(var1,self,self.generator))
			# list form: repeat the prefix list before each element
			lst=[]
			for y in it:
				lst.extend(tmp)
				lst.append(y)
			return lst
class Task(TaskBase):
	"""Task with input/output file nodes and signatures; nearly all build
	commands are instances of subclasses of this class."""
	# Names of the env variables contributing to the task signature.
	vars=[]
	# Whether the command is executed through a shell.
	shell=False
	def __init__(self,*k,**kw):
		TaskBase.__init__(self,*k,**kw)
		# a config set is mandatory (keyword 'env')
		self.env=kw['env']
		self.inputs=[]
		self.outputs=[]
		# extra nodes to depend on, besides the inputs
		self.dep_nodes=[]
		# tasks that must be executed before this one
		self.run_after=set([])
	def __str__(self):
		"string to display to the user"
		env=self.env
		src_str=' '.join([a.nice_path()for a in self.inputs])
		tgt_str=' '.join([a.nice_path()for a in self.outputs])
		if self.outputs:sep=' -> '
		else:sep=''
		return'%s: %s%s%s\n'%(self.__class__.__name__.replace('_task',''),src_str,sep,tgt_str)
	def __repr__(self):
		"for debugging purposes"
		try:
			ins=",".join([x.name for x in self.inputs])
			outs=",".join([x.name for x in self.outputs])
		except AttributeError:
			ins=",".join([str(x)for x in self.inputs])
			outs=",".join([str(x)for x in self.outputs])
		return"".join(['\n\t{task %r: '%id(self),self.__class__.__name__," ",ins," -> ",outs,'}'])
	def uid(self):
		"""Return (and cache) a unique identifier built from the class name and
		the input/output paths; used as key in the persistent caches."""
		try:
			return self.uid_
		except AttributeError:
			m=Utils.md5()
			up=m.update
			up(self.__class__.__name__)
			for x in self.inputs+self.outputs:
				up(x.abspath())
			self.uid_=m.digest()
			return self.uid_
	def set_inputs(self,inp):
		"append a node or a list of nodes to the inputs"
		if isinstance(inp,list):self.inputs+=inp
		else:self.inputs.append(inp)
	def set_outputs(self,out):
		"append a node or a list of nodes to the outputs"
		if isinstance(out,list):self.outputs+=out
		else:self.outputs.append(out)
	def set_run_after(self,task):
		"declare that this task must run after *task*"
		assert isinstance(task,TaskBase)
		self.run_after.add(task)
	def signature(self):
		"""Return (and cache) the task signature: a hash of the task code,
		explicit dependencies, env variables and implicit (scanned) deps."""
		try:return self.cache_sig
		except AttributeError:pass
		self.m=Utils.md5()
		self.m.update(self.hcode)
		self.sig_explicit_deps()
		self.sig_vars()
		if self.scan:
			try:
				self.sig_implicit_deps()
			except Errors.TaskRescan:
				# the dependency cache was stale: recompute from scratch
				return self.signature()
		ret=self.cache_sig=self.m.digest()
		return ret
	def runnable_status(self):
		"""Decide whether to run: ASK_LATER while predecessors are pending,
		RUN_ME when the signature or the outputs changed, else SKIP_ME."""
		for t in self.run_after:
			if not t.hasrun:
				return ASK_LATER
		bld=self.generator.bld
		try:
			new_sig=self.signature()
		except Errors.TaskNotReady:
			return ASK_LATER
		key=self.uid()
		try:
			prev_sig=bld.task_sigs[key]
		except KeyError:
			Logs.debug("task: task %r must run as it was never run before or the task code changed"%self)
			return RUN_ME
		for node in self.outputs:
			try:
				if node.sig!=new_sig:
					return RUN_ME
			except AttributeError:
				Logs.debug("task: task %r must run as the output nodes do not exist"%self)
				return RUN_ME
		if new_sig!=prev_sig:
			return RUN_ME
		return SKIP_ME
	def post_run(self):
		"""After a successful run: verify that the outputs exist, stamp them
		with the task signature and persist it into bld.task_sigs."""
		bld=self.generator.bld
		sig=self.signature()
		for node in self.outputs:
			try:
				os.stat(node.abspath())
			except OSError:
				self.hasrun=MISSING
				self.err_msg='-> missing file: %r'%node.abspath()
				raise Errors.WafError(self.err_msg)
			node.sig=sig
		bld.task_sigs[self.uid()]=self.cache_sig
	def sig_explicit_deps(self):
		"""Hash the explicit dependencies: inputs, dep_nodes and any manual
		dependencies registered in bld.deps_man."""
		bld=self.generator.bld
		upd=self.m.update
		for x in self.inputs+self.dep_nodes:
			try:
				upd(x.get_bld_sig())
			except(AttributeError,TypeError):
				raise Errors.WafError('Missing node signature for %r (required by %r)'%(x,self))
		if bld.deps_man:
			additional_deps=bld.deps_man
			for x in self.inputs+self.outputs:
				try:
					d=additional_deps[id(x)]
				except KeyError:
					continue
				for v in d:
					if isinstance(v,bld.root.__class__):
						try:
							v=v.get_bld_sig()
						except AttributeError:
							raise Errors.WafError('Missing node signature for %r (required by %r)'%(v,self))
					elif hasattr(v,'__call__'):
						# callables are evaluated at signature time
						v=v()
					upd(v)
		return self.m.digest()
	def sig_vars(self):
		"""Hash the env variables named in the class 'vars' plus any
		instance-level 'dep_vars'."""
		bld=self.generator.bld
		env=self.env
		upd=self.m.update
		act_sig=bld.hash_env_vars(env,self.__class__.vars)
		upd(act_sig)
		dep_vars=getattr(self,'dep_vars',None)
		if dep_vars:
			upd(bld.hash_env_vars(env,dep_vars))
		return self.m.digest()
	# a scanner method set on subclasses enables implicit dependencies
	scan=None
	def sig_implicit_deps(self):
		"""Hash the implicit dependencies obtained from self.scan(); when the
		cached dependency data is stale, prune vanished source nodes and raise
		TaskRescan so the whole signature is recomputed."""
		bld=self.generator.bld
		key=self.uid()
		prev=bld.task_sigs.get((key,'imp'),[])
		if prev:
			try:
				if prev==self.compute_sig_implicit_deps():
					return prev
			except Exception:
				# a dependency node disappeared: remove stale children
				for x in bld.node_deps.get(self.uid(),[]):
					if x.is_child_of(bld.srcnode):
						try:
							os.stat(x.abspath())
						except OSError:
							try:
								del x.parent.children[x.name]
							except KeyError:
								pass
			del bld.task_sigs[(key,'imp')]
			raise Errors.TaskRescan('rescan')
		(nodes,names)=self.scan()
		if Logs.verbose:
			Logs.debug('deps: scanner for %s returned %s %s'%(str(self),str(nodes),str(names)))
		bld.node_deps[key]=nodes
		bld.raw_deps[key]=names
		self.are_implicit_nodes_ready()
		try:
			bld.task_sigs[(key,'imp')]=sig=self.compute_sig_implicit_deps()
		except Exception:
			if Logs.verbose:
				for k in bld.node_deps.get(self.uid(),[]):
					try:
						k.get_bld_sig()
					except Exception:
						Logs.warn('Missing signature for node %r (may cause rebuilds)'%k)
		else:
			return sig
	def compute_sig_implicit_deps(self):
		"hash the signatures of the scanned dependency nodes"
		upd=self.m.update
		bld=self.generator.bld
		self.are_implicit_nodes_ready()
		for k in bld.node_deps.get(self.uid(),[]):
			upd(k.get_bld_sig())
		return self.m.digest()
	def are_implicit_nodes_ready(self):
		"""Add run_after constraints for scanned dependencies produced by other
		tasks of the current group; raise TaskNotReady when such a producer
		has not run yet."""
		bld=self.generator.bld
		try:
			cache=bld.dct_implicit_nodes
		except AttributeError:
			bld.dct_implicit_nodes=cache={}
		try:
			dct=cache[bld.cur]
		except KeyError:
			# map output node -> producing task for the current build group
			dct=cache[bld.cur]={}
			for tsk in bld.cur_tasks:
				for x in tsk.outputs:
					dct[x]=tsk
		modified=False
		for x in bld.node_deps.get(self.uid(),[]):
			if x in dct:
				self.run_after.add(dct[x])
				modified=True
		if modified:
			for tsk in self.run_after:
				if not tsk.hasrun:
					raise Errors.TaskNotReady('not ready')
	def can_retrieve_cache(self):
		"""Try to copy the outputs from the global file cache; return True on
		success (and set self.cached), else None."""
		if not getattr(self,'outputs',None):
			return None
		sig=self.signature()
		ssig=Utils.to_hex(self.uid())+Utils.to_hex(sig)
		dname=os.path.join(self.generator.bld.cache_global,ssig)
		try:
			t1=os.stat(dname).st_mtime
		except OSError:
			return None
		for node in self.outputs:
			orig=os.path.join(dname,node.name)
			try:
				shutil.copy2(orig,node.abspath())
				# refresh the cache entry timestamp (helps cache eviction)
				os.utime(orig,None)
			except(OSError,IOError):
				Logs.debug('task: failed retrieving file')
				return None
		try:
			t2=os.stat(dname).st_mtime
		except OSError:
			return None
		if t1!=t2:
			# the cache entry changed while copying; do not trust the result
			return None
		for node in self.outputs:
			node.sig=sig
			if self.generator.bld.progress_bar<1:
				self.generator.bld.to_log('restoring from cache %r\n'%node.abspath())
		self.cached=True
		return True
	def put_files_cache(self):
		"""Store the outputs into the global file cache; a temporary directory
		is filled first and renamed into place for atomicity."""
		if getattr(self,'cached',None):
			return None
		if not getattr(self,'outputs',None):
			return None
		sig=self.signature()
		ssig=Utils.to_hex(self.uid())+Utils.to_hex(sig)
		dname=os.path.join(self.generator.bld.cache_global,ssig)
		tmpdir=tempfile.mkdtemp(prefix=self.generator.bld.cache_global+os.sep+'waf')
		try:
			shutil.rmtree(dname)
		except Exception:
			pass
		try:
			for node in self.outputs:
				dest=os.path.join(tmpdir,node.name)
				shutil.copy2(node.abspath(),dest)
		except(OSError,IOError):
			try:
				shutil.rmtree(tmpdir)
			except Exception:
				pass
		else:
			try:
				os.rename(tmpdir,dname)
			except OSError:
				# another process stored the same entry first
				try:
					shutil.rmtree(tmpdir)
				except Exception:
					pass
			else:
				try:
					os.chmod(dname,Utils.O755)
				except Exception:
					pass
def is_before(t1,t2):
    """Return 1 when task *t1* must be executed before *t2*, based on the
    ext_in/ext_out and before/after class attributes; else 0."""
    to_list=Utils.to_list
    # t1 produces an extension that t2 consumes
    if any(ext in to_list(t1.ext_out) for ext in to_list(t2.ext_in)):
        return 1
    # explicit class-name constraints
    if t1.__class__.__name__ in to_list(t2.after):
        return 1
    if t2.__class__.__name__ in to_list(t1.before):
        return 1
    return 0
def set_file_constraints(tasks):
    """Add run_after ordering between tasks: every consumer of a node must
    run after every producer of that node."""
    consumers=Utils.defaultdict(set)
    producers=Utils.defaultdict(set)
    for tsk in tasks:
        for node in getattr(tsk,'inputs',[])+getattr(tsk,'dep_nodes',[]):
            consumers[id(node)].add(tsk)
        for node in getattr(tsk,'outputs',[]):
            producers[id(node)].add(tsk)
    # nodes that are both produced and consumed create ordering constraints
    shared=set(consumers.keys()).intersection(producers.keys())
    for key in shared:
        for tsk in consumers[key]:
            tsk.run_after.update(producers[key])
def set_precedence_constraints(tasks):
	"""Add run_after constraints between groups of tasks according to their
	before/after/ext_in/ext_out class attributes (see is_before)."""
	# group the tasks by identical constraint signature
	cstr_groups=Utils.defaultdict(list)
	for x in tasks:
		h=x.hash_constraints()
		cstr_groups[h].append(x)
	keys=list(cstr_groups.keys())
	maxi=len(keys)
	# compare one representative of each group pairwise
	for i in range(maxi):
		t1=cstr_groups[keys[i]][0]
		for j in range(i+1,maxi):
			t2=cstr_groups[keys[j]][0]
			# a: index of the group that runs first, b: the group that waits
			if is_before(t1,t2):
				a=i
				b=j
			elif is_before(t2,t1):
				a=j
				b=i
			else:
				continue
			aval=set(cstr_groups[keys[a]])
			for x in cstr_groups[keys[b]]:
				x.run_after.update(aval)
def funex(c):
    """Execute the code string *c* and return the function 'f' it defines."""
    namespace={}
    exec(c,namespace)
    return namespace['f']
+reg_act=re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})",re.M)
def compile_fun_shell(line):
	"""Compile a rule string into a python function executing one shell
	command; return (function, list of env variable names used)."""
	extr=[]
	def repl(match):
		# replace each substitution by a %s placeholder, recording (var, code)
		g=match.group
		if g('dollar'):return"$"
		elif g('backslash'):return'\\\\'
		elif g('subst'):extr.append((g('var'),g('code')));return"%s"
		return None
	line=reg_act.sub(repl,line)or line
	parm=[]
	dvars=[]
	app=parm.append
	for(var,meth)in extr:
		if var=='SRC':
			# ${SRC} -> all input paths; ${SRC[0]} etc. when a subscript is given
			if meth:app('tsk.inputs%s'%meth)
			else:app('" ".join([a.path_from(bld.bldnode) for a in tsk.inputs])')
		elif var=='TGT':
			if meth:app('tsk.outputs%s'%meth)
			else:app('" ".join([a.path_from(bld.bldnode) for a in tsk.outputs])')
		elif meth:
			if meth.startswith(':'):
				# ${VAR:OTHER} -> pair VAR with each element of OTHER (colon())
				m=meth[1:]
				if m=='SRC':
					m='[a.path_from(bld.bldnode) for a in tsk.inputs]'
				elif m=='TGT':
					m='[a.path_from(bld.bldnode) for a in tsk.outputs]'
				elif m[:3]not in('tsk','gen','bld'):
					dvars.extend([var,meth[1:]])
					m='%r'%m
				app('" ".join(tsk.colon(%r, %s))'%(var,m))
			else:
				# arbitrary attribute/subscript access, e.g. ${tsk.xyz}
				app('%s%s'%(var,meth))
		else:
			# plain ${VAR} -> flattened env variable value
			if not var in dvars:dvars.append(var)
			app("p('%s')"%var)
	if parm:parm="%% (%s) "%(',\n\t\t'.join(parm))
	else:parm=''
	c=COMPILE_TEMPLATE_SHELL%(line,parm)
	Logs.debug('action: %s'%c.strip().splitlines())
	return(funex(c),dvars)
def compile_fun_noshell(line):
	"""Compile a rule string into a python function building an argument list
	(no shell); return (function, list of env variable names used)."""
	extr=[]
	def repl(match):
		# record each substitution and leave a separator marker in the string
		g=match.group
		if g('dollar'):return"$"
		elif g('subst'):extr.append((g('var'),g('code')));return"<<|@|>>"
		return None
	line2=reg_act.sub(repl,line)
	params=line2.split('<<|@|>>')
	assert(extr)
	buf=[]
	dvars=[]
	app=buf.append
	for x in range(len(extr)):
		params[x]=params[x].strip()
		if params[x]:
			# literal command fragment preceding the substitution
			app("lst.extend(%r)"%params[x].split())
		(var,meth)=extr[x]
		if var=='SRC':
			if meth:app('lst.append(tsk.inputs%s)'%meth)
			else:app("lst.extend([a.path_from(bld.bldnode) for a in tsk.inputs])")
		elif var=='TGT':
			if meth:app('lst.append(tsk.outputs%s)'%meth)
			else:app("lst.extend([a.path_from(bld.bldnode) for a in tsk.outputs])")
		elif meth:
			if meth.startswith(':'):
				# ${VAR:OTHER} -> pair VAR with each element of OTHER (colon())
				m=meth[1:]
				if m=='SRC':
					m='[a.path_from(bld.bldnode) for a in tsk.inputs]'
				elif m=='TGT':
					m='[a.path_from(bld.bldnode) for a in tsk.outputs]'
				elif m[:3]not in('tsk','gen','bld'):
					dvars.extend([var,m])
					m='%r'%m
				app('lst.extend(tsk.colon(%r, %s))'%(var,m))
			else:
				# python expression on the task/generator/build context
				app('lst.extend(gen.to_list(%s%s))'%(var,meth))
		else:
			# plain ${VAR} from the environment
			app('lst.extend(to_list(env[%r]))'%var)
			if not var in dvars:dvars.append(var)
	if extr:
		if params[-1]:
			# trailing literal fragment after the last substitution
			app("lst.extend(%r)"%params[-1].split())
	fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf)
	Logs.debug('action: %s'%fun.strip().splitlines())
	return(funex(fun),dvars)
def compile_fun(line,shell=False):
    """Compile a rule string into (function, dependent env vars); a shell is
    forced when a redirection or '&&' appears after the first character."""
    if not shell:
        for token in ('<', '>', '&&'):
            if line.find(token)>0:
                shell=True
                break
    if shell:
        return compile_fun_shell(line)
    return compile_fun_noshell(line)
def task_factory(name,func=None,vars=None,color='GREEN',ext_in=[],ext_out=[],before=[],after=[],shell=False,scan=None):
	"""Create a new Task subclass named *name* at runtime and register it.

	*func* may be a rule string (stored as run_str and compiled by the
	metaclass) or a python function used directly as the run method."""
	params={'vars':vars or[],'color':color,'name':name,'ext_in':Utils.to_list(ext_in),'ext_out':Utils.to_list(ext_out),'before':Utils.to_list(before),'after':Utils.to_list(after),'shell':shell,'scan':scan,}
	if isinstance(func,str):
		params['run_str']=func
	else:
		params['run']=func
	# type(Task) is the store_task_type metaclass, which compiles run_str
	cls=type(Task)(name,(Task,),params)
	global classes
	classes[name]=cls
	return cls
def always_run(cls):
    """Decorate a task class so it is never skipped: a SKIP_ME status from the
    original runnable_status becomes RUN_ME."""
    previous=cls.runnable_status
    def always(self):
        status=previous(self)
        return RUN_ME if status==SKIP_ME else status
    cls.runnable_status=always
    return cls
def update_outputs(cls):
	"""Decorate a task class so outputs are tracked by content hash: outputs
	are re-hashed after each run, and the task may still be skipped when the
	stored output hashes and producing task match."""
	old_post_run=cls.post_run
	def post_run(self):
		old_post_run(self)
		for node in self.outputs:
			# hash the produced file and remember which task created it
			node.sig=Utils.h_file(node.abspath())
			self.generator.bld.task_sigs[node.abspath()]=self.uid()
	cls.post_run=post_run
	old_runnable_status=cls.runnable_status
	def runnable_status(self):
		status=old_runnable_status(self)
		if status!=RUN_ME:
			return status
		try:
			bld=self.generator.bld
			prev_sig=bld.task_sigs[self.uid()]
			if prev_sig==self.signature():
				for x in self.outputs:
					# rerun when an output vanished or belongs to another task
					if not x.sig or bld.task_sigs[x.abspath()]!=self.uid():
						return RUN_ME
				return SKIP_ME
		except KeyError:
			pass
		except IndexError:
			pass
		except AttributeError:
			pass
		return RUN_ME
	cls.runnable_status=runnable_status
	return cls
diff --git a/waflib/TaskGen.py b/waflib/TaskGen.py
new file mode 100644
index 0000000..acd5166
--- /dev/null
+++ b/waflib/TaskGen.py
@@ -0,0 +1,400 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import copy,re,os
+from waflib import Task,Utils,Logs,Errors,ConfigSet,Node
+feats=Utils.defaultdict(set)
class task_gen(object):
	"""Encapsulates a set of tasks to be created when the build starts.

	The tasks are created lazily by post(), which executes the methods bound
	to the generator features in an order derived from the precedence
	constraints (see before_method/after_method)."""
	# Class-level extension -> method mapping and method precedence table.
	mappings={}
	prec=Utils.defaultdict(list)
	def __init__(self,*k,**kw):
		self.source=''
		self.target=''
		# names of the methods to execute (computed in post())
		self.meths=[]
		# per-instance overrides of the class-level tables
		self.prec=Utils.defaultdict(list)
		self.mappings={}
		self.features=[]
		self.tasks=[]
		if not'bld'in kw:
			# standalone generator (no build context), used for tests
			self.env=ConfigSet.ConfigSet()
			self.idx=0
			self.path=None
		else:
			self.bld=kw['bld']
			self.env=self.bld.env.derive()
			self.path=self.bld.path
			# per-directory counter used to disambiguate output file names
			try:
				self.idx=self.bld.idx[id(self.path)]=self.bld.idx.get(id(self.path),0)+1
			except AttributeError:
				self.bld.idx={}
				self.idx=self.bld.idx[id(self.path)]=1
		for key,val in kw.items():
			setattr(self,key,val)
	def __str__(self):
		"string to display to the user"
		return"<task_gen %r declared in %s>"%(self.name,self.path.abspath())
	def __repr__(self):
		"for debugging purposes"
		lst=[]
		for x in self.__dict__.keys():
			if x not in['env','bld','compiled_tasks','tasks']:
				lst.append("%s=%s"%(x,repr(getattr(self,x))))
		return"bld(%s) in %s"%(", ".join(lst),self.path.abspath())
	def get_name(self):
		"""Return (and cache) the generator name, derived from the target when
		no explicit name was set."""
		try:
			return self._name
		except AttributeError:
			if isinstance(self.target,list):
				lst=[str(x)for x in self.target]
				name=self._name=','.join(lst)
			else:
				name=self._name=str(self.target)
			return name
	def set_name(self,name):
		self._name=name
	name=property(get_name,set_name)
	def to_list(self,val):
		"split strings into lists, leave other values unchanged"
		if isinstance(val,str):return val.split()
		else:return val
	def post(self):
		"""Create the tasks: collect the methods bound to the features, order
		them by the precedence constraints and execute them.  Returns False
		when the generator was already posted."""
		if getattr(self,'posted',None):
			return False
		self.posted=True
		keys=set(self.meths)
		self.features=Utils.to_list(self.features)
		for x in self.features+['*']:
			st=feats[x]
			if not st:
				if not x in Task.classes:
					Logs.warn('feature %r does not exist - bind at least one method to it'%x)
			keys.update(list(st))
		# keep only the precedence constraints relevant to the methods used
		prec={}
		prec_tbl=self.prec or task_gen.prec
		for x in prec_tbl:
			if x in keys:
				prec[x]=prec_tbl[x]
		# topological sort: start from methods with no predecessor
		tmp=[]
		for a in keys:
			for x in prec.values():
				if a in x:break
			else:
				tmp.append(a)
		tmp.sort()
		out=[]
		while tmp:
			e=tmp.pop()
			if e in keys:out.append(e)
			try:
				nlst=prec[e]
			except KeyError:
				pass
			else:
				del prec[e]
				for x in nlst:
					for y in prec:
						if x in prec[y]:
							break
					else:
						tmp.append(x)
		if prec:
			raise Errors.WafError('Cycle detected in the method execution %r'%prec)
		out.reverse()
		self.meths=out
		Logs.debug('task_gen: posting %s %d'%(self,id(self)))
		for x in out:
			try:
				v=getattr(self,x)
			except AttributeError:
				raise Errors.WafError('%r is not a valid task generator method'%x)
			Logs.debug('task_gen: -> %s (%d)'%(x,id(self)))
			v()
		Logs.debug('task_gen: posted %s'%self.name)
		return True
	def get_hook(self,node):
		"""Return the method mapped to the file extension of *node*, trying
		the instance mappings first, then the class-level mappings."""
		name=node.name
		for k in self.mappings:
			if name.endswith(k):
				return self.mappings[k]
		for k in task_gen.mappings:
			if name.endswith(k):
				return task_gen.mappings[k]
		raise Errors.WafError("File %r has no mapping in %r (did you forget to load a waf tool?)"%(node,task_gen.mappings.keys()))
	def create_task(self,name,src=None,tgt=None):
		"""Create a task of class *name*, set its inputs/outputs, record it in
		self.tasks and return it."""
		task=Task.classes[name](env=self.env.derive(),generator=self)
		if src:
			task.set_inputs(src)
		if tgt:
			task.set_outputs(tgt)
		self.tasks.append(task)
		return task
	def clone(self,env):
		"""Duplicate the generator for another env: 'path' and 'features' are
		shared, other attributes shallow-copied, and the clone is unposted."""
		newobj=self.bld()
		for x in self.__dict__:
			if x in['env','bld']:
				continue
			elif x in['path','features']:
				setattr(newobj,x,getattr(self,x))
			else:
				setattr(newobj,x,copy.copy(getattr(self,x)))
		newobj.posted=False
		if isinstance(env,str):
			newobj.env=self.bld.all_envs[env].derive()
		else:
			newobj.env=env.derive()
		return newobj
def declare_chain(name='',rule=None,reentrant=None,color='BLUE',ext_in=[],ext_out=[],before=[],after=[],decider=None,scan=None,install_path=None,shell=False):
	"""Create a task class from *rule* and map it to the given input
	extensions so source files are processed in a chain (e.g. .l -> .c -> .o).

	*reentrant* limits how many outputs are fed back as sources; *decider*
	may compute the output extensions per node.  Returns the hook function."""
	ext_in=Utils.to_list(ext_in)
	ext_out=Utils.to_list(ext_out)
	if not name:
		name=rule
	cls=Task.task_factory(name,rule,color=color,ext_in=ext_in,ext_out=ext_out,before=before,after=after,scan=scan,shell=shell)
	def x_file(self,node):
		# extension hook executed for each matching source file
		ext=decider and decider(self,node)or cls.ext_out
		if ext_in:
			_ext_in=ext_in[0]
		tsk=self.create_task(name,node)
		cnt=0
		keys=list(self.mappings.keys())+list(self.__class__.mappings.keys())
		for x in ext:
			k=node.change_ext(x,ext_in=_ext_in)
			tsk.outputs.append(k)
			if reentrant!=None:
				# feed only the first 'reentrant' outputs back into the sources
				if cnt<int(reentrant):
					self.source.append(k)
			else:
				# by default, reprocess outputs that have a known mapping
				for y in keys:
					if k.name.endswith(y):
						self.source.append(k)
						break
			cnt+=1
		if install_path:
			self.bld.install_files(install_path,tsk.outputs)
		return tsk
	for x in cls.ext_in:
		task_gen.mappings[x]=x_file
	return x_file
def taskgen_method(func):
    """Decorator: bind *func* onto task_gen as a method and return it unchanged."""
    setattr(task_gen, func.__name__, func)
    return func
def feature(*k):
    """Decorator: register a task generator method and bind it to each of the
    given feature names."""
    def deco(func):
        setattr(task_gen, func.__name__, func)
        for feature_name in k:
            feats[feature_name].update([func.__name__])
        return func
    return deco
def before_method(*k):
    """Decorator: register a task generator method that must execute before
    the named methods."""
    def deco(func):
        setattr(task_gen, func.__name__, func)
        for other in k:
            successors=task_gen.prec[other]
            if func.__name__ not in successors:
                successors.append(func.__name__)
        return func
    return deco
before=before_method
def after_method(*k):
    """Decorator: register a task generator method that must execute after
    the named methods."""
    def deco(func):
        setattr(task_gen, func.__name__, func)
        for other in k:
            predecessors=task_gen.prec[func.__name__]
            if other not in predecessors:
                predecessors.append(other)
        return func
    return deco
after=after_method
def extension(*k):
    """Decorator: register a task generator method as the handler for the
    given file extensions."""
    def deco(func):
        setattr(task_gen, func.__name__, func)
        for ext in k:
            task_gen.mappings[ext]=func
        return func
    return deco
@taskgen_method
def to_nodes(self,lst,path=None):
	"""Convert *lst* (a string, a node, or a list of those) into a list of
	nodes resolved relative to *path* (default: self.path).

	Raises WafError when a source file cannot be found.
	Fix: the decorator line was mangled by the mail archive (' at ' -> '@')."""
	tmp=[]
	path=path or self.path
	find=path.find_resource
	if isinstance(lst,self.path.__class__):
		# a single node was passed
		lst=[lst]
	for x in Utils.to_list(lst):
		if isinstance(x,str):
			node=find(x)
		else:
			node=x
		if not node:
			raise Errors.WafError("source not found: %r in %r"%(x,self))
		tmp.append(node)
	return tmp
@feature('*')
def process_source(self):
	"""Default method for all features: convert self.source into nodes and
	create a task for each via its extension mapping.
	Fix: the decorator line was mangled by the mail archive (' at ' -> '@')."""
	self.source=self.to_nodes(getattr(self,'source',[]))
	for node in self.source:
		self.get_hook(node)(self,node)
@feature('*')
@before_method('process_source')
def process_rule(self):
	"""Create a task from the 'rule' attribute (string or function), wiring
	its targets, sources, installation and dependency scanning.
	Fix: the decorator lines were mangled by the mail archive (' at ' -> '@')."""
	if not getattr(self,'rule',None):
		return
	name=str(getattr(self,'name',None)or self.target or getattr(self.rule,'__name__',self.rule))
	# task classes made from rules are cached on the build context
	try:
		cache=self.bld.cache_rule_attr
	except AttributeError:
		cache=self.bld.cache_rule_attr={}
	cls=None
	if getattr(self,'cache_rule','True'):
		try:
			cls=cache[(name,self.rule)]
		except KeyError:
			pass
	if not cls:
		cls=Task.task_factory(name,self.rule,getattr(self,'vars',[]),shell=getattr(self,'shell',True),color=getattr(self,'color','BLUE'),scan=getattr(self,'scan',None))
		if getattr(self,'scan',None):
			cls.scan=self.scan
		elif getattr(self,'deps',None):
			# build a scanner from the literal 'deps' file list
			def scan(self):
				nodes=[]
				for x in self.generator.to_list(getattr(self.generator,'deps',None)):
					node=self.generator.path.find_resource(x)
					if not node:
						self.generator.bld.fatal('Could not find %r (was it declared?)'%x)
					nodes.append(node)
				return[nodes,[]]
			cls.scan=scan
		if getattr(self,'update_outputs',None):
			Task.update_outputs(cls)
		if getattr(self,'always',None):
			Task.always_run(cls)
		for x in['after','before','ext_in','ext_out']:
			setattr(cls,x,getattr(self,x,[]))
		if getattr(self,'cache_rule','True'):
			cache[(name,self.rule)]=cls
	tsk=self.create_task(name)
	if getattr(self,'target',None):
		if isinstance(self.target,str):
			self.target=self.target.split()
		if not isinstance(self.target,list):
			self.target=[self.target]
		for x in self.target:
			if isinstance(x,str):
				tsk.outputs.append(self.path.find_or_declare(x))
			else:
				# an output node was given directly
				x.parent.mkdir()
				tsk.outputs.append(x)
		if getattr(self,'install_path',None):
			self.bld.install_files(self.install_path,tsk.outputs)
	if getattr(self,'source',None):
		tsk.inputs=self.to_nodes(self.source)
		# the sources are fully consumed by this rule
		self.source=[]
	if getattr(self,'cwd',None):
		tsk.cwd=self.cwd
@feature('seq')
def sequence_order(self):
	"""Force the tasks of this generator to run after the tasks of the
	previously posted 'seq' generator.
	Fix: the decorator line was mangled by the mail archive (' at ' -> '@')."""
	if self.meths and self.meths[-1]!='sequence_order':
		# make sure this method runs last, after the tasks were created
		self.meths.append('sequence_order')
		return
	if getattr(self,'seq_start',None):
		return
	if getattr(self.bld,'prev',None):
		self.bld.prev.post()
		for x in self.bld.prev.tasks:
			for y in self.tasks:
				y.set_run_after(x)
	self.bld.prev=self
+re_m4=re.compile('@(\w+)@',re.M)
class subst_pc(Task.Task):
	"""Substitute @VAR@ placeholders in a template file (e.g. .pc.in) with
	values taken from the task generator or the environment."""
	def run(self):
		"Substitutes variables in a .in file"
		if getattr(self.generator,'is_copy',None):
			# plain copy requested: duplicate the file verbatim
			self.outputs[0].write(self.inputs[0].read('rb'),'wb')
			if getattr(self.generator,'chmod',None):
				os.chmod(self.outputs[0].abspath(),self.generator.chmod)
			return
		code=self.inputs[0].read(encoding=getattr(self.generator,'encoding','ISO8859-1'))
		if getattr(self.generator,'subst_fun',None):
			# a custom substitution function takes precedence
			code=self.generator.subst_fun(self,code)
			if code:
				self.outputs[0].write(code,encoding=getattr(self.generator,'encoding','ISO8859-1'))
			return
		# escape literal %, then turn @VAR@ into %(VAR)s placeholders
		code=code.replace('%','%%')
		lst=[]
		def repl(match):
			g=match.group
			if g(1):
				lst.append(g(1))
				return"%%(%s)s"%g(1)
			return''
		code=re_m4.sub(repl,code)
		try:
			d=self.generator.dct
		except AttributeError:
			d={}
			for x in lst:
				# generator attribute wins, then env var (exact, then upper-case)
				tmp=getattr(self.generator,x,'')or self.env.get_flat(x)or self.env.get_flat(x.upper())
				d[x]=str(tmp)
		code=code%d
		self.outputs[0].write(code,encoding=getattr(self.generator,'encoding','ISO8859-1'))
		self.generator.bld.raw_deps[self.uid()]=self.dep_vars=lst
		# invalidate the cached signature since dep_vars just changed
		try:delattr(self,'cache_sig')
		except AttributeError:pass
		if getattr(self.generator,'chmod',None):
			os.chmod(self.outputs[0].abspath(),self.generator.chmod)
	def sig_vars(self):
		"""Include the substituted variable values (and subst_fun, when set)
		in the task signature."""
		bld=self.generator.bld
		env=self.env
		upd=self.m.update
		if getattr(self.generator,'subst_fun',None):
			upd(Utils.h_fun(self.generator.subst_fun))
		vars=self.generator.bld.raw_deps.get(self.uid(),[])
		act_sig=bld.hash_env_vars(env,vars)
		upd(act_sig)
		lst=[getattr(self.generator,x,'')for x in vars]
		upd(Utils.h_list(lst))
		return self.m.digest()
@extension('.pc.in')
def add_pcfile(self,node):
	"""Process a .pc.in template into a .pc pkg-config file and install it
	(default destination: ${LIBDIR}/pkgconfig/).
	Fix: the decorator line was mangled by the mail archive (' at ' -> '@')."""
	tsk=self.create_task('subst_pc',node,node.change_ext('.pc','.pc.in'))
	self.bld.install_files(getattr(self,'install_path','${LIBDIR}/pkgconfig/'),tsk.outputs)
class subst(subst_pc):
	# Same behavior as subst_pc; a separate class lets 'subst' feature tasks
	# carry their own before/after constraints (see process_subst).
	pass
@feature('subst')
@before_method('process_source','process_rule')
def process_subst(self):
	"""Create one 'subst' task per (source, target) pair for the 'subst'
	feature; sources and targets must have the same length.

	Fixes: the decorator lines were mangled by the mail archive
	(' at ' -> '@'), and the error message typo 'cound' is corrected."""
	src=Utils.to_list(getattr(self,'source',[]))
	if isinstance(src,Node.Node):
		src=[src]
	tgt=Utils.to_list(getattr(self,'target',[]))
	if isinstance(tgt,Node.Node):
		tgt=[tgt]
	if len(src)!=len(tgt):
		raise Errors.WafError('invalid number of source/target for %r'%self)
	for x,y in zip(src,tgt):
		if not x or not y:
			raise Errors.WafError('null source or target for %r'%self)
		a,b=None,None
		if isinstance(x,str)and isinstance(y,str)and x==y:
			# same name: read from the source dir, write into the build dir
			a=self.path.find_node(x)
			b=self.path.get_bld().make_node(y)
			if not os.path.isfile(b.abspath()):
				b.sig=None
				b.parent.mkdir()
		else:
			if isinstance(x,str):
				a=self.path.find_resource(x)
			elif isinstance(x,Node.Node):
				a=x
			if isinstance(y,str):
				b=self.path.find_or_declare(y)
			elif isinstance(y,Node.Node):
				b=y
		if not a:
			raise Errors.WafError('could not find %r for %r'%(x,self))
		has_constraints=False
		tsk=self.create_task('subst',a,b)
		for k in('after','before','ext_in','ext_out'):
			val=getattr(self,k,None)
			if val:
				has_constraints=True
				setattr(tsk,k,val)
		if not has_constraints and b.name.endswith('.h'):
			# generated headers must exist before any compilation starts
			tsk.before=[k for k in('c','cxx')if k in Task.classes]
		inst_to=getattr(self,'install_path',None)
		if inst_to:
			self.bld.install_files(inst_to,b,chmod=getattr(self,'chmod',Utils.O644))
	# the sources were fully consumed by this method
	self.source=[]
diff --git a/waflib/Tools/__init__.py b/waflib/Tools/__init__.py
new file mode 100644
index 0000000..efeed79
--- /dev/null
+++ b/waflib/Tools/__init__.py
@@ -0,0 +1,4 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
diff --git a/waflib/Tools/ar.py b/waflib/Tools/ar.py
new file mode 100644
index 0000000..7a16dfe
--- /dev/null
+++ b/waflib/Tools/ar.py
@@ -0,0 +1,11 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+from waflib.Configure import conf
@conf
def find_ar(conf):
	"""Configuration helper kept for backward compatibility: load the ar tool.

	NOTE(review): the decorator line was mangled by mail transport
	(' at conf') in the carrier patch; restored to ``@conf``.
	"""
	conf.load('ar')
def configure(conf):
	"""Waf configuration entry point for the ar tool.

	Locates the system archiver (stored in ``conf.env.AR``) and sets the
	default flags: create/replace members and write an index (``rcs``).
	"""
	default_flags = 'rcs'
	conf.find_program('ar', var='AR')
	conf.env.ARFLAGS = default_flags
diff --git a/waflib/Tools/asm.py b/waflib/Tools/asm.py
new file mode 100644
index 0000000..b9ed5f4
--- /dev/null
+++ b/waflib/Tools/asm.py
@@ -0,0 +1,25 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys
+from waflib import Task,Utils
+import waflib.Task
+from waflib.Tools.ccroot import link_task,stlink_task
+from waflib.TaskGen import extension,feature
+class asm(Task.Task):
+ color='BLUE'
+ run_str='${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
+ at extension('.s','.S','.asm','.ASM','.spp','.SPP')
+def asm_hook(self,node):
+ return self.create_compiled_task('asm',node)
+class asmprogram(link_task):
+ run_str='${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}'
+ ext_out=['.bin']
+ inst_to='${BINDIR}'
+class asmshlib(asmprogram):
+ inst_to='${LIBDIR}'
+class asmstlib(stlink_task):
+ pass
def configure(conf):
	"""Waf configuration entry point for the asm tool.

	Sets the include-search-path flag template used to expand INCPATHS.
	"""
	include_flag_template = '-I%s'
	conf.env['ASMPATH_ST'] = include_flag_template
diff --git a/waflib/Tools/bison.py b/waflib/Tools/bison.py
new file mode 100644
index 0000000..6ae7898
--- /dev/null
+++ b/waflib/Tools/bison.py
@@ -0,0 +1,28 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+from waflib import Task
+from waflib.TaskGen import extension
+class bison(Task.Task):
+ color='BLUE'
+ run_str='${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
+ ext_out=['.h']
@extension('.y', '.yc', '.yy')
def big_bison(self, node):
	"""Create a bison task for the grammar *node* and schedule its output.

	The generated source (``.tab.cc`` for ``.yc`` grammars, ``.tab.c``
	otherwise) is appended to ``self.source`` so it gets compiled; a header
	is also declared when ``-d`` is present in ``BISONFLAGS``.

	NOTE(review): the decorator line was mangled by mail transport
	(' at extension') in the carrier patch; restored to ``@extension``.
	"""
	has_h = '-d' in self.env['BISONFLAGS']
	outs = []
	if node.name.endswith('.yc'):
		outs.append(node.change_ext('.tab.cc'))
		if has_h:
			outs.append(node.change_ext('.tab.hh'))
	else:
		outs.append(node.change_ext('.tab.c'))
		if has_h:
			outs.append(node.change_ext('.tab.h'))
	tsk = self.create_task('bison', node, outs)
	# run bison from the build directory so its outputs land there
	tsk.cwd = node.parent.get_bld().abspath()
	self.source.append(outs[0])
def configure(conf):
	"""Waf configuration entry point for the bison tool.

	Locates the bison executable (stored in ``conf.env.BISON``) and enables
	header generation by default (``-d``).
	"""
	conf.find_program('bison', var='BISON')
	default_flags = ['-d']
	conf.env.BISONFLAGS = default_flags
diff --git a/waflib/Tools/c.py b/waflib/Tools/c.py
new file mode 100644
index 0000000..4d8cbd5
--- /dev/null
+++ b/waflib/Tools/c.py
@@ -0,0 +1,24 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+from waflib import TaskGen,Task,Utils
+from waflib.Tools import c_preproc
+from waflib.Tools.ccroot import link_task,stlink_task
+ at TaskGen.extension('.c')
+def c_hook(self,node):
+ return self.create_compiled_task('c',node)
+class c(Task.Task):
+ run_str='${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
+ vars=['CCDEPS']
+ ext_in=['.h']
+ scan=c_preproc.scan
+class cprogram(link_task):
+ run_str='${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}'
+ ext_out=['.bin']
+ vars=['LINKDEPS']
+ inst_to='${BINDIR}'
+class cshlib(cprogram):
+ inst_to='${LIBDIR}'
+class cstlib(stlink_task):
+ pass
diff --git a/waflib/Tools/c_aliases.py b/waflib/Tools/c_aliases.py
new file mode 100644
index 0000000..a3a2bb9
--- /dev/null
+++ b/waflib/Tools/c_aliases.py
@@ -0,0 +1,55 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys,re
+from waflib import Utils,Build
+from waflib.Configure import conf
def get_extensions(lst):
	"""Return the dot-less file extension of each entry in *lst*.

	Entries may be strings or Node-like objects (``.name`` is used for the
	latter); entries that cannot be processed are silently skipped.
	"""
	exts = []
	for entry in Utils.to_list(lst):
		try:
			name = entry if isinstance(entry, str) else entry.name
			exts.append(name[name.rfind('.') + 1:])
		except Exception:
			# best-effort scan: ignore entries without a usable name
			pass
	return exts
def sniff_features(**kw):
	"""Guess task-generator features from the source file extensions.

	Reads ``kw['source']`` and ``kw['_type']`` (one of 'program', 'shlib',
	'stlib', 'objects') and returns a list of feature names, or the plain
	string ``'java'`` when java sources are present.
	"""
	exts = get_extensions(kw['source'])
	target_type = kw['_type']
	feats = []
	if 'cxx' in exts or 'cpp' in exts or 'c++' in exts or 'cc' in exts or 'C' in exts:
		feats.append('cxx')
	if 'c' in exts or 'vala' in exts:
		feats.append('c')
	if 'd' in exts:
		feats.append('d')
	if 'java' in exts:
		# java builds do not combine with the other features; the original
		# also appended 'java' to feats first, but feats was then discarded
		return 'java'
	if target_type in ('program', 'shlib', 'stlib'):
		# iterate over a snapshot: the original appended to feats while
		# iterating it, which only worked because the appended names could
		# never re-match the filter
		for x in list(feats):
			if x in ('cxx', 'd', 'c'):
				feats.append(x + target_type)
	return feats
+def set_features(kw,_type):
+ kw['_type']=_type
+ kw['features']=Utils.to_list(kw.get('features',[]))+Utils.to_list(sniff_features(**kw))
+ at conf
+def program(bld,*k,**kw):
+ set_features(kw,'program')
+ return bld(*k,**kw)
+ at conf
+def shlib(bld,*k,**kw):
+ set_features(kw,'shlib')
+ return bld(*k,**kw)
+ at conf
+def stlib(bld,*k,**kw):
+ set_features(kw,'stlib')
+ return bld(*k,**kw)
+ at conf
+def objects(bld,*k,**kw):
+ set_features(kw,'objects')
+ return bld(*k,**kw)
diff --git a/waflib/Tools/c_config.py b/waflib/Tools/c_config.py
new file mode 100755
index 0000000..e6d3b5d
--- /dev/null
+++ b/waflib/Tools/c_config.py
@@ -0,0 +1,728 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,re,shlex,sys
+from waflib import Build,Utils,Task,Options,Logs,Errors,ConfigSet,Runner
+from waflib.TaskGen import after_method,feature
+from waflib.Configure import conf
+WAF_CONFIG_H='config.h'
+DEFKEYS='define_key'
+INCKEYS='include_key'
+cfg_ver={'atleast-version':'>=','exact-version':'==','max-version':'<=',}
+SNIP_FUNCTION='''
+int main(int argc, char **argv) {
+ void *p;
+ (void)argc; (void)argv;
+ p=(void*)(%s);
+ return 0;
+}
+'''
+SNIP_TYPE='''
+int main(int argc, char **argv) {
+ (void)argc; (void)argv;
+ if ((%(type_name)s *) 0) return 0;
+ if (sizeof (%(type_name)s)) return 0;
+ return 1;
+}
+'''
+SNIP_EMPTY_PROGRAM='''
+int main(int argc, char **argv) {
+ (void)argc; (void)argv;
+ return 0;
+}
+'''
+SNIP_FIELD='''
+int main(int argc, char **argv) {
+ char *off;
+ (void)argc; (void)argv;
+ off = (char*) &((%(type_name)s*)0)->%(field_name)s;
+ return (size_t) off < sizeof(%(type_name)s);
+}
+'''
+MACRO_TO_DESTOS={'__linux__':'linux','__GNU__':'gnu','__FreeBSD__':'freebsd','__NetBSD__':'netbsd','__OpenBSD__':'openbsd','__sun':'sunos','__hpux':'hpux','__sgi':'irix','_AIX':'aix','__CYGWIN__':'cygwin','__MSYS__':'msys','_UWIN':'uwin','_WIN64':'win32','_WIN32':'win32','__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__':'darwin','__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__':'darwin','__QNX__':'qnx','__native_client__':'nacl'}
+MACRO_TO_DEST_CPU={'__x86_64__':'x86_64','__amd64__':'x86_64','__i386__':'x86','__ia64__':'ia','__mips__':'mips','__sparc__':'sparc','__alpha__':'alpha','__aarch64__':'aarch64','__thumb__':'thumb','__arm__':'arm','__hppa__':'hppa','__powerpc__':'powerpc','__ppc__':'powerpc','__convex__':'convex','__m68k__':'m68k','__s390x__':'s390x','__s390__':'s390','__sh__':'sh',}
+ at conf
+def parse_flags(self,line,uselib_store,env=None,force_static=False):
+ assert(isinstance(line,str))
+ env=env or self.env
+ app=env.append_value
+ appu=env.append_unique
+ lex=shlex.shlex(line,posix=False)
+ lex.whitespace_split=True
+ lex.commenters=''
+ lst=list(lex)
+ uselib=uselib_store
+ while lst:
+ x=lst.pop(0)
+ st=x[:2]
+ ot=x[2:]
+ if st=='-I'or st=='/I':
+ if not ot:ot=lst.pop(0)
+ appu('INCLUDES_'+uselib,[ot])
+ elif st=='-include':
+ tmp=[x,lst.pop(0)]
+ app('CFLAGS',tmp)
+ app('CXXFLAGS',tmp)
+ elif st=='-D'or(env.CXX_NAME=='msvc'and st=='/D'):
+ if not ot:ot=lst.pop(0)
+ app('DEFINES_'+uselib,[ot])
+ elif st=='-l':
+ if not ot:ot=lst.pop(0)
+ prefix=force_static and'STLIB_'or'LIB_'
+ appu(prefix+uselib,[ot])
+ elif st=='-L':
+ if not ot:ot=lst.pop(0)
+ appu('LIBPATH_'+uselib,[ot])
+ elif x.startswith('/LIBPATH:'):
+ appu('LIBPATH_'+uselib,[x.replace('/LIBPATH:','')])
+ elif x=='-pthread'or x.startswith('+')or x.startswith('-std'):
+ app('CFLAGS_'+uselib,[x])
+ app('CXXFLAGS_'+uselib,[x])
+ app('LINKFLAGS_'+uselib,[x])
+ elif x=='-framework':
+ appu('FRAMEWORK_'+uselib,[lst.pop(0)])
+ elif x.startswith('-F'):
+ appu('FRAMEWORKPATH_'+uselib,[x[2:]])
+ elif x.startswith('-Wl'):
+ app('LINKFLAGS_'+uselib,[x])
+ elif x.startswith('-m')or x.startswith('-f')or x.startswith('-dynamic'):
+ app('CFLAGS_'+uselib,[x])
+ app('CXXFLAGS_'+uselib,[x])
+ elif x.startswith('-bundle'):
+ app('LINKFLAGS_'+uselib,[x])
+ elif x.startswith('-undefined'):
+ arg=lst.pop(0)
+ app('LINKFLAGS_'+uselib,[x,arg])
+ elif x.startswith('-arch')or x.startswith('-isysroot'):
+ tmp=[x,lst.pop(0)]
+ app('CFLAGS_'+uselib,tmp)
+ app('CXXFLAGS_'+uselib,tmp)
+ app('LINKFLAGS_'+uselib,tmp)
+ elif x.endswith('.a')or x.endswith('.so')or x.endswith('.dylib')or x.endswith('.lib'):
+ appu('LINKFLAGS_'+uselib,[x])
+ at conf
+def ret_msg(self,f,kw):
+ if isinstance(f,str):
+ return f
+ return f(kw)
+ at conf
+def validate_cfg(self,kw):
+ if not'path'in kw:
+ if not self.env.PKGCONFIG:
+ self.find_program('pkg-config',var='PKGCONFIG')
+ kw['path']=self.env.PKGCONFIG
+ if'atleast_pkgconfig_version'in kw:
+ if not'msg'in kw:
+ kw['msg']='Checking for pkg-config version >= %r'%kw['atleast_pkgconfig_version']
+ return
+ if not'okmsg'in kw:
+ kw['okmsg']='yes'
+ if not'errmsg'in kw:
+ kw['errmsg']='not found'
+ if'modversion'in kw:
+ if not'msg'in kw:
+ kw['msg']='Checking for %r version'%kw['modversion']
+ return
+ for x in cfg_ver.keys():
+ y=x.replace('-','_')
+ if y in kw:
+ if not'package'in kw:
+ raise ValueError('%s requires a package'%x)
+ if not'msg'in kw:
+ kw['msg']='Checking for %r %s %s'%(kw['package'],cfg_ver[x],kw[y])
+ return
+ if not'msg'in kw:
+ kw['msg']='Checking for %r'%(kw['package']or kw['path'])
+ at conf
+def exec_cfg(self,kw):
+ def define_it():
+ self.define(self.have_define(kw.get('uselib_store',kw['package'])),1,0)
+ if'atleast_pkgconfig_version'in kw:
+ cmd=[kw['path'],'--atleast-pkgconfig-version=%s'%kw['atleast_pkgconfig_version']]
+ self.cmd_and_log(cmd)
+ if not'okmsg'in kw:
+ kw['okmsg']='yes'
+ return
+ for x in cfg_ver:
+ y=x.replace('-','_')
+ if y in kw:
+ self.cmd_and_log([kw['path'],'--%s=%s'%(x,kw[y]),kw['package']])
+ if not'okmsg'in kw:
+ kw['okmsg']='yes'
+ define_it()
+ break
+ if'modversion'in kw:
+ version=self.cmd_and_log([kw['path'],'--modversion',kw['modversion']]).strip()
+ self.define('%s_VERSION'%Utils.quote_define_name(kw.get('uselib_store',kw['modversion'])),version)
+ return version
+ lst=[kw['path']]
+ defi=kw.get('define_variable',None)
+ if not defi:
+ defi=self.env.PKG_CONFIG_DEFINES or{}
+ for key,val in defi.items():
+ lst.append('--define-variable=%s=%s'%(key,val))
+ if'variables'in kw:
+ env=kw.get('env',self.env)
+ uselib=kw.get('uselib_store',kw['package'].upper())
+ vars=Utils.to_list(kw['variables'])
+ for v in vars:
+ val=self.cmd_and_log(lst+['--variable='+v]).strip()
+ var='%s_%s'%(uselib,v)
+ env[var]=val
+ if not'okmsg'in kw:
+ kw['okmsg']='yes'
+ return
+ static=False
+ if'args'in kw:
+ args=Utils.to_list(kw['args'])
+ if'--static'in args or'--static-libs'in args:
+ static=True
+ lst+=args
+ lst.extend(Utils.to_list(kw['package']))
+ ret=self.cmd_and_log(lst)
+ if not'okmsg'in kw:
+ kw['okmsg']='yes'
+ define_it()
+ self.parse_flags(ret,kw.get('uselib_store',kw['package'].upper()),kw.get('env',self.env),force_static=static)
+ return ret
+ at conf
+def check_cfg(self,*k,**kw):
+ if k:
+ lst=k[0].split()
+ kw['package']=lst[0]
+ kw['args']=' '.join(lst[1:])
+ self.validate_cfg(kw)
+ if'msg'in kw:
+ self.start_msg(kw['msg'])
+ ret=None
+ try:
+ ret=self.exec_cfg(kw)
+ except self.errors.WafError:
+ if'errmsg'in kw:
+ self.end_msg(kw['errmsg'],'YELLOW')
+ if Logs.verbose>1:
+ raise
+ else:
+ self.fatal('The configuration failed')
+ else:
+ kw['success']=ret
+ if'okmsg'in kw:
+ self.end_msg(self.ret_msg(kw['okmsg'],kw))
+ return ret
+ at conf
+def validate_c(self,kw):
+ if not'env'in kw:
+ kw['env']=self.env.derive()
+ env=kw['env']
+ if not'compiler'in kw and not'features'in kw:
+ kw['compiler']='c'
+ if env['CXX_NAME']and Task.classes.get('cxx',None):
+ kw['compiler']='cxx'
+ if not self.env['CXX']:
+ self.fatal('a c++ compiler is required')
+ else:
+ if not self.env['CC']:
+ self.fatal('a c compiler is required')
+ if not'compile_mode'in kw:
+ kw['compile_mode']='c'
+ if'cxx'in Utils.to_list(kw.get('features',[]))or kw.get('compiler','')=='cxx':
+ kw['compile_mode']='cxx'
+ if not'type'in kw:
+ kw['type']='cprogram'
+ if not'features'in kw:
+ kw['features']=[kw['compile_mode'],kw['type']]
+ else:
+ kw['features']=Utils.to_list(kw['features'])
+ if not'compile_filename'in kw:
+ kw['compile_filename']='test.c'+((kw['compile_mode']=='cxx')and'pp'or'')
+ def to_header(dct):
+ if'header_name'in dct:
+ dct=Utils.to_list(dct['header_name'])
+ return''.join(['#include <%s>\n'%x for x in dct])
+ return''
+ if'framework_name'in kw:
+ fwkname=kw['framework_name']
+ if not'uselib_store'in kw:
+ kw['uselib_store']=fwkname.upper()
+ if not kw.get('no_header',False):
+ if not'header_name'in kw:
+ kw['header_name']=[]
+ fwk='%s/%s.h'%(fwkname,fwkname)
+ if kw.get('remove_dot_h',None):
+ fwk=fwk[:-2]
+ kw['header_name']=Utils.to_list(kw['header_name'])+[fwk]
+ kw['msg']='Checking for framework %s'%fwkname
+ kw['framework']=fwkname
+ if'function_name'in kw:
+ fu=kw['function_name']
+ if not'msg'in kw:
+ kw['msg']='Checking for function %s'%fu
+ kw['code']=to_header(kw)+SNIP_FUNCTION%fu
+ if not'uselib_store'in kw:
+ kw['uselib_store']=fu.upper()
+ if not'define_name'in kw:
+ kw['define_name']=self.have_define(fu)
+ elif'type_name'in kw:
+ tu=kw['type_name']
+ if not'header_name'in kw:
+ kw['header_name']='stdint.h'
+ if'field_name'in kw:
+ field=kw['field_name']
+ kw['code']=to_header(kw)+SNIP_FIELD%{'type_name':tu,'field_name':field}
+ if not'msg'in kw:
+ kw['msg']='Checking for field %s in %s'%(field,tu)
+ if not'define_name'in kw:
+ kw['define_name']=self.have_define((tu+'_'+field).upper())
+ else:
+ kw['code']=to_header(kw)+SNIP_TYPE%{'type_name':tu}
+ if not'msg'in kw:
+ kw['msg']='Checking for type %s'%tu
+ if not'define_name'in kw:
+ kw['define_name']=self.have_define(tu.upper())
+ elif'header_name'in kw:
+ if not'msg'in kw:
+ kw['msg']='Checking for header %s'%kw['header_name']
+ l=Utils.to_list(kw['header_name'])
+ assert len(l)>0,'list of headers in header_name is empty'
+ kw['code']=to_header(kw)+SNIP_EMPTY_PROGRAM
+ if not'uselib_store'in kw:
+ kw['uselib_store']=l[0].upper()
+ if not'define_name'in kw:
+ kw['define_name']=self.have_define(l[0])
+ if'lib'in kw:
+ if not'msg'in kw:
+ kw['msg']='Checking for library %s'%kw['lib']
+ if not'uselib_store'in kw:
+ kw['uselib_store']=kw['lib'].upper()
+ if'stlib'in kw:
+ if not'msg'in kw:
+ kw['msg']='Checking for static library %s'%kw['stlib']
+ if not'uselib_store'in kw:
+ kw['uselib_store']=kw['stlib'].upper()
+ if'fragment'in kw:
+ kw['code']=kw['fragment']
+ if not'msg'in kw:
+ kw['msg']='Checking for code snippet'
+ if not'errmsg'in kw:
+ kw['errmsg']='no'
+ for(flagsname,flagstype)in[('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')]:
+ if flagsname in kw:
+ if not'msg'in kw:
+ kw['msg']='Checking for %s flags %s'%(flagstype,kw[flagsname])
+ if not'errmsg'in kw:
+ kw['errmsg']='no'
+ if not'execute'in kw:
+ kw['execute']=False
+ if kw['execute']:
+ kw['features'].append('test_exec')
+ if not'errmsg'in kw:
+ kw['errmsg']='not found'
+ if not'okmsg'in kw:
+ kw['okmsg']='yes'
+ if not'code'in kw:
+ kw['code']=SNIP_EMPTY_PROGRAM
+ if self.env[INCKEYS]:
+ kw['code']='\n'.join(['#include <%s>'%x for x in self.env[INCKEYS]])+'\n'+kw['code']
+ if not kw.get('success'):kw['success']=None
+ if'define_name'in kw:
+ self.undefine(kw['define_name'])
+ assert'msg'in kw,'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c'
+ at conf
+def post_check(self,*k,**kw):
+ is_success=0
+ if kw['execute']:
+ if kw['success']is not None:
+ if kw.get('define_ret',False):
+ is_success=kw['success']
+ else:
+ is_success=(kw['success']==0)
+ else:
+ is_success=(kw['success']==0)
+ if'define_name'in kw:
+ if'header_name'in kw or'function_name'in kw or'type_name'in kw or'fragment'in kw:
+ if kw['execute']and kw.get('define_ret',None)and isinstance(is_success,str):
+ self.define(kw['define_name'],is_success,quote=kw.get('quote',1))
+ else:
+ self.define_cond(kw['define_name'],is_success)
+ else:
+ self.define_cond(kw['define_name'],is_success)
+ if'header_name'in kw:
+ if kw.get('auto_add_header_name',False):
+ self.env.append_value(INCKEYS,Utils.to_list(kw['header_name']))
+ if is_success and'uselib_store'in kw:
+ from waflib.Tools import ccroot
+ _vars=set([])
+ for x in kw['features']:
+ if x in ccroot.USELIB_VARS:
+ _vars|=ccroot.USELIB_VARS[x]
+ for k in _vars:
+ lk=k.lower()
+ if k=='INCLUDES':lk='includes'
+ if k=='DEFINES':lk='defines'
+ if lk in kw:
+ val=kw[lk]
+ if isinstance(val,str):
+ val=val.rstrip(os.path.sep)
+ self.env.append_unique(k+'_'+kw['uselib_store'],val)
+ return is_success
+ at conf
+def check(self,*k,**kw):
+ self.validate_c(kw)
+ self.start_msg(kw['msg'])
+ ret=None
+ try:
+ ret=self.run_c_code(*k,**kw)
+ except self.errors.ConfigurationError:
+ self.end_msg(kw['errmsg'],'YELLOW')
+ if Logs.verbose>1:
+ raise
+ else:
+ self.fatal('The configuration failed')
+ else:
+ kw['success']=ret
+ ret=self.post_check(*k,**kw)
+ if not ret:
+ self.end_msg(kw['errmsg'],'YELLOW')
+ self.fatal('The configuration failed %r'%ret)
+ else:
+ self.end_msg(self.ret_msg(kw['okmsg'],kw))
+ return ret
+class test_exec(Task.Task):
+ color='PINK'
+ def run(self):
+ if getattr(self.generator,'rpath',None):
+ if getattr(self.generator,'define_ret',False):
+ self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()])
+ else:
+ self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()])
+ else:
+ env=self.env.env or{}
+ env.update(dict(os.environ))
+ for var in('LD_LIBRARY_PATH','DYLD_LIBRARY_PATH','PATH'):
+ env[var]=self.inputs[0].parent.abspath()+os.path.pathsep+env.get(var,'')
+ if getattr(self.generator,'define_ret',False):
+ self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()],env=env)
+ else:
+ self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()],env=env)
+ at feature('test_exec')
+ at after_method('apply_link')
+def test_exec_fun(self):
+ self.create_task('test_exec',self.link_task.outputs[0])
+CACHE_RESULTS=1
+COMPILE_ERRORS=2
+ at conf
+def run_c_code(self,*k,**kw):
+ lst=[str(v)for(p,v)in kw.items()if p!='env']
+ h=Utils.h_list(lst)
+ dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h)
+ try:
+ os.makedirs(dir)
+ except OSError:
+ pass
+ try:
+ os.stat(dir)
+ except OSError:
+ self.fatal('cannot use the configuration test folder %r'%dir)
+ cachemode=getattr(Options.options,'confcache',None)
+ if cachemode==CACHE_RESULTS:
+ try:
+ proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_c_code'))
+ except OSError:
+ pass
+ else:
+ ret=proj['cache_run_c_code']
+ if isinstance(ret,str)and ret.startswith('Test does not build'):
+ self.fatal(ret)
+ return ret
+ bdir=os.path.join(dir,'testbuild')
+ if not os.path.exists(bdir):
+ os.makedirs(bdir)
+ self.test_bld=bld=Build.BuildContext(top_dir=dir,out_dir=bdir)
+ bld.init_dirs()
+ bld.progress_bar=0
+ bld.targets='*'
+ if kw['compile_filename']:
+ node=bld.srcnode.make_node(kw['compile_filename'])
+ node.write(kw['code'])
+ bld.logger=self.logger
+ bld.all_envs.update(self.all_envs)
+ bld.env=kw['env']
+ o=bld(features=kw['features'],source=kw['compile_filename'],target='testprog')
+ for k,v in kw.items():
+ setattr(o,k,v)
+ self.to_log("==>\n%s\n<=="%kw['code'])
+ bld.targets='*'
+ ret=-1
+ try:
+ try:
+ bld.compile()
+ except Errors.WafError:
+ ret='Test does not build: %s'%Utils.ex_stack()
+ self.fatal(ret)
+ else:
+ ret=getattr(bld,'retval',0)
+ finally:
+ proj=ConfigSet.ConfigSet()
+ proj['cache_run_c_code']=ret
+ proj.store(os.path.join(dir,'cache_run_c_code'))
+ return ret
+ at conf
+def check_cxx(self,*k,**kw):
+ kw['compiler']='cxx'
+ return self.check(*k,**kw)
+ at conf
+def check_cc(self,*k,**kw):
+ kw['compiler']='c'
+ return self.check(*k,**kw)
+ at conf
+def define(self,key,val,quote=True):
+ assert key and isinstance(key,str)
+ if val is True:
+ val=1
+ elif val in(False,None):
+ val=0
+ if isinstance(val,int)or isinstance(val,float):
+ s='%s=%s'
+ else:
+ s=quote and'%s="%s"'or'%s=%s'
+ app=s%(key,str(val))
+ ban=key+'='
+ lst=self.env['DEFINES']
+ for x in lst:
+ if x.startswith(ban):
+ lst[lst.index(x)]=app
+ break
+ else:
+ self.env.append_value('DEFINES',app)
+ self.env.append_unique(DEFKEYS,key)
+ at conf
+def undefine(self,key):
+ assert key and isinstance(key,str)
+ ban=key+'='
+ lst=[x for x in self.env['DEFINES']if not x.startswith(ban)]
+ self.env['DEFINES']=lst
+ self.env.append_unique(DEFKEYS,key)
+ at conf
+def define_cond(self,key,val):
+ assert key and isinstance(key,str)
+ if val:
+ self.define(key,1)
+ else:
+ self.undefine(key)
+ at conf
+def is_defined(self,key):
+ assert key and isinstance(key,str)
+ ban=key+'='
+ for x in self.env['DEFINES']:
+ if x.startswith(ban):
+ return True
+ return False
+ at conf
+def get_define(self,key):
+ assert key and isinstance(key,str)
+ ban=key+'='
+ for x in self.env['DEFINES']:
+ if x.startswith(ban):
+ return x[len(ban):]
+ return None
+ at conf
+def have_define(self,key):
+ return(self.env.HAVE_PAT or'HAVE_%s')%Utils.quote_define_name(key)
+ at conf
+def write_config_header(self,configfile='',guard='',top=False,env=None,defines=True,headers=False,remove=True,define_prefix=''):
+ if env:
+ Logs.warn('Cannot pass env to write_config_header')
+ if not configfile:configfile=WAF_CONFIG_H
+ waf_guard=guard or'W_%s_WAF'%Utils.quote_define_name(configfile)
+ node=top and self.bldnode or self.path.get_bld()
+ node=node.make_node(configfile)
+ node.parent.mkdir()
+ lst=['/* WARNING! All changes made to this file will be lost! */\n']
+ lst.append('#ifndef %s\n#define %s\n'%(waf_guard,waf_guard))
+ lst.append(self.get_config_header(defines,headers,define_prefix=define_prefix))
+ lst.append('\n#endif /* %s */\n'%waf_guard)
+ node.write('\n'.join(lst))
+ self.env.append_unique(Build.CFG_FILES,[node.abspath()])
+ if remove:
+ for key in self.env[DEFKEYS]:
+ self.undefine(key)
+ self.env[DEFKEYS]=[]
+ at conf
+def get_config_header(self,defines=True,headers=False,define_prefix=''):
+ lst=[]
+ if headers:
+ for x in self.env[INCKEYS]:
+ lst.append('#include <%s>'%x)
+ if defines:
+ for x in self.env[DEFKEYS]:
+ if self.is_defined(x):
+ val=self.get_define(x)
+ lst.append('#define %s %s'%(define_prefix+x,val))
+ else:
+ lst.append('/* #undef %s */'%(define_prefix+x))
+ return"\n".join(lst)
+ at conf
+def cc_add_flags(conf):
+ conf.add_os_flags('CPPFLAGS','CFLAGS')
+ conf.add_os_flags('CFLAGS')
+ at conf
+def cxx_add_flags(conf):
+ conf.add_os_flags('CPPFLAGS','CXXFLAGS')
+ conf.add_os_flags('CXXFLAGS')
+ at conf
+def link_add_flags(conf):
+ conf.add_os_flags('LINKFLAGS')
+ conf.add_os_flags('LDFLAGS','LINKFLAGS')
+ at conf
+def cc_load_tools(conf):
+ if not conf.env.DEST_OS:
+ conf.env.DEST_OS=Utils.unversioned_sys_platform()
+ conf.load('c')
+ at conf
+def cxx_load_tools(conf):
+ if not conf.env.DEST_OS:
+ conf.env.DEST_OS=Utils.unversioned_sys_platform()
+ conf.load('cxx')
+ at conf
+def get_cc_version(conf,cc,gcc=False,icc=False):
+ cmd=cc+['-dM','-E','-']
+ env=conf.env.env or None
+ try:
+ p=Utils.subprocess.Popen(cmd,stdin=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE,stderr=Utils.subprocess.PIPE,env=env)
+ p.stdin.write('\n')
+ out=p.communicate()[0]
+ except Exception:
+ conf.fatal('Could not determine the compiler version %r'%cmd)
+ if not isinstance(out,str):
+ out=out.decode(sys.stdout.encoding or'iso8859-1')
+ if gcc:
+ if out.find('__INTEL_COMPILER')>=0:
+ conf.fatal('The intel compiler pretends to be gcc')
+ if out.find('__GNUC__')<0:
+ conf.fatal('Could not determine the compiler type')
+ if icc and out.find('__INTEL_COMPILER')<0:
+ conf.fatal('Not icc/icpc')
+ k={}
+ if icc or gcc:
+ out=out.splitlines()
+ for line in out:
+ lst=shlex.split(line)
+ if len(lst)>2:
+ key=lst[1]
+ val=lst[2]
+ k[key]=val
+ def isD(var):
+ return var in k
+ def isT(var):
+ return var in k and k[var]!='0'
+ if not conf.env.DEST_OS:
+ conf.env.DEST_OS=''
+ for i in MACRO_TO_DESTOS:
+ if isD(i):
+ conf.env.DEST_OS=MACRO_TO_DESTOS[i]
+ break
+ else:
+ if isD('__APPLE__')and isD('__MACH__'):
+ conf.env.DEST_OS='darwin'
+ elif isD('__unix__'):
+ conf.env.DEST_OS='generic'
+ if isD('__ELF__'):
+ conf.env.DEST_BINFMT='elf'
+ elif isD('__WINNT__')or isD('__CYGWIN__'):
+ conf.env.DEST_BINFMT='pe'
+ conf.env.LIBDIR=conf.env['PREFIX']+'/bin'
+ elif isD('__APPLE__'):
+ conf.env.DEST_BINFMT='mac-o'
+ if not conf.env.DEST_BINFMT:
+ conf.env.DEST_BINFMT=Utils.destos_to_binfmt(conf.env.DEST_OS)
+ for i in MACRO_TO_DEST_CPU:
+ if isD(i):
+ conf.env.DEST_CPU=MACRO_TO_DEST_CPU[i]
+ break
+ Logs.debug('ccroot: dest platform: '+' '.join([conf.env[x]or'?'for x in('DEST_OS','DEST_BINFMT','DEST_CPU')]))
+ if icc:
+ ver=k['__INTEL_COMPILER']
+ conf.env['CC_VERSION']=(ver[:-2],ver[-2],ver[-1])
+ else:
+ if isD('__clang__'):
+ conf.env['CC_VERSION']=(k['__clang_major__'],k['__clang_minor__'],k['__clang_patchlevel__'])
+ else:
+ conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__'])
+ return k
+ at conf
+def get_xlc_version(conf,cc):
+ cmd=cc+['-qversion']
+ try:
+ out,err=conf.cmd_and_log(cmd,output=0)
+ except Errors.WafError:
+ conf.fatal('Could not find xlc %r'%cmd)
+ for v in(r"IBM XL C/C\+\+.* V(?P<major>\d*)\.(?P<minor>\d*)",):
+ version_re=re.compile(v,re.I).search
+ match=version_re(out or err)
+ if match:
+ k=match.groupdict()
+ conf.env['CC_VERSION']=(k['major'],k['minor'])
+ break
+ else:
+ conf.fatal('Could not determine the XLC version.')
+ at conf
+def add_as_needed(self):
+ if self.env.DEST_BINFMT=='elf'and'gcc'in(self.env.CXX_NAME,self.env.CC_NAME):
+ self.env.append_unique('LINKFLAGS','--as-needed')
+class cfgtask(Task.TaskBase):
+ def display(self):
+ return''
+ def runnable_status(self):
+ return Task.RUN_ME
+ def uid(self):
+ return Utils.SIG_NIL
+ def run(self):
+ conf=self.conf
+ bld=Build.BuildContext(top_dir=conf.srcnode.abspath(),out_dir=conf.bldnode.abspath())
+ bld.env=conf.env
+ bld.init_dirs()
+ bld.in_msg=1
+ bld.logger=self.logger
+ try:
+ bld.check(**self.args)
+ except Exception:
+ return 1
+ at conf
+def multicheck(self,*k,**kw):
+ self.start_msg(kw.get('msg','Executing %d configuration tests'%len(k)))
+ class par(object):
+ def __init__(self):
+ self.keep=False
+ self.cache_global=Options.cache_global
+ self.nocache=Options.options.nocache
+ self.returned_tasks=[]
+ self.task_sigs={}
+ def total(self):
+ return len(tasks)
+ def to_log(self,*k,**kw):
+ return
+ bld=par()
+ tasks=[]
+ for dct in k:
+ x=cfgtask(bld=bld)
+ tasks.append(x)
+ x.args=dct
+ x.bld=bld
+ x.conf=self
+ x.args=dct
+ x.logger=Logs.make_mem_logger(str(id(x)),self.logger)
+ def it():
+ yield tasks
+ while 1:
+ yield[]
+ p=Runner.Parallel(bld,Options.options.jobs)
+ p.biter=it()
+ p.start()
+ for x in tasks:
+ x.logger.memhandler.flush()
+ for x in tasks:
+ if x.hasrun!=Task.SUCCESS:
+ self.end_msg(kw.get('errmsg','no'),color='YELLOW')
+ self.fatal(kw.get('fatalmsg',None)or'One of the tests has failed, see the config.log for more information')
+ self.end_msg('ok')
diff --git a/waflib/Tools/c_osx.py b/waflib/Tools/c_osx.py
new file mode 100644
index 0000000..579b2a7
--- /dev/null
+++ b/waflib/Tools/c_osx.py
@@ -0,0 +1,120 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,shutil,sys,platform
+from waflib import TaskGen,Task,Build,Options,Utils,Errors
+from waflib.TaskGen import taskgen_method,feature,after_method,before_method
+app_info='''
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
+<plist version="0.9">
+<dict>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleGetInfoString</key>
+ <string>Created by Waf</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>NOTE</key>
+ <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
+ <key>CFBundleExecutable</key>
+ <string>%s</string>
+</dict>
+</plist>
+'''
+ at feature('c','cxx')
+def set_macosx_deployment_target(self):
+ if self.env['MACOSX_DEPLOYMENT_TARGET']:
+ os.environ['MACOSX_DEPLOYMENT_TARGET']=self.env['MACOSX_DEPLOYMENT_TARGET']
+ elif'MACOSX_DEPLOYMENT_TARGET'not in os.environ:
+ if Utils.unversioned_sys_platform()=='darwin':
+ os.environ['MACOSX_DEPLOYMENT_TARGET']='.'.join(platform.mac_ver()[0].split('.')[:2])
+ at taskgen_method
+def create_bundle_dirs(self,name,out):
+ bld=self.bld
+ dir=out.parent.find_or_declare(name)
+ dir.mkdir()
+ macos=dir.find_or_declare(['Contents','MacOS'])
+ macos.mkdir()
+ return dir
def bundle_name_for_output(out):
	"""Return the OS X ``.app`` bundle name derived from a link output node.

	The output's last extension (if any) is replaced by ``.app``; a name
	without a dot simply gets ``.app`` appended.
	"""
	stem, dot, _ext = out.name.rpartition('.')
	base = stem if dot else out.name
	return base + '.app'
+ at feature('cprogram','cxxprogram')
+ at after_method('apply_link')
+def create_task_macapp(self):
+ if self.env['MACAPP']or getattr(self,'mac_app',False):
+ out=self.link_task.outputs[0]
+ name=bundle_name_for_output(out)
+ dir=self.create_bundle_dirs(name,out)
+ n1=dir.find_or_declare(['Contents','MacOS',out.name])
+ self.apptask=self.create_task('macapp',self.link_task.outputs,n1)
+ inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/MacOS/'%name
+ self.bld.install_files(inst_to,n1,chmod=Utils.O755)
+ if getattr(self,'mac_resources',None):
+ res_dir=n1.parent.parent.make_node('Resources')
+ inst_to=getattr(self,'install_path','/Applications')+'/%s/Resources'%name
+ for x in self.to_list(self.mac_resources):
+ node=self.path.find_node(x)
+ if not node:
+ raise Errors.WafError('Missing mac_resource %r in %r'%(x,self))
+ parent=node.parent
+ if os.path.isdir(node.abspath()):
+ nodes=node.ant_glob('**')
+ else:
+ nodes=[node]
+ for node in nodes:
+ rel=node.path_from(parent)
+ tsk=self.create_task('macapp',node,res_dir.make_node(rel))
+ self.bld.install_as(inst_to+'/%s'%rel,node)
+ if getattr(self.bld,'is_install',None):
+ self.install_task.hasrun=Task.SKIP_ME
+ at feature('cprogram','cxxprogram')
+ at after_method('apply_link')
+def create_task_macplist(self):
+ if self.env['MACAPP']or getattr(self,'mac_app',False):
+ out=self.link_task.outputs[0]
+ name=bundle_name_for_output(out)
+ dir=self.create_bundle_dirs(name,out)
+ n1=dir.find_or_declare(['Contents','Info.plist'])
+ self.plisttask=plisttask=self.create_task('macplist',[],n1)
+ if getattr(self,'mac_plist',False):
+ node=self.path.find_resource(self.mac_plist)
+ if node:
+ plisttask.inputs.append(node)
+ else:
+ plisttask.code=self.mac_plist
+ else:
+ plisttask.code=app_info%self.link_task.outputs[0].name
+ inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/'%name
+ self.bld.install_files(inst_to,n1)
+ at feature('cshlib','cxxshlib')
+ at before_method('apply_link','propagate_uselib_vars')
+def apply_bundle(self):
+ if self.env['MACBUNDLE']or getattr(self,'mac_bundle',False):
+ self.env['LINKFLAGS_cshlib']=self.env['LINKFLAGS_cxxshlib']=[]
+ self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['macbundle_PATTERN']
+ use=self.use=self.to_list(getattr(self,'use',[]))
+ if not'MACBUNDLE'in use:
+ use.append('MACBUNDLE')
+app_dirs=['Contents','Contents/MacOS','Contents/Resources']
+class macapp(Task.Task):
+ color='PINK'
+ def run(self):
+ self.outputs[0].parent.mkdir()
+ shutil.copy2(self.inputs[0].srcpath(),self.outputs[0].abspath())
+class macplist(Task.Task):
+ color='PINK'
+ ext_in=['.bin']
+ def run(self):
+ if getattr(self,'code',None):
+ txt=self.code
+ else:
+ txt=self.inputs[0].read()
+ self.outputs[0].write(txt)
diff --git a/waflib/Tools/c_preproc.py b/waflib/Tools/c_preproc.py
new file mode 100644
index 0000000..9dfd8ca
--- /dev/null
+++ b/waflib/Tools/c_preproc.py
@@ -0,0 +1,604 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import re,string,traceback
+from waflib import Logs,Utils,Errors
+from waflib.Logs import debug,error
+class PreprocError(Errors.WafError):
+ pass
+POPFILE='-'
+recursion_limit=150
+go_absolute=False
+standard_includes=['/usr/include']
+if Utils.is_win32:
+ standard_includes=[]
+use_trigraphs=0
+strict_quotes=0
+g_optrans={'not':'!','and':'&&','bitand':'&','and_eq':'&=','or':'||','bitor':'|','or_eq':'|=','xor':'^','xor_eq':'^=','compl':'~',}
+re_lines=re.compile('^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
+re_mac=re.compile("^[a-zA-Z_]\w*")
+re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
+re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE)
+re_nl=re.compile('\\\\\r*\n',re.MULTILINE)
+re_cpp=re.compile(r"""(/\*[^*]*\*+([^/*][^*]*\*+)*/)|//[^\n]*|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^/"'\\]*)""",re.MULTILINE)
+trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')]
+chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39}
+NUM='i'
+OP='O'
+IDENT='T'
+STR='s'
+CHAR='c'
+tok_types=[NUM,STR,IDENT,OP]
+exp_types=[r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',]
+re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M)
+accepted='a'
+ignored='i'
+undefined='u'
+skipped='s'
+def repl(m):
+ s=m.group(1)
+ if s:
+ return' '
+ return m.group(3)or''
+def filter_comments(filename):
+ code=Utils.readf(filename)
+ if use_trigraphs:
+ for(a,b)in trig_def:code=code.split(a).join(b)
+ code=re_nl.sub('',code)
+ code=re_cpp.sub(repl,code)
+ return[(m.group(2),m.group(3))for m in re.finditer(re_lines,code)]
+prec={}
+ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',',']
+for x in range(len(ops)):
+ syms=ops[x]
+ for u in syms.split():
+ prec[u]=x
+def trimquotes(s):
+ if not s:return''
+ s=s.rstrip()
+ if s[0]=="'"and s[-1]=="'":return s[1:-1]
+ return s
+def reduce_nums(val_1,val_2,val_op):
+ try:a=0+val_1
+ except TypeError:a=int(val_1)
+ try:b=0+val_2
+ except TypeError:b=int(val_2)
+ d=val_op
+ if d=='%':c=a%b
+ elif d=='+':c=a+b
+ elif d=='-':c=a-b
+ elif d=='*':c=a*b
+ elif d=='/':c=a/b
+ elif d=='^':c=a^b
+ elif d=='|':c=a|b
+ elif d=='||':c=int(a or b)
+ elif d=='&':c=a&b
+ elif d=='&&':c=int(a and b)
+ elif d=='==':c=int(a==b)
+ elif d=='!=':c=int(a!=b)
+ elif d=='<=':c=int(a<=b)
+ elif d=='<':c=int(a<b)
+ elif d=='>':c=int(a>b)
+ elif d=='>=':c=int(a>=b)
+ elif d=='^':c=int(a^b)
+ elif d=='<<':c=a<<b
+ elif d=='>>':c=a>>b
+ else:c=0
+ return c
+def get_num(lst):
+ if not lst:raise PreprocError("empty list for get_num")
+ (p,v)=lst[0]
+ if p==OP:
+ if v=='(':
+ count_par=1
+ i=1
+ while i<len(lst):
+ (p,v)=lst[i]
+ if p==OP:
+ if v==')':
+ count_par-=1
+ if count_par==0:
+ break
+ elif v=='(':
+ count_par+=1
+ i+=1
+ else:
+ raise PreprocError("rparen expected %r"%lst)
+ (num,_)=get_term(lst[1:i])
+ return(num,lst[i+1:])
+ elif v=='+':
+ return get_num(lst[1:])
+ elif v=='-':
+ num,lst=get_num(lst[1:])
+ return(reduce_nums('-1',num,'*'),lst)
+ elif v=='!':
+ num,lst=get_num(lst[1:])
+ return(int(not int(num)),lst)
+ elif v=='~':
+ num,lst=get_num(lst[1:])
+ return(~int(num),lst)
+ else:
+ raise PreprocError("Invalid op token %r for get_num"%lst)
+ elif p==NUM:
+ return v,lst[1:]
+ elif p==IDENT:
+ return 0,lst[1:]
+ else:
+ raise PreprocError("Invalid token %r for get_num"%lst)
+def get_term(lst):
+ if not lst:raise PreprocError("empty list for get_term")
+ num,lst=get_num(lst)
+ if not lst:
+ return(num,[])
+ (p,v)=lst[0]
+ if p==OP:
+ if v==',':
+ return get_term(lst[1:])
+ elif v=='?':
+ count_par=0
+ i=1
+ while i<len(lst):
+ (p,v)=lst[i]
+ if p==OP:
+ if v==')':
+ count_par-=1
+ elif v=='(':
+ count_par+=1
+ elif v==':':
+ if count_par==0:
+ break
+ i+=1
+ else:
+ raise PreprocError("rparen expected %r"%lst)
+ if int(num):
+ return get_term(lst[1:i])
+ else:
+ return get_term(lst[i+1:])
+ else:
+ num2,lst=get_num(lst[1:])
+ if not lst:
+ num2=reduce_nums(num,num2,v)
+ return get_term([(NUM,num2)]+lst)
+ p2,v2=lst[0]
+ if p2!=OP:
+ raise PreprocError("op expected %r"%lst)
+ if prec[v2]>=prec[v]:
+ num2=reduce_nums(num,num2,v)
+ return get_term([(NUM,num2)]+lst)
+ else:
+ num3,lst=get_num(lst[1:])
+ num3=reduce_nums(num2,num3,v2)
+ return get_term([(NUM,num),(p,v),(NUM,num3)]+lst)
+ raise PreprocError("cannot reduce %r"%lst)
+def reduce_eval(lst):
+ num,lst=get_term(lst)
+ return(NUM,num)
+def stringize(lst):
+ lst=[str(v2)for(p2,v2)in lst]
+ return"".join(lst)
+def paste_tokens(t1,t2):
+ p1=None
+ if t1[0]==OP and t2[0]==OP:
+ p1=OP
+ elif t1[0]==IDENT and(t2[0]==IDENT or t2[0]==NUM):
+ p1=IDENT
+ elif t1[0]==NUM and t2[0]==NUM:
+ p1=NUM
+ if not p1:
+ raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2))
+ return(p1,t1[1]+t2[1])
+def reduce_tokens(lst,defs,ban=[]):
+ i=0
+ while i<len(lst):
+ (p,v)=lst[i]
+ if p==IDENT and v=="defined":
+ del lst[i]
+ if i<len(lst):
+ (p2,v2)=lst[i]
+ if p2==IDENT:
+ if v2 in defs:
+ lst[i]=(NUM,1)
+ else:
+ lst[i]=(NUM,0)
+ elif p2==OP and v2=='(':
+ del lst[i]
+ (p2,v2)=lst[i]
+ del lst[i]
+ if v2 in defs:
+ lst[i]=(NUM,1)
+ else:
+ lst[i]=(NUM,0)
+ else:
+ raise PreprocError("Invalid define expression %r"%lst)
+ elif p==IDENT and v in defs:
+ if isinstance(defs[v],str):
+ a,b=extract_macro(defs[v])
+ defs[v]=b
+ macro_def=defs[v]
+ to_add=macro_def[1]
+ if isinstance(macro_def[0],list):
+ del lst[i]
+ accu=to_add[:]
+ reduce_tokens(accu,defs,ban+[v])
+ for x in range(len(accu)):
+ lst.insert(i,accu[x])
+ i+=1
+ else:
+ args=[]
+ del lst[i]
+ if i>=len(lst):
+ raise PreprocError("expected '(' after %r (got nothing)"%v)
+ (p2,v2)=lst[i]
+ if p2!=OP or v2!='(':
+ raise PreprocError("expected '(' after %r"%v)
+ del lst[i]
+ one_param=[]
+ count_paren=0
+ while i<len(lst):
+ p2,v2=lst[i]
+ del lst[i]
+ if p2==OP and count_paren==0:
+ if v2=='(':
+ one_param.append((p2,v2))
+ count_paren+=1
+ elif v2==')':
+ if one_param:args.append(one_param)
+ break
+ elif v2==',':
+ if not one_param:raise PreprocError("empty param in funcall %s"%p)
+ args.append(one_param)
+ one_param=[]
+ else:
+ one_param.append((p2,v2))
+ else:
+ one_param.append((p2,v2))
+ if v2=='(':count_paren+=1
+ elif v2==')':count_paren-=1
+ else:
+ raise PreprocError('malformed macro')
+ accu=[]
+ arg_table=macro_def[0]
+ j=0
+ while j<len(to_add):
+ (p2,v2)=to_add[j]
+ if p2==OP and v2=='#':
+ if j+1<len(to_add)and to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
+ toks=args[arg_table[to_add[j+1][1]]]
+ accu.append((STR,stringize(toks)))
+ j+=1
+ else:
+ accu.append((p2,v2))
+ elif p2==OP and v2=='##':
+ if accu and j+1<len(to_add):
+ t1=accu[-1]
+ if to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
+ toks=args[arg_table[to_add[j+1][1]]]
+ if toks:
+ accu[-1]=paste_tokens(t1,toks[0])
+ accu.extend(toks[1:])
+ else:
+ accu.append((p2,v2))
+ accu.extend(toks)
+ elif to_add[j+1][0]==IDENT and to_add[j+1][1]=='__VA_ARGS__':
+ va_toks=[]
+ st=len(macro_def[0])
+ pt=len(args)
+ for x in args[pt-st+1:]:
+ va_toks.extend(x)
+ va_toks.append((OP,','))
+ if va_toks:va_toks.pop()
+ if len(accu)>1:
+ (p3,v3)=accu[-1]
+ (p4,v4)=accu[-2]
+ if v3=='##':
+ accu.pop()
+ if v4==','and pt<st:
+ accu.pop()
+ accu+=va_toks
+ else:
+ accu[-1]=paste_tokens(t1,to_add[j+1])
+ j+=1
+ else:
+ accu.append((p2,v2))
+ elif p2==IDENT and v2 in arg_table:
+ toks=args[arg_table[v2]]
+ reduce_tokens(toks,defs,ban+[v])
+ accu.extend(toks)
+ else:
+ accu.append((p2,v2))
+ j+=1
+ reduce_tokens(accu,defs,ban+[v])
+ for x in range(len(accu)-1,-1,-1):
+ lst.insert(i,accu[x])
+ i+=1
+def eval_macro(lst,defs):
+ reduce_tokens(lst,defs,[])
+ if not lst:raise PreprocError("missing tokens to evaluate")
+ (p,v)=reduce_eval(lst)
+ return int(v)!=0
+def extract_macro(txt):
+ t=tokenize(txt)
+ if re_fun.search(txt):
+ p,name=t[0]
+ p,v=t[1]
+ if p!=OP:raise PreprocError("expected open parenthesis")
+ i=1
+ pindex=0
+ params={}
+ prev='('
+ while 1:
+ i+=1
+ p,v=t[i]
+ if prev=='(':
+ if p==IDENT:
+ params[v]=pindex
+ pindex+=1
+ prev=p
+ elif p==OP and v==')':
+ break
+ else:
+ raise PreprocError("unexpected token (3)")
+ elif prev==IDENT:
+ if p==OP and v==',':
+ prev=v
+ elif p==OP and v==')':
+ break
+ else:
+ raise PreprocError("comma or ... expected")
+ elif prev==',':
+ if p==IDENT:
+ params[v]=pindex
+ pindex+=1
+ prev=p
+ elif p==OP and v=='...':
+ raise PreprocError("not implemented (1)")
+ else:
+ raise PreprocError("comma or ... expected (2)")
+ elif prev=='...':
+ raise PreprocError("not implemented (2)")
+ else:
+ raise PreprocError("unexpected else")
+ return(name,[params,t[i+1:]])
+ else:
+ (p,v)=t[0]
+ return(v,[[],t[1:]])
+re_include=re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
+def extract_include(txt,defs):
+ m=re_include.search(txt)
+ if m:
+ if m.group('a'):return'<',m.group('a')
+ if m.group('b'):return'"',m.group('b')
+ toks=tokenize(txt)
+ reduce_tokens(toks,defs,['waf_include'])
+ if not toks:
+ raise PreprocError("could not parse include %s"%txt)
+ if len(toks)==1:
+ if toks[0][0]==STR:
+ return'"',toks[0][1]
+ else:
+ if toks[0][1]=='<'and toks[-1][1]=='>':
+ return stringize(toks).lstrip('<').rstrip('>')
+ raise PreprocError("could not parse include %s."%txt)
+def parse_char(txt):
+ if not txt:raise PreprocError("attempted to parse a null char")
+ if txt[0]!='\\':
+ return ord(txt)
+ c=txt[1]
+ if c=='x':
+ if len(txt)==4 and txt[3]in string.hexdigits:return int(txt[2:],16)
+ return int(txt[2:],16)
+ elif c.isdigit():
+ if c=='0'and len(txt)==2:return 0
+ for i in 3,2,1:
+ if len(txt)>i and txt[1:1+i].isdigit():
+ return(1+i,int(txt[1:1+i],8))
+ else:
+ try:return chr_esc[c]
+ except KeyError:raise PreprocError("could not parse char literal '%s'"%txt)
+def tokenize(s):
+ return tokenize_private(s)[:]
+@Utils.run_once
+def tokenize_private(s):
+ ret=[]
+ for match in re_clexer.finditer(s):
+ m=match.group
+ for name in tok_types:
+ v=m(name)
+ if v:
+ if name==IDENT:
+ try:v=g_optrans[v];name=OP
+ except KeyError:
+ if v.lower()=="true":
+ v=1
+ name=NUM
+ elif v.lower()=="false":
+ v=0
+ name=NUM
+ elif name==NUM:
+ if m('oct'):v=int(v,8)
+ elif m('hex'):v=int(m('hex'),16)
+ elif m('n0'):v=m('n0')
+ else:
+ v=m('char')
+ if v:v=parse_char(v)
+ else:v=m('n2')or m('n4')
+ elif name==OP:
+ if v=='%:':v='#'
+ elif v=='%:%:':v='##'
+ elif name==STR:
+ v=v[1:-1]
+ ret.append((name,v))
+ break
+ return ret
+@Utils.run_once
+def define_name(line):
+ return re_mac.match(line).group(0)
+class c_parser(object):
+ def __init__(self,nodepaths=None,defines=None):
+ self.lines=[]
+ if defines is None:
+ self.defs={}
+ else:
+ self.defs=dict(defines)
+ self.state=[]
+ self.count_files=0
+ self.currentnode_stack=[]
+ self.nodepaths=nodepaths or[]
+ self.nodes=[]
+ self.names=[]
+ self.curfile=''
+ self.ban_includes=set([])
+ def cached_find_resource(self,node,filename):
+ try:
+ nd=node.ctx.cache_nd
+ except AttributeError:
+ nd=node.ctx.cache_nd={}
+ tup=(node,filename)
+ try:
+ return nd[tup]
+ except KeyError:
+ ret=node.find_resource(filename)
+ if ret:
+ if getattr(ret,'children',None):
+ ret=None
+ elif ret.is_child_of(node.ctx.bldnode):
+ tmp=node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode))
+ if tmp and getattr(tmp,'children',None):
+ ret=None
+ nd[tup]=ret
+ return ret
+ def tryfind(self,filename):
+ self.curfile=filename
+ found=self.cached_find_resource(self.currentnode_stack[-1],filename)
+ for n in self.nodepaths:
+ if found:
+ break
+ found=self.cached_find_resource(n,filename)
+ if found:
+ self.nodes.append(found)
+ if filename[-4:]!='.moc':
+ self.addlines(found)
+ else:
+ if not filename in self.names:
+ self.names.append(filename)
+ return found
+ def addlines(self,node):
+ self.currentnode_stack.append(node.parent)
+ filepath=node.abspath()
+ self.count_files+=1
+ if self.count_files>recursion_limit:
+ raise PreprocError("recursion limit exceeded")
+ pc=self.parse_cache
+ debug('preproc: reading file %r',filepath)
+ try:
+ lns=pc[filepath]
+ except KeyError:
+ pass
+ else:
+ self.lines.extend(lns)
+ return
+ try:
+ lines=filter_comments(filepath)
+ lines.append((POPFILE,''))
+ lines.reverse()
+ pc[filepath]=lines
+ self.lines.extend(lines)
+ except IOError:
+ raise PreprocError("could not read the file %s"%filepath)
+ except Exception:
+ if Logs.verbose>0:
+ error("parsing %s failed"%filepath)
+ traceback.print_exc()
+ def start(self,node,env):
+ debug('preproc: scanning %s (in %s)',node.name,node.parent.name)
+ bld=node.ctx
+ try:
+ self.parse_cache=bld.parse_cache
+ except AttributeError:
+ bld.parse_cache={}
+ self.parse_cache=bld.parse_cache
+ self.addlines(node)
+ if env['DEFINES']:
+ try:
+ lst=['%s %s'%(x[0],trimquotes('='.join(x[1:])))for x in[y.split('=')for y in env['DEFINES']]]
+ lst.reverse()
+ self.lines.extend([('define',x)for x in lst])
+ except AttributeError:
+ pass
+ while self.lines:
+ (token,line)=self.lines.pop()
+ if token==POPFILE:
+ self.count_files-=1
+ self.currentnode_stack.pop()
+ continue
+ try:
+ ve=Logs.verbose
+ if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state)
+ state=self.state
+ if token[:2]=='if':
+ state.append(undefined)
+ elif token=='endif':
+ state.pop()
+ if token[0]!='e':
+ if skipped in self.state or ignored in self.state:
+ continue
+ if token=='if':
+ ret=eval_macro(tokenize(line),self.defs)
+ if ret:state[-1]=accepted
+ else:state[-1]=ignored
+ elif token=='ifdef':
+ m=re_mac.match(line)
+ if m and m.group(0)in self.defs:state[-1]=accepted
+ else:state[-1]=ignored
+ elif token=='ifndef':
+ m=re_mac.match(line)
+ if m and m.group(0)in self.defs:state[-1]=ignored
+ else:state[-1]=accepted
+ elif token=='include'or token=='import':
+ (kind,inc)=extract_include(line,self.defs)
+ if inc in self.ban_includes:
+ continue
+ if token=='import':self.ban_includes.add(inc)
+ if ve:debug('preproc: include found %s (%s) ',inc,kind)
+ if kind=='"'or not strict_quotes:
+ self.tryfind(inc)
+ elif token=='elif':
+ if state[-1]==accepted:
+ state[-1]=skipped
+ elif state[-1]==ignored:
+ if eval_macro(tokenize(line),self.defs):
+ state[-1]=accepted
+ elif token=='else':
+ if state[-1]==accepted:state[-1]=skipped
+ elif state[-1]==ignored:state[-1]=accepted
+ elif token=='define':
+ try:
+ self.defs[define_name(line)]=line
+ except Exception:
+ raise PreprocError("Invalid define line %s"%line)
+ elif token=='undef':
+ m=re_mac.match(line)
+ if m and m.group(0)in self.defs:
+ self.defs.__delitem__(m.group(0))
+ elif token=='pragma':
+ if re_pragma_once.match(line.lower()):
+ self.ban_includes.add(self.curfile)
+ except Exception ,e:
+ if Logs.verbose:
+ debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack())
+def scan(task):
+ global go_absolute
+ try:
+ incn=task.generator.includes_nodes
+ except AttributeError:
+ raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": '%task.generator)
+ if go_absolute:
+ nodepaths=incn+[task.generator.bld.root.find_dir(x)for x in standard_includes]
+ else:
+ nodepaths=[x for x in incn if x.is_child_of(x.ctx.srcnode)or x.is_child_of(x.ctx.bldnode)]
+ tmp=c_parser(nodepaths)
+ tmp.start(task.inputs[0],task.env)
+ if Logs.verbose:
+ debug('deps: deps for %r: %r; unresolved %r'%(task.inputs,tmp.nodes,tmp.names))
+ return(tmp.nodes,tmp.names)
diff --git a/waflib/Tools/c_tests.py b/waflib/Tools/c_tests.py
new file mode 100644
index 0000000..f275977
--- /dev/null
+++ b/waflib/Tools/c_tests.py
@@ -0,0 +1,153 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+from waflib import Task
+from waflib.Configure import conf
+from waflib.TaskGen import feature,before_method,after_method
+import sys
+LIB_CODE='''
+#ifdef _MSC_VER
+#define testEXPORT __declspec(dllexport)
+#else
+#define testEXPORT
+#endif
+testEXPORT int lib_func(void) { return 9; }
+'''
+MAIN_CODE='''
+#ifdef _MSC_VER
+#define testEXPORT __declspec(dllimport)
+#else
+#define testEXPORT
+#endif
+testEXPORT int lib_func(void);
+int main(int argc, char **argv) {
+ (void)argc; (void)argv;
+ return !(lib_func() == 9);
+}
+'''
+@feature('link_lib_test')
+@before_method('process_source')
+def link_lib_test_fun(self):
+ def write_test_file(task):
+ task.outputs[0].write(task.generator.code)
+ rpath=[]
+ if getattr(self,'add_rpath',False):
+ rpath=[self.bld.path.get_bld().abspath()]
+ mode=self.mode
+ m='%s %s'%(mode,mode)
+ ex=self.test_exec and'test_exec'or''
+ bld=self.bld
+ bld(rule=write_test_file,target='test.'+mode,code=LIB_CODE)
+ bld(rule=write_test_file,target='main.'+mode,code=MAIN_CODE)
+ bld(features='%sshlib'%m,source='test.'+mode,target='test')
+ bld(features='%sprogram %s'%(m,ex),source='main.'+mode,target='app',use='test',rpath=rpath)
+@conf
+def check_library(self,mode=None,test_exec=True):
+ if not mode:
+ mode='c'
+ if self.env.CXX:
+ mode='cxx'
+ self.check(compile_filename=[],features='link_lib_test',msg='Checking for libraries',mode=mode,test_exec=test_exec,)
+INLINE_CODE='''
+typedef int foo_t;
+static %s foo_t static_foo () {return 0; }
+%s foo_t foo () {
+ return 0;
+}
+'''
+INLINE_VALUES=['inline','__inline__','__inline']
+@conf
+def check_inline(self,**kw):
+ self.start_msg('Checking for inline')
+ if not'define_name'in kw:
+ kw['define_name']='INLINE_MACRO'
+ if not'features'in kw:
+ if self.env.CXX:
+ kw['features']=['cxx']
+ else:
+ kw['features']=['c']
+ for x in INLINE_VALUES:
+ kw['fragment']=INLINE_CODE%(x,x)
+ try:
+ self.check(**kw)
+ except self.errors.ConfigurationError:
+ continue
+ else:
+ self.end_msg(x)
+ if x!='inline':
+ self.define('inline',x,quote=False)
+ return x
+ self.fatal('could not use inline functions')
+LARGE_FRAGMENT='''#include <unistd.h>
+int main(int argc, char **argv) {
+ (void)argc; (void)argv;
+ return !(sizeof(off_t) >= 8);
+}
+'''
+@conf
+def check_large_file(self,**kw):
+ if not'define_name'in kw:
+ kw['define_name']='HAVE_LARGEFILE'
+ if not'execute'in kw:
+ kw['execute']=True
+ if not'features'in kw:
+ if self.env.CXX:
+ kw['features']=['cxx','cxxprogram']
+ else:
+ kw['features']=['c','cprogram']
+ kw['fragment']=LARGE_FRAGMENT
+ kw['msg']='Checking for large file support'
+ ret=True
+ try:
+ if self.env.DEST_BINFMT!='pe':
+ ret=self.check(**kw)
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ if ret:
+ return True
+ kw['msg']='Checking for -D_FILE_OFFSET_BITS=64'
+ kw['defines']=['_FILE_OFFSET_BITS=64']
+ try:
+ ret=self.check(**kw)
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ self.define('_FILE_OFFSET_BITS',64)
+ return ret
+ self.fatal('There is no support for large files')
+ENDIAN_FRAGMENT='''
+short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
+short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
+int use_ascii (int i) {
+ return ascii_mm[i] + ascii_ii[i];
+}
+short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
+short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 };
+int use_ebcdic (int i) {
+ return ebcdic_mm[i] + ebcdic_ii[i];
+}
+extern int foo;
+'''
+class grep_for_endianness(Task.Task):
+ color='PINK'
+ def run(self):
+ txt=self.inputs[0].read(flags='rb').decode('iso8859-1')
+ if txt.find('LiTTleEnDian')>-1:
+ self.generator.tmp.append('little')
+ elif txt.find('BIGenDianSyS')>-1:
+ self.generator.tmp.append('big')
+ else:
+ return-1
+@feature('grep_for_endianness')
+@after_method('process_source')
+def grep_for_endianness_fun(self):
+ self.create_task('grep_for_endianness',self.compiled_tasks[0].outputs[0])
+@conf
+def check_endianness(self):
+ tmp=[]
+ def check_msg(self):
+ return tmp[0]
+ self.check(fragment=ENDIAN_FRAGMENT,features='c grep_for_endianness',msg="Checking for endianness",define='ENDIANNESS',tmp=tmp,okmsg=check_msg)
+ return tmp[0]
diff --git a/waflib/Tools/ccroot.py b/waflib/Tools/ccroot.py
new file mode 100644
index 0000000..18c57ce
--- /dev/null
+++ b/waflib/Tools/ccroot.py
@@ -0,0 +1,391 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os
+from waflib import Task,Utils,Node,Errors
+from waflib.TaskGen import after_method,before_method,feature,taskgen_method,extension
+from waflib.Tools import c_aliases,c_preproc,c_config,c_osx,c_tests
+from waflib.Configure import conf
+SYSTEM_LIB_PATHS=['/usr/lib64','/usr/lib','/usr/local/lib64','/usr/local/lib']
+USELIB_VARS=Utils.defaultdict(set)
+USELIB_VARS['c']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CCDEPS','CFLAGS','ARCH'])
+USELIB_VARS['cxx']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CXXDEPS','CXXFLAGS','ARCH'])
+USELIB_VARS['d']=set(['INCLUDES','DFLAGS'])
+USELIB_VARS['includes']=set(['INCLUDES','FRAMEWORKPATH','ARCH'])
+USELIB_VARS['cprogram']=USELIB_VARS['cxxprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH'])
+USELIB_VARS['cshlib']=USELIB_VARS['cxxshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH'])
+USELIB_VARS['cstlib']=USELIB_VARS['cxxstlib']=set(['ARFLAGS','LINKDEPS'])
+USELIB_VARS['dprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
+USELIB_VARS['dshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
+USELIB_VARS['dstlib']=set(['ARFLAGS','LINKDEPS'])
+USELIB_VARS['asm']=set(['ASFLAGS'])
+@taskgen_method
+def create_compiled_task(self,name,node):
+ out='%s.%d.o'%(node.name,self.idx)
+ task=self.create_task(name,node,node.parent.find_or_declare(out))
+ try:
+ self.compiled_tasks.append(task)
+ except AttributeError:
+ self.compiled_tasks=[task]
+ return task
+@taskgen_method
+def to_incnodes(self,inlst):
+ lst=[]
+ seen=set([])
+ for x in self.to_list(inlst):
+ if x in seen or not x:
+ continue
+ seen.add(x)
+ if isinstance(x,Node.Node):
+ lst.append(x)
+ else:
+ if os.path.isabs(x):
+ lst.append(self.bld.root.make_node(x)or x)
+ else:
+ if x[0]=='#':
+ p=self.bld.bldnode.make_node(x[1:])
+ v=self.bld.srcnode.make_node(x[1:])
+ else:
+ p=self.path.get_bld().make_node(x)
+ v=self.path.make_node(x)
+ if p.is_child_of(self.bld.bldnode):
+ p.mkdir()
+ lst.append(p)
+ lst.append(v)
+ return lst
+@feature('c','cxx','d','asm','fc','includes')
+@after_method('propagate_uselib_vars','process_source')
+def apply_incpaths(self):
+ lst=self.to_incnodes(self.to_list(getattr(self,'includes',[]))+self.env['INCLUDES'])
+ self.includes_nodes=lst
+ self.env['INCPATHS']=[x.abspath()for x in lst]
+class link_task(Task.Task):
+ color='YELLOW'
+ inst_to=None
+ chmod=Utils.O755
+ def add_target(self,target):
+ if isinstance(target,str):
+ pattern=self.env[self.__class__.__name__+'_PATTERN']
+ if not pattern:
+ pattern='%s'
+ folder,name=os.path.split(target)
+ if self.__class__.__name__.find('shlib')>0:
+ if self.env.DEST_BINFMT=='pe'and getattr(self.generator,'vnum',None):
+ name=name+'-'+self.generator.vnum.split('.')[0]
+ tmp=folder+os.sep+pattern%name
+ target=self.generator.path.find_or_declare(tmp)
+ self.set_outputs(target)
+class stlink_task(link_task):
+ run_str='${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
+def rm_tgt(cls):
+ old=cls.run
+ def wrap(self):
+ try:os.remove(self.outputs[0].abspath())
+ except OSError:pass
+ return old(self)
+ setattr(cls,'run',wrap)
+rm_tgt(stlink_task)
+@feature('c','cxx','d','fc','asm')
+@after_method('process_source')
+def apply_link(self):
+ for x in self.features:
+ if x=='cprogram'and'cxx'in self.features:
+ x='cxxprogram'
+ elif x=='cshlib'and'cxx'in self.features:
+ x='cxxshlib'
+ if x in Task.classes:
+ if issubclass(Task.classes[x],link_task):
+ link=x
+ break
+ else:
+ return
+ objs=[t.outputs[0]for t in getattr(self,'compiled_tasks',[])]
+ self.link_task=self.create_task(link,objs)
+ self.link_task.add_target(self.target)
+ try:
+ inst_to=self.install_path
+ except AttributeError:
+ inst_to=self.link_task.__class__.inst_to
+ if inst_to:
+ self.install_task=self.bld.install_files(inst_to,self.link_task.outputs[:],env=self.env,chmod=self.link_task.chmod)
+@taskgen_method
+def use_rec(self,name,**kw):
+ if name in self.tmp_use_not or name in self.tmp_use_seen:
+ return
+ try:
+ y=self.bld.get_tgen_by_name(name)
+ except Errors.WafError:
+ self.uselib.append(name)
+ self.tmp_use_not.add(name)
+ return
+ self.tmp_use_seen.append(name)
+ y.post()
+ y.tmp_use_objects=objects=kw.get('objects',True)
+ y.tmp_use_stlib=stlib=kw.get('stlib',True)
+ try:
+ link_task=y.link_task
+ except AttributeError:
+ y.tmp_use_var=''
+ else:
+ objects=False
+ if not isinstance(link_task,stlink_task):
+ stlib=False
+ y.tmp_use_var='LIB'
+ else:
+ y.tmp_use_var='STLIB'
+ p=self.tmp_use_prec
+ for x in self.to_list(getattr(y,'use',[])):
+ try:
+ p[x].append(name)
+ except KeyError:
+ p[x]=[name]
+ self.use_rec(x,objects=objects,stlib=stlib)
+@feature('c','cxx','d','use','fc')
+@before_method('apply_incpaths','propagate_uselib_vars')
+@after_method('apply_link','process_source')
+def process_use(self):
+ use_not=self.tmp_use_not=set([])
+ self.tmp_use_seen=[]
+ use_prec=self.tmp_use_prec={}
+ self.uselib=self.to_list(getattr(self,'uselib',[]))
+ self.includes=self.to_list(getattr(self,'includes',[]))
+ names=self.to_list(getattr(self,'use',[]))
+ for x in names:
+ self.use_rec(x)
+ for x in use_not:
+ if x in use_prec:
+ del use_prec[x]
+ out=[]
+ tmp=[]
+ for x in self.tmp_use_seen:
+ for k in use_prec.values():
+ if x in k:
+ break
+ else:
+ tmp.append(x)
+ while tmp:
+ e=tmp.pop()
+ out.append(e)
+ try:
+ nlst=use_prec[e]
+ except KeyError:
+ pass
+ else:
+ del use_prec[e]
+ for x in nlst:
+ for y in use_prec:
+ if x in use_prec[y]:
+ break
+ else:
+ tmp.append(x)
+ if use_prec:
+ raise Errors.WafError('Cycle detected in the use processing %r'%use_prec)
+ out.reverse()
+ link_task=getattr(self,'link_task',None)
+ for x in out:
+ y=self.bld.get_tgen_by_name(x)
+ var=y.tmp_use_var
+ if var and link_task:
+ if var=='LIB'or y.tmp_use_stlib:
+ self.env.append_value(var,[y.target[y.target.rfind(os.sep)+1:]])
+ self.link_task.dep_nodes.extend(y.link_task.outputs)
+ tmp_path=y.link_task.outputs[0].parent.path_from(self.bld.bldnode)
+ self.env.append_value(var+'PATH',[tmp_path])
+ else:
+ if y.tmp_use_objects:
+ self.add_objects_from_tgen(y)
+ if getattr(y,'export_includes',None):
+ self.includes.extend(y.to_incnodes(y.export_includes))
+ for x in names:
+ try:
+ y=self.bld.get_tgen_by_name(x)
+ except Exception:
+ if not self.env['STLIB_'+x]and not x in self.uselib:
+ self.uselib.append(x)
+ else:
+ for k in self.to_list(getattr(y,'uselib',[])):
+ if not self.env['STLIB_'+k]and not k in self.uselib:
+ self.uselib.append(k)
+@taskgen_method
+def accept_node_to_link(self,node):
+ return not node.name.endswith('.pdb')
+@taskgen_method
+def add_objects_from_tgen(self,tg):
+ try:
+ link_task=self.link_task
+ except AttributeError:
+ pass
+ else:
+ for tsk in getattr(tg,'compiled_tasks',[]):
+ for x in tsk.outputs:
+ if self.accept_node_to_link(x):
+ link_task.inputs.append(x)
+@taskgen_method
+def get_uselib_vars(self):
+ _vars=set([])
+ for x in self.features:
+ if x in USELIB_VARS:
+ _vars|=USELIB_VARS[x]
+ return _vars
+@feature('c','cxx','d','fc','javac','cs','uselib','asm')
+@after_method('process_use')
+def propagate_uselib_vars(self):
+ _vars=self.get_uselib_vars()
+ env=self.env
+ for x in _vars:
+ y=x.lower()
+ env.append_unique(x,self.to_list(getattr(self,y,[])))
+ for x in self.features:
+ for var in _vars:
+ compvar='%s_%s'%(var,x)
+ env.append_value(var,env[compvar])
+ for x in self.to_list(getattr(self,'uselib',[])):
+ for v in _vars:
+ env.append_value(v,env[v+'_'+x])
+@feature('cshlib','cxxshlib','fcshlib')
+@after_method('apply_link')
+def apply_implib(self):
+ if not self.env.DEST_BINFMT=='pe':
+ return
+ dll=self.link_task.outputs[0]
+ if isinstance(self.target,Node.Node):
+ name=self.target.name
+ else:
+ name=os.path.split(self.target)[1]
+ implib=self.env['implib_PATTERN']%name
+ implib=dll.parent.find_or_declare(implib)
+ self.env.append_value('LINKFLAGS',self.env['IMPLIB_ST']%implib.bldpath())
+ self.link_task.outputs.append(implib)
+ if getattr(self,'defs',None)and self.env.DEST_BINFMT=='pe':
+ node=self.path.find_resource(self.defs)
+ if not node:
+ raise Errors.WafError('invalid def file %r'%self.defs)
+ if'msvc'in(self.env.CC_NAME,self.env.CXX_NAME):
+ self.env.append_value('LINKFLAGS','/def:%s'%node.path_from(self.bld.bldnode))
+ self.link_task.dep_nodes.append(node)
+ else:
+ self.link_task.inputs.append(node)
+ try:
+ inst_to=self.install_path
+ except AttributeError:
+ inst_to=self.link_task.__class__.inst_to
+ if not inst_to:
+ return
+ self.implib_install_task=self.bld.install_as('${LIBDIR}/%s'%implib.name,implib,self.env)
+@feature('cshlib','cxxshlib','dshlib','fcshlib','vnum')
+@after_method('apply_link','propagate_uselib_vars')
+def apply_vnum(self):
+ if not getattr(self,'vnum','')or os.name!='posix'or self.env.DEST_BINFMT not in('elf','mac-o'):
+ return
+ link=self.link_task
+ nums=self.vnum.split('.')
+ node=link.outputs[0]
+ libname=node.name
+ if libname.endswith('.dylib'):
+ name3=libname.replace('.dylib','.%s.dylib'%self.vnum)
+ name2=libname.replace('.dylib','.%s.dylib'%nums[0])
+ else:
+ name3=libname+'.'+self.vnum
+ name2=libname+'.'+nums[0]
+ if self.env.SONAME_ST:
+ v=self.env.SONAME_ST%name2
+ self.env.append_value('LINKFLAGS',v.split())
+ self.create_task('vnum',node,[node.parent.find_or_declare(name2),node.parent.find_or_declare(name3)])
+ if getattr(self,'install_task',None):
+ self.install_task.hasrun=Task.SKIP_ME
+ bld=self.bld
+ path=self.install_task.dest
+ t1=bld.install_as(path+os.sep+name3,node,env=self.env,chmod=self.link_task.chmod)
+ t2=bld.symlink_as(path+os.sep+name2,name3)
+ t3=bld.symlink_as(path+os.sep+libname,name3)
+ self.vnum_install_task=(t1,t2,t3)
+ if'-dynamiclib'in self.env['LINKFLAGS']:
+ try:
+ inst_to=self.install_path
+ except AttributeError:
+ inst_to=self.link_task.__class__.inst_to
+ if inst_to:
+ p=Utils.subst_vars(inst_to,self.env)
+ path=os.path.join(p,self.link_task.outputs[0].name)
+ self.env.append_value('LINKFLAGS',['-install_name',path])
+class vnum(Task.Task):
+ color='CYAN'
+ quient=True
+ ext_in=['.bin']
+ def run(self):
+ for x in self.outputs:
+ path=x.abspath()
+ try:
+ os.remove(path)
+ except OSError:
+ pass
+ try:
+ os.symlink(self.inputs[0].name,path)
+ except OSError:
+ return 1
+class fake_shlib(link_task):
+ def runnable_status(self):
+ for t in self.run_after:
+ if not t.hasrun:
+ return Task.ASK_LATER
+ for x in self.outputs:
+ x.sig=Utils.h_file(x.abspath())
+ return Task.SKIP_ME
+class fake_stlib(stlink_task):
+ def runnable_status(self):
+ for t in self.run_after:
+ if not t.hasrun:
+ return Task.ASK_LATER
+ for x in self.outputs:
+ x.sig=Utils.h_file(x.abspath())
+ return Task.SKIP_ME
+@conf
+def read_shlib(self,name,paths=[]):
+ return self(name=name,features='fake_lib',lib_paths=paths,lib_type='shlib')
+@conf
+def read_stlib(self,name,paths=[]):
+ return self(name=name,features='fake_lib',lib_paths=paths,lib_type='stlib')
+lib_patterns={'shlib':['lib%s.so','%s.so','lib%s.dylib','lib%s.dll','%s.dll'],'stlib':['lib%s.a','%s.a','lib%s.dll','%s.dll','lib%s.lib','%s.lib'],}
+@feature('fake_lib')
+def process_lib(self):
+ node=None
+ names=[x%self.name for x in lib_patterns[self.lib_type]]
+ for x in self.lib_paths+[self.path]+SYSTEM_LIB_PATHS:
+ if not isinstance(x,Node.Node):
+ x=self.bld.root.find_node(x)or self.path.find_node(x)
+ if not x:
+ continue
+ for y in names:
+ node=x.find_node(y)
+ if node:
+ node.sig=Utils.h_file(node.abspath())
+ break
+ else:
+ continue
+ break
+ else:
+ raise Errors.WafError('could not find library %r'%self.name)
+ self.link_task=self.create_task('fake_%s'%self.lib_type,[],[node])
+ self.target=self.name
+class fake_o(Task.Task):
+ def runnable_status(self):
+ return Task.SKIP_ME
+@extension('.o','.obj')
+def add_those_o_files(self,node):
+ tsk=self.create_task('fake_o',[],node)
+ try:
+ self.compiled_tasks.append(tsk)
+ except AttributeError:
+ self.compiled_tasks=[tsk]
+@feature('fake_obj')
+@before_method('process_source')
+def process_objs(self):
+ for node in self.to_nodes(self.source):
+ self.add_those_o_files(node)
+ self.source=[]
+@conf
+def read_object(self,obj):
+ if not isinstance(obj,self.path.__class__):
+ obj=self.path.find_resource(obj)
+ return self(features='fake_obj',source=obj,name=obj.name)
diff --git a/waflib/Tools/compiler_c.py b/waflib/Tools/compiler_c.py
new file mode 100644
index 0000000..04504fa
--- /dev/null
+++ b/waflib/Tools/compiler_c.py
@@ -0,0 +1,39 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys,imp,types
+from waflib.Tools import ccroot
+from waflib import Utils,Configure
+from waflib.Logs import debug
# candidate C compilers per platform, tried in order
c_compiler={'win32':['msvc','gcc'],'cygwin':['gcc'],'darwin':['gcc'],'aix':['xlc','gcc'],'linux':['gcc','icc'],'sunos':['suncc','gcc'],'irix':['gcc','irixcc'],'hpux':['gcc'],'gnu':['gcc'],'java':['gcc','msvc','icc'],'default':['gcc'],}
def configure(conf):
	"""Detect a usable C compiler.

	Tries each candidate from ``--check-c-compiler`` in order and keeps
	the first one that loads successfully; aborts otherwise.
	"""
	try:
		test_for_compiler=conf.options.check_c_compiler
	except AttributeError:
		conf.fatal("Add options(opt): opt.load('compiler_c')")
	for compiler in test_for_compiler.split():
		conf.env.stash()
		conf.start_msg('Checking for %r (c compiler)'%compiler)
		try:
			conf.load(compiler)
		# bugfix: 'except X ,e' is Python-2-only syntax; 'as' works on 2.6+ and 3.x
		except conf.errors.ConfigurationError as e:
			# roll back any partial changes made by the failed tool
			conf.env.revert()
			conf.end_msg(False)
			debug('compiler_c: %r'%e)
		else:
			if conf.env['CC']:
				conf.end_msg(conf.env.get_flat('CC'))
				conf.env['COMPILER_CC']=compiler
				break
			conf.end_msg(False)
	else:
		conf.fatal('could not configure a c compiler!')
def options(opt):
	"""Add --check-c-compiler and load the option handlers of each candidate."""
	opt.load_special_tools('c_*.py',ban=['c_dumbpreproc.py'])
	global c_compiler
	build_platform=Utils.unversioned_sys_platform()
	# unknown platforms fall back to the 'default' compiler list
	possible_compiler_list=c_compiler[build_platform in c_compiler and build_platform or'default']
	test_for_compiler=' '.join(possible_compiler_list)
	cc_compiler_opts=opt.add_option_group("C Compiler Options")
	cc_compiler_opts.add_option('--check-c-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C-Compiler will be checked by default: "%s"'%(build_platform,test_for_compiler),dest="check_c_compiler")
	for x in test_for_compiler.split():
		opt.load('%s'%x)
diff --git a/waflib/Tools/compiler_cxx.py b/waflib/Tools/compiler_cxx.py
new file mode 100644
index 0000000..14b7c7d
--- /dev/null
+++ b/waflib/Tools/compiler_cxx.py
@@ -0,0 +1,39 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys,imp,types
+from waflib.Tools import ccroot
+from waflib import Utils,Configure
+from waflib.Logs import debug
# candidate C++ compilers per platform, tried in order
cxx_compiler={'win32':['msvc','g++'],'cygwin':['g++'],'darwin':['g++'],'aix':['xlc++','g++'],'linux':['g++','icpc'],'sunos':['sunc++','g++'],'irix':['g++'],'hpux':['g++'],'gnu':['g++'],'java':['g++','msvc','icpc'],'default':['g++']}
def configure(conf):
	"""Detect a usable C++ compiler (first candidate that loads wins)."""
	try:
		test_for_compiler=conf.options.check_cxx_compiler
	except AttributeError:
		conf.fatal("Add options(opt): opt.load('compiler_cxx')")
	for compiler in test_for_compiler.split():
		conf.env.stash()
		conf.start_msg('Checking for %r (c++ compiler)'%compiler)
		try:
			conf.load(compiler)
		# bugfix: 'except X ,e' is Python-2-only syntax; 'as' works on 2.6+ and 3.x
		except conf.errors.ConfigurationError as e:
			conf.env.revert()
			conf.end_msg(False)
			debug('compiler_cxx: %r'%e)
		else:
			if conf.env['CXX']:
				conf.end_msg(conf.env.get_flat('CXX'))
				conf.env['COMPILER_CXX']=compiler
				break
			conf.end_msg(False)
	else:
		conf.fatal('could not configure a c++ compiler!')
def options(opt):
	"""Add --check-cxx-compiler and load the option handlers of each candidate."""
	opt.load_special_tools('cxx_*.py')
	global cxx_compiler
	build_platform=Utils.unversioned_sys_platform()
	# unknown platforms fall back to the 'default' compiler list
	possible_compiler_list=cxx_compiler[build_platform in cxx_compiler and build_platform or'default']
	test_for_compiler=' '.join(possible_compiler_list)
	cxx_compiler_opts=opt.add_option_group('C++ Compiler Options')
	cxx_compiler_opts.add_option('--check-cxx-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"'%(build_platform,test_for_compiler),dest="check_cxx_compiler")
	for x in test_for_compiler.split():
		opt.load('%s'%x)
diff --git a/waflib/Tools/compiler_d.py b/waflib/Tools/compiler_d.py
new file mode 100644
index 0000000..ee173e1
--- /dev/null
+++ b/waflib/Tools/compiler_d.py
@@ -0,0 +1,29 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys,imp,types
+from waflib import Utils,Configure,Options,Logs
def configure(conf):
	"""Detect a usable D compiler from the comma-separated --check-d-compiler list."""
	for compiler in conf.options.dcheck.split(','):
		conf.env.stash()
		conf.start_msg('Checking for %r (d compiler)'%compiler)
		try:
			conf.load(compiler)
		# bugfix: 'except X ,e' is Python-2-only syntax; 'as' works on 2.6+ and 3.x
		except conf.errors.ConfigurationError as e:
			conf.env.revert()
			conf.end_msg(False)
			Logs.debug('compiler_d: %r'%e)
		else:
			if conf.env.D:
				conf.end_msg(conf.env.get_flat('D'))
				conf.env['COMPILER_D']=compiler
				break
			conf.end_msg(False)
	else:
		conf.fatal('no suitable d compiler was found')
def options(opt):
	"""Add --check-d-compiler and load the options of the known D compilers."""
	d_compiler_opts=opt.add_option_group('D Compiler Options')
	d_compiler_opts.add_option('--check-d-compiler',default='gdc,dmd,ldc2',action='store',help='check for the compiler [Default:gdc,dmd,ldc2]',dest='dcheck')
	for d_compiler in['gdc','dmd','ldc2']:
		opt.load('%s'%d_compiler)
diff --git a/waflib/Tools/compiler_fc.py b/waflib/Tools/compiler_fc.py
new file mode 100644
index 0000000..ec5d2ea
--- /dev/null
+++ b/waflib/Tools/compiler_fc.py
@@ -0,0 +1,43 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys,imp,types
+from waflib import Utils,Configure,Options,Logs,Errors
+from waflib.Tools import fc
# candidate Fortran compilers per platform, tried in order
fc_compiler={'win32':['gfortran','ifort'],'darwin':['gfortran','g95','ifort'],'linux':['gfortran','g95','ifort'],'java':['gfortran','g95','ifort'],'default':['gfortran'],'aix':['gfortran']}
def __list_possible_compiler(platform):
	"""Return the fortran compiler candidates for *platform*, or the defaults."""
	return fc_compiler.get(platform,fc_compiler["default"])
def configure(conf):
	"""Detect a usable Fortran compiler (first candidate that loads wins)."""
	try:
		test_for_compiler=conf.options.check_fc
	except AttributeError:
		conf.fatal("Add options(opt): opt.load('compiler_fc')")
	for compiler in test_for_compiler.split():
		conf.env.stash()
		conf.start_msg('Checking for %r (fortran compiler)'%compiler)
		try:
			conf.load(compiler)
		# bugfix: 'except X ,e' is Python-2-only syntax; 'as' works on 2.6+ and 3.x
		except conf.errors.ConfigurationError as e:
			conf.env.revert()
			conf.end_msg(False)
			Logs.debug('compiler_fortran: %r'%e)
		else:
			if conf.env['FC']:
				conf.end_msg(conf.env.get_flat('FC'))
				conf.env.COMPILER_FORTRAN=compiler
				break
			conf.end_msg(False)
	else:
		conf.fatal('could not configure a fortran compiler!')
def options(opt):
	"""Add --check-fortran-compiler and load the candidate compiler options."""
	opt.load_special_tools('fc_*.py')
	build_platform=Utils.unversioned_sys_platform()
	detected_platform=Options.platform
	possible_compiler_list=__list_possible_compiler(detected_platform)
	test_for_compiler=' '.join(possible_compiler_list)
	fortran_compiler_opts=opt.add_option_group("Fortran Compiler Options")
	fortran_compiler_opts.add_option('--check-fortran-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following Fortran Compiler will be checked by default: "%s"'%(detected_platform,test_for_compiler),dest="check_fc")
	for compiler in test_for_compiler.split():
		opt.load('%s'%compiler)
diff --git a/waflib/Tools/cs.py b/waflib/Tools/cs.py
new file mode 100644
index 0000000..ee4d319
--- /dev/null
+++ b/waflib/Tools/cs.py
@@ -0,0 +1,132 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+from waflib import Utils,Task,Options,Logs,Errors
+from waflib.TaskGen import before_method,after_method,feature
+from waflib.Tools import ccroot
+from waflib.Configure import conf
+import os,tempfile
# variables consumed by the uselib system for C# targets
ccroot.USELIB_VARS['cs']=set(['CSFLAGS','ASSEMBLIES','RESOURCES'])
ccroot.lib_patterns['csshlib']=['%s']
@feature('cs')
@before_method('process_source')
def apply_cs(self):
	"""Create the mcs compilation task for the .cs sources of this generator.

	Non-.cs nodes are left in self.source for other extension handlers.
	The binary type (library/exe) is inferred from the 'gen' file suffix
	unless 'bintype' is set explicitly.
	"""
	cs_nodes=[]
	no_nodes=[]
	for x in self.to_nodes(self.source):
		if x.name.endswith('.cs'):
			cs_nodes.append(x)
		else:
			no_nodes.append(x)
	self.source=no_nodes
	bintype=getattr(self,'bintype',self.gen.endswith('.dll')and'library'or'exe')
	self.cs_task=tsk=self.create_task('mcs',cs_nodes,self.path.find_or_declare(self.gen))
	tsk.env.CSTYPE='/target:%s'%bintype
	tsk.env.OUT='/out:%s'%tsk.outputs[0].abspath()
	self.env.append_value('CSFLAGS','/platform:%s'%getattr(self,'platform','anycpu'))
	# executables go to BINDIR, libraries to LIBDIR, unless overridden
	inst_to=getattr(self,'install_path',bintype=='exe'and'${BINDIR}'or'${LIBDIR}')
	if inst_to:
		mod=getattr(self,'chmod',bintype=='exe'and Utils.O755 or Utils.O644)
		self.install_task=self.bld.install_files(inst_to,self.cs_task.outputs[:],env=self.env,chmod=mod)
@feature('cs')
@after_method('apply_cs')
def use_cs(self):
	"""Process the 'use' attribute: add /reference flags and task ordering.

	Names that do not match a task generator are assumed to be system
	assemblies and referenced directly.
	"""
	names=self.to_list(getattr(self,'use',[]))
	get=self.bld.get_tgen_by_name
	for x in names:
		try:
			y=get(x)
		except Errors.WafError:
			# not a local target: reference it by name
			self.env.append_value('CSFLAGS','/reference:%s'%x)
			continue
		y.post()
		tsk=getattr(y,'cs_task',None)or getattr(y,'link_task',None)
		if not tsk:
			self.bld.fatal('cs task has no link task for use %r'%self)
		self.cs_task.dep_nodes.extend(tsk.outputs)
		self.cs_task.set_run_after(tsk)
		self.env.append_value('CSFLAGS','/reference:%s'%tsk.outputs[0].abspath())
@feature('cs')
@after_method('apply_cs','use_cs')
def debug_cs(self):
	"""Add debug-symbol flags and outputs when 'csdebug' (or env.CSDEBUG) is set.

	Mono emits .mdb files next to the assembly; other compilers emit .pdb.
	"""
	csdebug=getattr(self,'csdebug',self.env.CSDEBUG)
	if not csdebug:
		return
	node=self.cs_task.outputs[0]
	if self.env.CS_NAME=='mono':
		out=node.parent.find_or_declare(node.name+'.mdb')
	else:
		out=node.change_ext('.pdb')
	self.cs_task.outputs.append(out)
	try:
		# install the debug file alongside the assembly if installation is set up
		self.install_task.source.append(out)
	except AttributeError:
		pass
	if csdebug=='pdbonly':
		val=['/debug+','/debug:pdbonly']
	elif csdebug=='full':
		val=['/debug+','/debug:full']
	else:
		val=['/debug-']
	self.env.append_value('CSFLAGS',val)
class mcs(Task.Task):
	"""Compile C# sources into an assembly with mcs/csc."""
	color='YELLOW'
	run_str='${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
	def exec_command(self,cmd,**kw):
		"""Run the compiler, using a response file when the command line is too long."""
		bld=self.generator.bld
		try:
			if not kw.get('cwd',None):
				kw['cwd']=bld.cwd
		except AttributeError:
			bld.cwd=kw['cwd']=bld.variant_dir
		try:
			tmp=None
			# long command lines overflow OS limits (notably on Windows):
			# write the arguments to a temporary '@response' file instead
			if isinstance(cmd,list)and len(' '.join(cmd))>=8192:
				program=cmd[0]
				cmd=[self.quote_response_command(x)for x in cmd]
				(fd,tmp)=tempfile.mkstemp()
				os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:]))
				os.close(fd)
				cmd=[program,'@'+tmp]
			ret=self.generator.bld.exec_command(cmd,**kw)
		finally:
			if tmp:
				try:
					os.remove(tmp)
				except OSError:
					pass
		return ret
	def quote_response_command(self,flag):
		"""Quote a single flag for the response file; drops /noconfig."""
		if flag.lower()=='/noconfig':
			return''
		if flag.find(' ')>-1:
			# quote only the value part of key:value flags
			for x in('/r:','/reference:','/resource:','/lib:','/out:'):
				if flag.startswith(x):
					flag='%s"%s"'%(x,flag[len(x):])
					break
			else:
				flag='"%s"'%flag
		return flag
def configure(conf):
	"""Find a C# compiler (csc/mcs/gmcs) and set the related flags.

	A compiler given via --with-csc-binary takes precedence.
	"""
	csc=getattr(Options.options,'cscbinary',None)
	if csc:
		conf.env.MCS=csc
	conf.find_program(['csc','mcs','gmcs'],var='MCS')
	conf.env.ASS_ST='/r:%s'
	conf.env.RES_ST='/resource:%s'
	conf.env.CS_NAME='csc'
	# a program name containing 'mcs' indicates the mono toolchain
	if str(conf.env.MCS).lower().find('mcs')>-1:
		conf.env.CS_NAME='mono'
def options(opt):
	"""Add the --with-csc-binary option to select the C# compiler explicitly."""
	opt.add_option('--with-csc-binary',type='string',dest='cscbinary')
class fake_csshlib(Task.Task):
	"""Dummy task for a prebuilt C# assembly; only records file signatures."""
	color='YELLOW'
	inst_to=None
	def runnable_status(self):
		for x in self.outputs:
			x.sig=Utils.h_file(x.abspath())
		return Task.SKIP_ME
@conf
def read_csshlib(self,name,paths=[]):
	"""Declare a prebuilt C# assembly so other targets can use= it."""
	return self(name=name,features='fake_lib',lib_paths=paths,lib_type='csshlib')
diff --git a/waflib/Tools/cxx.py b/waflib/Tools/cxx.py
new file mode 100644
index 0000000..b744a8d
--- /dev/null
+++ b/waflib/Tools/cxx.py
@@ -0,0 +1,26 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+from waflib import TaskGen,Task,Utils
+from waflib.Tools import c_preproc
+from waflib.Tools.ccroot import link_task,stlink_task
@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')
def cxx_hook(self,node):
	"""Create a cxx compilation task for each C++ source file."""
	return self.create_compiled_task('cxx',node)
# when the c tool is not loaded, compile plain .c files as C++ too
if not'.c'in TaskGen.task_gen.mappings:
	TaskGen.task_gen.mappings['.c']=TaskGen.task_gen.mappings['.cpp']
class cxx(Task.Task):
	"""Compile a single C++ source file; dependencies come from the C preprocessor scanner."""
	run_str='${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}'
	vars=['CXXDEPS']
	ext_in=['.h']
	scan=c_preproc.scan
class cxxprogram(link_task):
	"""Link object files into a C++ program (installed to BINDIR)."""
	run_str='${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}'
	vars=['LINKDEPS']
	ext_out=['.bin']
	inst_to='${BINDIR}'
class cxxshlib(cxxprogram):
	"""Link a C++ shared library (installed to LIBDIR)."""
	inst_to='${LIBDIR}'
class cxxstlib(stlink_task):
	"""Create a C++ static library with ar."""
	pass
diff --git a/waflib/Tools/d.py b/waflib/Tools/d.py
new file mode 100644
index 0000000..1838740
--- /dev/null
+++ b/waflib/Tools/d.py
@@ -0,0 +1,54 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+from waflib import Utils,Task,Errors
+from waflib.TaskGen import taskgen_method,feature,extension
+from waflib.Tools import d_scan,d_config
+from waflib.Tools.ccroot import link_task,stlink_task
class d(Task.Task):
	"""Compile a D source file."""
	color='GREEN'
	run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}'
	scan=d_scan.scan
class d_with_header(d):
	"""Compile a D source file and emit its interface header as a second output."""
	run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}'
class d_header(Task.Task):
	"""Generate a standalone D interface (.di) header."""
	color='BLUE'
	run_str='${D} ${D_HEADER} ${SRC}'
class dprogram(link_task):
	"""Link D objects into a program (installed to BINDIR)."""
	run_str='${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}'
	inst_to='${BINDIR}'
class dshlib(dprogram):
	"""Link a D shared library (installed to LIBDIR)."""
	inst_to='${LIBDIR}'
class dstlib(stlink_task):
	"""Create a D static library."""
	pass
@extension('.d','.di','.D')
def d_hook(self,node):
	"""Create a compilation task for a D source file.

	Uses 'd_with_header' when the task generator requests interface header
	generation, plain 'd' otherwise.
	"""
	# object extension depends on the binary format of the target platform
	ext=Utils.destos_to_binfmt(self.env.DEST_OS)=='pe'and'obj'or'o'
	# include the task generator index to avoid clashes between generators
	out='%s.%d.%s'%(node.name,self.idx,ext)
	def create_compiled_task(self,name,node):
		# local variant of TaskGen.create_compiled_task using the custom output name
		task=self.create_task(name,node,node.parent.find_or_declare(out))
		try:
			self.compiled_tasks.append(task)
		except AttributeError:
			self.compiled_tasks=[task]
		return task
	if getattr(self,'generate_headers',None):
		tsk=create_compiled_task(self,'d_with_header',node)
		tsk.outputs.append(node.change_ext(self.env['DHEADER_ext']))
	else:
		tsk=create_compiled_task(self,'d',node)
	return tsk
@taskgen_method
def generate_header(self,filename):
	"""Queue *filename* for D interface header generation (see process_header)."""
	entry=[filename,self.install_path]
	if not hasattr(self,'header_lst'):
		self.header_lst=[]
	self.header_lst.append(entry)
@feature('d')
def process_header(self):
	"""Create d_header tasks for the files queued with generate_header.

	:raises Errors.WafError: if a queued file cannot be found
	"""
	for i in getattr(self,'header_lst',[]):
		node=self.path.find_resource(i[0])
		if not node:
			raise Errors.WafError('file %r not found on d obj'%i[0])
		self.create_task('d_header',node,node.change_ext('.di'))
diff --git a/waflib/Tools/d_config.py b/waflib/Tools/d_config.py
new file mode 100644
index 0000000..50660ea
--- /dev/null
+++ b/waflib/Tools/d_config.py
@@ -0,0 +1,52 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+from waflib import Utils
+from waflib.Configure import conf
@conf
def d_platform_flags(self):
	"""Set the D binary name patterns according to the target binary format."""
	v=self.env
	if not v.DEST_OS:
		v.DEST_OS=Utils.unversioned_sys_platform()
	binfmt=Utils.destos_to_binfmt(self.env.DEST_OS)
	if binfmt=='pe':
		v['dprogram_PATTERN']='%s.exe'
		v['dshlib_PATTERN']='lib%s.dll'
		v['dstlib_PATTERN']='lib%s.a'
	elif binfmt=='mac-o':
		v['dprogram_PATTERN']='%s'
		v['dshlib_PATTERN']='lib%s.dylib'
		v['dstlib_PATTERN']='lib%s.a'
	else:
		# default: ELF-style naming
		v['dprogram_PATTERN']='%s'
		v['dshlib_PATTERN']='lib%s.so'
		v['dstlib_PATTERN']='lib%s.a'
# D program used by check_dlibrary: prints which runtime library
# (phobos2, tango or phobos1) the detected compiler links against
DLIB='''
version(D_Version2) {
	import std.stdio;
	int main() {
		writefln("phobos2");
		return 0;
	}
} else {
	version(Tango) {
		import tango.stdc.stdio;
		int main() {
			printf("tango");
			return 0;
		}
	} else {
		import std.stdio;
		int main() {
			writefln("phobos1");
			return 0;
		}
	}
}
'''
@conf
def check_dlibrary(self,execute=True):
	"""Detect the D runtime library by compiling (and optionally running) DLIB.

	When *execute* is true, stores the program output in env.DLIBRARY.
	"""
	ret=self.check_cc(features='d dprogram',fragment=DLIB,compile_filename='test.d',execute=execute,define_ret=True)
	if execute:
		self.env.DLIBRARY=ret.strip()
diff --git a/waflib/Tools/d_scan.py b/waflib/Tools/d_scan.py
new file mode 100644
index 0000000..ee80c5f
--- /dev/null
+++ b/waflib/Tools/d_scan.py
@@ -0,0 +1,133 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import re
+from waflib import Utils,Logs
def filter_comments(filename):
	"""Return the code of a D source file with comments removed.

	Handles string/char literals (with escapes) and the three D comment
	forms: ``//`` line comments, ``/* */`` block comments, and ``/+ +/``
	nesting comments. Returns the remaining code as a list of fragments.
	"""
	txt=Utils.readf(filename)
	i=0
	buf=[]
	max=len(txt)
	begin=0
	while i<max:
		c=txt[i]
		if c=='"'or c=="'":
			# string or character literal: skip to the matching delimiter
			buf.append(txt[begin:i])
			delim=c
			i+=1
			while i<max:
				c=txt[i]
				if c==delim:break
				elif c=='\\':
					# skip the escaped character
					i+=1
				i+=1
			i+=1
			begin=i
		elif c=='/':
			buf.append(txt[begin:i])
			i+=1
			if i==max:break
			c=txt[i]
			if c=='+':
				# /+ +/ comments nest in D
				i+=1
				nesting=1
				c=None
				while i<max:
					prev=c
					c=txt[i]
					if prev=='/'and c=='+':
						nesting+=1
						c=None
					elif prev=='+'and c=='/':
						nesting-=1
						if nesting==0:break
						c=None
					i+=1
			elif c=='*':
				# classic /* */ comment
				i+=1
				c=None
				while i<max:
					prev=c
					c=txt[i]
					if prev=='*'and c=='/':break
					i+=1
			elif c=='/':
				# // line comment
				i+=1
				while i<max and txt[i]!='\n':
					i+=1
			else:
				# a lone '/' is code, not a comment
				begin=i-1
				continue
			i+=1
			begin=i
			# replace each comment with a space so tokens stay separated
			buf.append(' ')
		else:
			i+=1
	buf.append(txt[begin:])
	return buf
class d_parser(object):
	"""Scanner for D sources: resolves import statements to nodes/names."""
	def __init__(self,env,incpaths):
		# modules already processed (avoid duplicates and cycles)
		self.allnames=[]
		self.re_module=re.compile("module\s+([^;]+)")
		self.re_import=re.compile("import\s+([^;]+)")
		self.re_import_bindings=re.compile("([^:]+):(.*)")
		self.re_import_alias=re.compile("[^=]+=(.+)")
		self.env=env
		self.nodes=[]
		self.names=[]
		self.incpaths=incpaths
	def tryfind(self,filename):
		"""Locate the source of module *filename* in the include paths.

		Found files are queued for scanning; unresolved modules are
		recorded in self.names.
		"""
		found=0
		for n in self.incpaths:
			# module a.b.c maps to a/b/c.d
			found=n.find_resource(filename.replace('.','/')+'.d')
			if found:
				self.nodes.append(found)
				self.waiting.append(found)
				break
		if not found:
			if not filename in self.names:
				self.names.append(filename)
	def get_strings(self,code):
		"""Extract the imported module names from comment-free D code."""
		self.module=''
		lst=[]
		mod_name=self.re_module.search(code)
		if mod_name:
			self.module=re.sub('\s+','',mod_name.group(1))
		import_iterator=self.re_import.finditer(code)
		if import_iterator:
			for import_match in import_iterator:
				import_match_str=re.sub('\s+','',import_match.group(1))
				# 'import a.b: x, y' - keep only the module part
				bindings_match=self.re_import_bindings.match(import_match_str)
				if bindings_match:
					import_match_str=bindings_match.group(1)
				matches=import_match_str.split(',')
				for match in matches:
					# 'import io = std.stdio' - keep the real module name
					alias_match=self.re_import_alias.match(match)
					if alias_match:
						match=alias_match.group(1)
					lst.append(match)
		return lst
	def start(self,node):
		"""Scan *node* and, transitively, every imported module found."""
		self.waiting=[node]
		while self.waiting:
			nd=self.waiting.pop(0)
			self.iter(nd)
	def iter(self,node):
		"""Scan one file: strip comments, extract imports, resolve each once."""
		path=node.abspath()
		code="".join(filter_comments(path))
		names=self.get_strings(code)
		for x in names:
			if x in self.allnames:continue
			self.allnames.append(x)
			self.tryfind(x)
def scan(self):
	"""Task scanner method: return (dependency nodes, unresolved names) for a D source."""
	env=self.env
	gruik=d_parser(env,self.generator.includes_nodes)
	node=self.inputs[0]
	gruik.start(node)
	nodes=gruik.nodes
	names=gruik.names
	if Logs.verbose:
		Logs.debug('deps: deps for %s: %r; unresolved %r'%(str(node),nodes,names))
	return(nodes,names)
diff --git a/waflib/Tools/dbus.py b/waflib/Tools/dbus.py
new file mode 100644
index 0000000..ccea278
--- /dev/null
+++ b/waflib/Tools/dbus.py
@@ -0,0 +1,29 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+from waflib import Task,Errors
+from waflib.TaskGen import taskgen_method,before_method
@taskgen_method
def add_dbus_file(self,filename,prefix,mode):
	"""Queue a dbus xml file for processing by process_dbus."""
	if not hasattr(self,'dbus_lst'):
		self.dbus_lst=[]
	if'process_dbus'not in self.meths:
		self.meths.append('process_dbus')
	self.dbus_lst.append([filename,prefix,mode])
@before_method('apply_core')
def process_dbus(self):
	"""Create a dbus_binding_tool task for each queued xml file.

	:raises Errors.WafError: if a queued file cannot be found
	"""
	for filename,prefix,mode in getattr(self,'dbus_lst',[]):
		node=self.path.find_resource(filename)
		if not node:
			raise Errors.WafError('file not found '+filename)
		tsk=self.create_task('dbus_binding_tool',node,node.change_ext('.h'))
		tsk.env.DBUS_BINDING_TOOL_PREFIX=prefix
		tsk.env.DBUS_BINDING_TOOL_MODE=mode
class dbus_binding_tool(Task.Task):
	"""Generate a C header from a dbus xml description."""
	color='BLUE'
	ext_out=['.h']
	run_str='${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}'
	shell=True
def configure(conf):
	"""Detect the dbus-binding-tool program (sets ${DBUS_BINDING_TOOL})."""
	# the return value was bound to an unused local; find_program already
	# stores the result in conf.env via var=
	conf.find_program('dbus-binding-tool',var='DBUS_BINDING_TOOL')
diff --git a/waflib/Tools/dmd.py b/waflib/Tools/dmd.py
new file mode 100644
index 0000000..b6e3303
--- /dev/null
+++ b/waflib/Tools/dmd.py
@@ -0,0 +1,51 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import sys
+from waflib.Tools import ar,d
+from waflib.Configure import conf
@conf
def find_dmd(conf):
	"""Find dmd/dmd2/ldc and verify it really is a dmd-compatible compiler.

	:raises: configuration error when the program's help output does not
	         identify a dmd or ldc compiler
	"""
	conf.find_program(['dmd','dmd2','ldc'],var='D')
	out=conf.cmd_and_log([conf.env.D,'--help'])
	if out.find("D Compiler v")==-1:
		# ldc reports its identity under '-version' instead of '--help'
		out=conf.cmd_and_log([conf.env.D,'-version'])
		if out.find("based on DMD v1.")==-1:
			conf.fatal("detected compiler is not dmd/ldc")
@conf
def common_flags_ldc(conf):
	"""Flags specific to the ldc compiler (overrides parts of common_flags_dmd)."""
	v=conf.env
	v['DFLAGS']=['-d-version=Posix']
	v['LINKFLAGS']=[]
	v['DFLAGS_dshlib']=['-relocation-model=pic']
@conf
def common_flags_dmd(conf):
	"""Default flag templates for the dmd compiler family."""
	v=conf.env
	v['D_SRC_F']=['-c']
	v['D_TGT_F']='-of%s'
	# dmd is used as its own linker
	v['D_LINKER']=v['D']
	v['DLNK_SRC_F']=''
	v['DLNK_TGT_F']='-of%s'
	v['DINC_ST']='-I%s'
	v['DSHLIB_MARKER']=v['DSTLIB_MARKER']=''
	# linker flags are passed through dmd with the -L prefix
	v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s'
	v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s'
	v['LINKFLAGS_dprogram']=['-quiet']
	v['DFLAGS_dshlib']=['-fPIC']
	v['LINKFLAGS_dshlib']=['-L-shared']
	v['DHEADER_ext']='.di'
	v.DFLAGS_d_with_header=['-H','-Hf']
	v['D_HDR_F']='%s'
def configure(conf):
	"""Configure the dmd/ldc D compiler toolchain."""
	conf.find_dmd()
	if sys.platform=='win32':
		out=conf.cmd_and_log([conf.env.D,'--help'])
		if out.find("D Compiler v2.")>-1:
			conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')
	conf.load('ar')
	conf.load('d')
	conf.common_flags_dmd()
	conf.d_platform_flags()
	# apply ldc-specific overrides when the detected binary is ldc
	if str(conf.env.D).find('ldc')>-1:
		conf.common_flags_ldc()
diff --git a/waflib/Tools/errcheck.py b/waflib/Tools/errcheck.py
new file mode 100644
index 0000000..3b06493
--- /dev/null
+++ b/waflib/Tools/errcheck.py
@@ -0,0 +1,161 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
# commonly mistyped task generator keyword names -> the correct spelling
typos={'feature':'features','sources':'source','targets':'target','include':'includes','export_include':'export_includes','define':'defines','importpath':'includes','installpath':'install_path','iscopy':'is_copy',}
# BuildContext methods wrapped by replace() to detect the typos above
meths_typos=['__call__','program','shlib','stlib','objects']
+from waflib import Logs,Build,Node,Task,TaskGen,ConfigSet,Errors,Utils
+import waflib.Tools.ccroot
def check_same_targets(self):
	"""Report output nodes created by more than one task, and duplicate task uids."""
	mp=Utils.defaultdict(list)
	uids={}
	def check_task(tsk):
		# record each output node and each task uid
		if not isinstance(tsk,Task.Task):
			return
		for node in tsk.outputs:
			mp[node].append(tsk)
		try:
			uids[tsk.uid()].append(tsk)
		except KeyError:
			uids[tsk.uid()]=[tsk]
	for g in self.groups:
		for tg in g:
			try:
				for tsk in tg.tasks:
					check_task(tsk)
			except AttributeError:
				# not a task generator: a task inserted directly in the group
				check_task(tg)
	dupe=False
	for(k,v)in mp.items():
		if len(v)>1:
			dupe=True
			msg='* Node %r is created more than once%s. The task generators are:'%(k,Logs.verbose==1 and" (full message on 'waf -v -v')"or"")
			Logs.error(msg)
			for x in v:
				if Logs.verbose>1:
					Logs.error(' %d. %r'%(1+v.index(x),x.generator))
				else:
					Logs.error(' %d. %r in %r'%(1+v.index(x),x.generator.name,getattr(x.generator,'path',None)))
	# uid collisions are only reported when there are no duplicate nodes,
	# since duplicate nodes usually imply duplicate uids anyway
	if not dupe:
		for(k,v)in uids.items():
			if len(v)>1:
				Logs.error('* Several tasks use the same identifier. Please check the information on\n http://docs.waf.googlecode.com/git/apidocs_16/Task.html#waflib.Task.Task.uid')
				for tsk in v:
					Logs.error(' - object %r (%r) defined in %r'%(tsk.__class__.__name__,tsk,tsk.generator))
def check_invalid_constraints(self):
	"""Report invalid annotations (@extension mixed with @feature/@before_method/
	@after_method) and before/after constraints naming unknown task classes."""
	feat=set([])
	for x in list(TaskGen.feats.values()):
		# bugfix: set.union returns a new set; the original discarded the result
		feat|=set(x)
	for(x,y)in TaskGen.task_gen.prec.items():
		feat.add(x)
		feat|=set(y)
	ext=set([])
	for x in TaskGen.task_gen.mappings.values():
		ext.add(x.__name__)
	# a method registered both as extension hook and feature method is an error
	invalid=ext&feat
	if invalid:
		Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method'%list(invalid))
	for cls in list(Task.classes.values()):
		for x in('before','after'):
			for y in Utils.to_list(getattr(cls,x,[])):
				if not Task.classes.get(y,None):
					Logs.error('Erroneous order constraint %r=%r on task class %r'%(x,y,cls.__name__))
		if getattr(cls,'rule',None):
			Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")'%cls.__name__)
def replace(m):
	"""Wrap BuildContext method *m* so that calls using a mistyped keyword
	(see ``typos``) are reported to the user."""
	oldcall=getattr(Build.BuildContext,m)
	def call(self,*k,**kw):
		ret=oldcall(self,*k,**kw)
		for x in typos:
			if x in kw:
				if x=='iscopy'and'subst'in getattr(self,'features',''):
					# 'iscopy' is a legitimate keyword on subst task generators
					continue
				# removed unused local 'err=True' from the original
				Logs.error('Fix the typo %r -> %r on %r'%(x,typos[x],ret))
		return ret
	setattr(Build.BuildContext,m,call)
def enhance_lib():
	"""Monkey-patch several waf classes with error-detection wrappers.

	Installed once from options(); each nested function below replaces or
	augments a library entry point to report common user mistakes.
	"""
	for m in meths_typos:
		replace(m)
	def ant_glob(self,*k,**kw):
		# warn about '..' in patterns and globbing inside the build folder
		if k:
			lst=Utils.to_list(k[0])
			for pat in lst:
				if'..'in pat.split('/'):
					Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'"%k[0])
		if kw.get('remove',True):
			try:
				if self.is_child_of(self.ctx.bldnode)and not kw.get('quiet',False):
					Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)'%self)
			except AttributeError:
				pass
		return self.old_ant_glob(*k,**kw)
	Node.Node.old_ant_glob=Node.Node.ant_glob
	Node.Node.ant_glob=ant_glob
	old=Task.is_before
	def is_before(t1,t2):
		# detect task classes constrained to run both before and after each other
		ret=old(t1,t2)
		if ret and old(t2,t1):
			Logs.error('Contradictory order constraints in classes %r %r'%(t1,t2))
		return ret
	Task.is_before=is_before
	def check_err_features(self):
		# 'shlib' alone is ambiguous; a language feature ('c', 'cxx', ...) is required
		lst=self.to_list(self.features)
		if'shlib'in lst:
			Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
		for x in('c','cxx','d','fc'):
			if not x in lst and lst and lst[0]in[x+y for y in('program','shlib','stlib')]:
				Logs.error('%r features is probably missing %r'%(self,x))
	TaskGen.feature('*')(check_err_features)
	def check_err_order(self):
		# before/after/ext_in/ext_out only make sense on rule-based task generators
		if not hasattr(self,'rule')and not'subst'in Utils.to_list(self.features):
			for x in('before','after','ext_in','ext_out'):
				if hasattr(self,x):
					Logs.warn('Erroneous order constraint %r on non-rule based task generator %r'%(x,self))
		else:
			for x in('before','after'):
				for y in self.to_list(getattr(self,x,[])):
					if not Task.classes.get(y,None):
						Logs.error('Erroneous order constraint %s=%r on %r (no such class)'%(x,y,self))
	TaskGen.feature('*')(check_err_order)
	def check_compile(self):
		# run the constraint checks around the normal build
		check_invalid_constraints(self)
		try:
			ret=self.orig_compile()
		finally:
			check_same_targets(self)
		return ret
	Build.BuildContext.orig_compile=Build.BuildContext.compile
	Build.BuildContext.compile=check_compile
	def use_rec(self,name,**kw):
		# forbid 'use' references to targets posted in a later build group
		try:
			y=self.bld.get_tgen_by_name(name)
		except Errors.WafError:
			pass
		else:
			idx=self.bld.get_group_idx(self)
			odx=self.bld.get_group_idx(y)
			if odx>idx:
				msg="Invalid 'use' across build groups:"
				if Logs.verbose>1:
					msg+='\n target %r\n uses:\n %r'%(self,y)
				else:
					msg+=" %r uses %r (try 'waf -v -v' for the full error)"%(self.name,name)
				raise Errors.WafError(msg)
		self.orig_use_rec(name,**kw)
	TaskGen.task_gen.orig_use_rec=TaskGen.task_gen.use_rec
	TaskGen.task_gen.use_rec=use_rec
	def getattri(self,name,default=None):
		# reject common misuses of the ConfigSet api with a helpful message
		if name=='append'or name=='add':
			raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique')
		elif name=='prepend':
			raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
		if name in self.__slots__:
			# NOTE(review): object has no __getattr__ attribute, so this branch
			# would raise AttributeError if reached - confirm against upstream waf
			return object.__getattr__(self,name,default)
		else:
			return self[name]
	ConfigSet.ConfigSet.__getattr__=getattri
def options(opt):
	"""Install the error-detection wrappers when the errcheck tool is loaded."""
	enhance_lib()
def configure(conf):
	# nothing to configure; the tool works purely by monkey-patching
	pass
diff --git a/waflib/Tools/fc.py b/waflib/Tools/fc.py
new file mode 100644
index 0000000..3589799
--- /dev/null
+++ b/waflib/Tools/fc.py
@@ -0,0 +1,116 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import re
+from waflib import Utils,Task,TaskGen,Logs
+from waflib.Tools import ccroot,fc_config,fc_scan
+from waflib.TaskGen import feature,before_method,after_method,extension
+from waflib.Configure import conf
# variables consumed by the uselib system for the fortran features
ccroot.USELIB_VARS['fc']=set(['FCFLAGS','DEFINES','INCLUDES'])
ccroot.USELIB_VARS['fcprogram_test']=ccroot.USELIB_VARS['fcprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
ccroot.USELIB_VARS['fcshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
ccroot.USELIB_VARS['fcstlib']=set(['ARFLAGS','LINKDEPS'])
@feature('fcprogram','fcshlib','fcstlib','fcprogram_test')
def dummy(self):
	"""No-op feature method; registers the fortran link features."""
	pass
@extension('.f','.f90','.F','.F90','.for','.FOR')
def fc_hook(self,node):
	"""Create an fc compilation task for each fortran source file."""
	return self.create_compiled_task('fc',node)
@conf
def modfile(conf,name):
	"""Map a fortran module name to its .mod file name.

	The capitalization scheme is compiler-dependent and selected by
	env.FC_MOD_CAPITALIZATION ('lower' when unset).
	"""
	return{'lower':name.lower()+'.mod','lower.MOD':name.upper()+'.MOD','UPPER.mod':name.upper()+'.mod','UPPER':name.upper()+'.MOD'}[conf.env.FC_MOD_CAPITALIZATION or'lower']
def get_fortran_tasks(tsk):
	"""Return the fortran tasks of tsk's build group still awaiting module resolution."""
	bld=tsk.generator.bld
	tasks=bld.get_tasks_group(bld.get_group_idx(tsk.generator))
	return[x for x in tasks if isinstance(x,fc)and not getattr(x,'nomod',None)and not getattr(x,'mod_fortran_done',None)]
class fc(Task.Task):
	"""Compile a fortran source file.

	runnable_status() performs a one-time, group-wide pass wiring the
	producers and consumers of fortran .mod files together, since module
	dependencies only become known after scanning.
	"""
	color='GREEN'
	run_str='${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()}'
	vars=["FORTRANMODPATHFLAG"]
	def scan(self):
		"""Scan the source for module USE/MOD statements and includes."""
		tmp=fc_scan.fortran_parser(self.generator.includes_nodes)
		tmp.task=self
		tmp.start(self.inputs[0])
		if Logs.verbose:
			Logs.debug('deps: deps for %r: %r; unresolved %r'%(self.inputs,tmp.nodes,tmp.names))
		return(tmp.nodes,tmp.names)
	def runnable_status(self):
		if getattr(self,'mod_fortran_done',None):
			return super(fc,self).runnable_status()
		# first fortran task reaching this point processes the whole group
		bld=self.generator.bld
		lst=get_fortran_tasks(self)
		for tsk in lst:
			tsk.mod_fortran_done=True
		for tsk in lst:
			ret=tsk.runnable_status()
			if ret==Task.ASK_LATER:
				# a scan is still pending: retry the whole pass later
				for x in lst:
					x.mod_fortran_done=None
				return Task.ASK_LATER
		ins=Utils.defaultdict(set)
		outs=Utils.defaultdict(set)
		# MOD@name entries mark tasks that produce a module file
		for tsk in lst:
			key=tsk.uid()
			for x in bld.raw_deps[key]:
				if x.startswith('MOD@'):
					name=bld.modfile(x.replace('MOD@',''))
					node=bld.srcnode.find_or_declare(name)
					tsk.set_outputs(node)
					outs[id(node)].add(tsk)
		# USE@name entries mark tasks that consume a module file
		for tsk in lst:
			key=tsk.uid()
			for x in bld.raw_deps[key]:
				if x.startswith('USE@'):
					name=bld.modfile(x.replace('USE@',''))
					node=bld.srcnode.find_resource(name)
					if node and node not in tsk.outputs:
						if not node in bld.node_deps[key]:
							bld.node_deps[key].append(node)
						ins[id(node)].add(tsk)
		# make every consumer run after (and depend on) all producers
		for k in ins.keys():
			for a in ins[k]:
				a.run_after.update(outs[k])
				tmp=[]
				for t in outs[k]:
					tmp.extend(t.outputs)
				a.dep_nodes.extend(tmp)
				# sort for a stable task signature
				a.dep_nodes.sort(key=lambda x:x.abspath())
		# invalidate cached signatures now that dependencies changed
		for tsk in lst:
			try:
				delattr(tsk,'cache_sig')
			except AttributeError:
				pass
		return super(fc,self).runnable_status()
class fcprogram(ccroot.link_task):
	"""Link fortran objects into a program (installed to BINDIR)."""
	color='YELLOW'
	run_str='${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB}'
	inst_to='${BINDIR}'
class fcshlib(fcprogram):
	"""Link a fortran shared library (installed to LIBDIR)."""
	inst_to='${LIBDIR}'
class fcprogram_test(fcprogram):
	"""Link and run a fortran test program during configuration."""
	def can_retrieve_cache(self):
		# configuration tests must actually execute
		return False
	def runnable_status(self):
		# always run, even when the signature says the task is up to date
		ret=super(fcprogram_test,self).runnable_status()
		if ret==Task.SKIP_ME:
			ret=Task.RUN_ME
		return ret
	def exec_command(self,cmd,**kw):
		"""Run the command, capturing stdout/stderr on the build context.

		Returns -1 on any execution failure so configuration can continue.
		"""
		bld=self.generator.bld
		kw['shell']=isinstance(cmd,str)
		kw['stdout']=kw['stderr']=Utils.subprocess.PIPE
		kw['cwd']=bld.variant_dir
		bld.out=bld.err=''
		bld.to_log('command: %s\n'%cmd)
		kw['output']=0
		try:
			(bld.out,bld.err)=bld.cmd_and_log(cmd,**kw)
		# bugfix: 'except Exception ,e' is Python-2-only syntax; the bound
		# exception was unused, so the name is dropped entirely
		except Exception:
			return -1
		if bld.out:
			bld.to_log("out: %s\n"%bld.out)
		if bld.err:
			bld.to_log("err: %s\n"%bld.err)
class fcstlib(ccroot.stlink_task):
	"""Create a fortran static library."""
	pass
diff --git a/waflib/Tools/fc_config.py b/waflib/Tools/fc_config.py
new file mode 100644
index 0000000..0130a57
--- /dev/null
+++ b/waflib/Tools/fc_config.py
@@ -0,0 +1,285 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import re,shutil,os,sys,string,shlex
+from waflib.Configure import conf
+from waflib.TaskGen import feature,after_method,before_method
+from waflib import Build,Utils
# Minimal fortran programs used as configure-time compile fragments.
FC_FRAGMENT=' program main\n end program main\n'
FC_FRAGMENT2=' PROGRAM MAIN\n END\n'
+ at conf
def fc_flags(conf):
    """Define fortran flags and naming patterns shared by all fc compilers."""
    v=conf.env
    v['FC_SRC_F']=[]
    v['FC_TGT_F']=['-c','-o']
    v['FCINCPATH_ST']='-I%s'
    v['FCDEFINES_ST']='-D%s'
    # default linker is the compiler driver itself
    if not v['LINK_FC']:
        v['LINK_FC']=v['FC']
    for key,val in (
        ('FCLNK_SRC_F',[]),
        ('FCLNK_TGT_F',['-o']),
        ('FCFLAGS_fcshlib',['-fpic']),
        ('LINKFLAGS_fcshlib',['-shared']),
        ('fcshlib_PATTERN','lib%s.so'),
        ('fcstlib_PATTERN','lib%s.a'),
        ('FCLIB_ST','-l%s'),
        ('FCLIBPATH_ST','-L%s'),
        ('FCSTLIB_ST','-l%s'),
        ('FCSTLIBPATH_ST','-L%s'),
        ('FCSTLIB_MARKER','-Wl,-Bstatic'),
        ('FCSHLIB_MARKER','-Wl,-Bdynamic'),
        ('SONAME_ST','-Wl,-h,%s'),
    ):
        v[key]=val
+ at conf
def fc_add_flags(conf):
    """Pull fortran flags from the process environment (FCFLAGS, LDFLAGS)."""
    conf.add_os_flags('FCFLAGS')
    # LDFLAGS from the environment feed waf's LINKFLAGS variable
    conf.add_os_flags('LDFLAGS','LINKFLAGS')
+ at conf
def check_fortran(self,*k,**kw):
    """Compile a trivial fortran program to verify the compiler works."""
    self.check_cc(fragment=FC_FRAGMENT,compile_filename='test.f',features='fc fcprogram',msg='Compiling a simple fortran app')
+ at conf
def check_fc(self,*k,**kw):
    """Run a configuration check with fortran-oriented defaults filled in."""
    kw['compiler']='fc'
    kw.setdefault('compile_mode','fc')
    kw.setdefault('type','fcprogram')
    kw.setdefault('compile_filename','test.f90')
    # only reach for the default fragment when no code was supplied
    if 'code' not in kw:
        kw['code']=FC_FRAGMENT
    return self.check(*k,**kw)
+ at conf
def fortran_modifier_darwin(conf):
    """Adjust fortran link settings for Darwin/macOS."""
    settings=(
        ('FCFLAGS_fcshlib',['-fPIC','-compatibility_version','1','-current_version','1']),
        ('LINKFLAGS_fcshlib',['-dynamiclib']),
        ('fcshlib_PATTERN','lib%s.dylib'),
        ('FRAMEWORKPATH_ST','-F%s'),
        ('FRAMEWORK_ST','-framework %s'),
        ('LINKFLAGS_fcstlib',[]),
        # the darwin linker has no -Bstatic/-Bdynamic switches
        ('FCSHLIB_MARKER',''),
        ('FCSTLIB_MARKER',''),
        ('SONAME_ST',''),
    )
    env=conf.env
    for key,val in settings:
        env[key]=val
+ at conf
def fortran_modifier_win32(conf):
    """Adjust fortran build settings for Windows (MinGW-style toolchains)."""
    v=conf.env
    v['fcprogram_PATTERN']=v['fcprogram_test_PATTERN']='%s.exe'
    v['fcshlib_PATTERN']='%s.dll'
    v['implib_PATTERN']='lib%s.dll.a'
    v['IMPLIB_ST']='-Wl,--out-implib,%s'
    # reset then re-add: DLLs need the export define
    v['FCFLAGS_fcshlib']=[]
    v.append_value('FCFLAGS_fcshlib',['-DDLL_EXPORT'])
    # auto-import eases cross-DLL symbol resolution
    v.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
+ at conf
def fortran_modifier_cygwin(conf):
    """Cygwin fortran settings: win32 settings plus cyg* naming overrides."""
    fortran_modifier_win32(conf)
    env=conf.env
    env['fcshlib_PATTERN']='cyg%s.dll'
    env.append_value('LINKFLAGS_fcshlib',['-Wl,--enable-auto-image-base'])
    env['FCFLAGS_fcshlib']=[]
+ at conf
def check_fortran_dummy_main(self,*k,**kw):
    """Detect whether C code linked against fortran must supply a dummy main.

    Tries the usual mangled names and stores the winner in env.FC_MAIN
    (-1 when no dummy main is required); fatal when none links.
    """
    if not self.env.CC:
        self.fatal('A c compiler is required for check_fortran_dummy_main')
    candidates=['MAIN__','__MAIN','_MAIN','MAIN_','MAIN']
    candidates+= [m.lower() for m in candidates]
    candidates.append('')
    self.start_msg('Detecting whether we need a dummy main')
    for main in candidates:
        kw['fortran_main']=main
        try:
            self.check_cc(fragment='int %s() { return 0; }\n'%(main or'test'),features='c fcprogram',mandatory=True)
        except self.errors.ConfigurationError:
            continue
        if main:
            self.env.FC_MAIN=main
            self.end_msg('yes %s'%main)
        else:
            # the empty candidate means no dummy main is needed at all
            self.env.FC_MAIN=-1
            self.end_msg('no')
        break
    else:
        self.end_msg('not found')
        self.fatal('could not detect whether fortran requires a dummy main, see the config.log')
# Lines printed by the gcc driver itself in verbose output.
GCC_DRIVER_LINE=re.compile('^Driving:')
# Tokens that look like static archives (*.a) or -l library flags.
POSIX_STATIC_EXT=re.compile('\S+\.a')
POSIX_LIB_FLAGS=re.compile('-l\S+')
+ at conf
def is_link_verbose(self,txt):
    """Return True when *txt* looks like verbose linker output (contains archives or -l flags outside driver lines)."""
    assert isinstance(txt,str)
    return any(
        (POSIX_STATIC_EXT.search(line) or POSIX_LIB_FLAGS.search(line))
        for line in txt.splitlines()
        if not GCC_DRIVER_LINE.search(line)
    )
+ at conf
def check_fortran_verbose_flag(self,*k,**kw):
    """Find the flag that makes the fortran driver print its link commands.

    Stores and returns env.FC_VERBOSE_FLAG; fatal when no candidate works.
    """
    self.start_msg('fortran link verbose flag')
    for flag in ['-v','--verbose','-verbose','-V']:
        try:
            self.check_cc(features='fc fcprogram_test',fragment=FC_FRAGMENT2,compile_filename='test.f',linkflags=[flag],mandatory=True)
        except self.errors.ConfigurationError:
            continue
        # compiled fine: accept the flag only if the output looks verbose
        if self.is_link_verbose(self.test_bld.err) or self.is_link_verbose(self.test_bld.out):
            self.end_msg(flag)
            break
    else:
        self.end_msg('failure')
        self.fatal('Could not obtain the fortran link verbose flag (see config.log)')
    self.env.FC_VERBOSE_FLAG=flag
    return flag
+LINKFLAGS_IGNORED=[r'-lang*',r'-lcrt[a-zA-Z0-9\.]*\.o',r'-lc$',r'-lSystem',r'-libmil',r'-LIST:*',r'-LNO:*']
+if os.name=='nt':
+ LINKFLAGS_IGNORED.extend([r'-lfrt*',r'-luser32',r'-lkernel32',r'-ladvapi32',r'-lmsvcrt',r'-lshell32',r'-lmingw',r'-lmoldname'])
+else:
+ LINKFLAGS_IGNORED.append(r'-lgcc*')
+RLINKFLAGS_IGNORED=[re.compile(f)for f in LINKFLAGS_IGNORED]
+def _match_ignore(line):
+ for i in RLINKFLAGS_IGNORED:
+ if i.match(line):
+ return True
+ return False
def parse_fortran_link(lines):
    """Extract the interesting link flags from verbose compiler output lines."""
    flags=[]
    for line in lines:
        # skip the driver's own chatter
        if GCC_DRIVER_LINE.match(line):
            continue
        _parse_flink_line(line,flags)
    return flags
# Options whose argument is the following token ("-L dir") vs glued ("-Ldir").
SPACE_OPTS=re.compile('^-[LRuYz]$')
NOSPACE_OPTS=re.compile('^-[RL]')
def _parse_flink_line(line,final_flags):
    """Tokenize one verbose-link line, appending relevant flags to final_flags."""
    lexer=shlex.shlex(line,posix=True)
    lexer.whitespace_split=True
    token=lexer.get_token()
    collected=[]
    while token:
        if _match_ignore(token):
            pass
        elif token.startswith('-lkernel32') and sys.platform=='cygwin':
            collected.append(token)
        elif SPACE_OPTS.match(token):
            # the argument is the next token; some compilers prefix it with "P,"
            arg=lexer.get_token()
            if arg.startswith('P,'):
                arg=arg[2:]
            for opt in arg.split(os.pathsep):
                collected.append('-L%s'%opt)
        elif NOSPACE_OPTS.match(token):
            collected.append(token)
        elif POSIX_LIB_FLAGS.match(token):
            collected.append(token)
        token=lexer.get_token()
    final_flags.extend(collected)
    return final_flags
+ at conf
def check_fortran_clib(self,autoadd=True,*k,**kw):
    """Determine the C-library link flags required by the fortran runtime.

    Stores the flags in env.LINKFLAGS_CLIB and returns them; returns []
    on failure when mandatory is False, fatal otherwise.
    """
    if not self.env.FC_VERBOSE_FLAG:
        self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?')
    self.start_msg('Getting fortran runtime link flags')
    try:
        self.check_cc(fragment=FC_FRAGMENT2,compile_filename='test.f',features='fc fcprogram_test',linkflags=[self.env.FC_VERBOSE_FLAG])
    except Exception:
        self.end_msg(False)
        if kw.get('mandatory',True):
            # bug fix: this used to call the module-level `conf` decorator
            # (NameError at runtime) instead of self.fatal
            self.fatal('Could not find the c library flags')
    else:
        out=self.test_bld.err
        flags=parse_fortran_link(out.splitlines())
        self.end_msg('ok (%s)'%' '.join(flags))
        self.env.LINKFLAGS_CLIB=flags
        return flags
    return []
def getoutput(conf,cmd,stdin=False):
    """Run *cmd* and return (stdout, stderr) decoded to str.

    When stdin is True, a single newline is fed to the process (some
    compilers read stdin when probing, e.g. `gfortran -E -`).
    """
    if stdin:
        stdin_mode=Utils.subprocess.PIPE
        input_data='\n'.encode()
    else:
        stdin_mode=None
        input_data=None
    env=conf.env.env or None
    try:
        p=Utils.subprocess.Popen(cmd,stdin=stdin_mode,stdout=Utils.subprocess.PIPE,stderr=Utils.subprocess.PIPE,env=env)
        # bug fix: the old code did p.stdin.write('\n'), which writes str to
        # a binary pipe and raises TypeError on Python 3; communicate()
        # handles encoding-free byte input and closes stdin for us
        out,err=p.communicate(input_data)
    except Exception:
        conf.fatal('could not determine the compiler version %r'%cmd)
    if not isinstance(out,str):
        out=out.decode(sys.stdout.encoding or'iso8859-1')
    if not isinstance(err,str):
        err=err.decode(sys.stdout.encoding or'iso8859-1')
    return(out,err)
# Fortran side of the mangling probe: two routines (with and without an
# embedded underscore) whose mangled symbols the C driver must resolve.
# NOTE(review): leading whitespace inside these literals appears flattened by
# the email transport (fixed-form fortran normally indents to column 7) —
# verify against upstream waf before relying on the exact bytes.
ROUTINES_CODE="""\
 subroutine foobar()
 return
 end
 subroutine foo_bar()
 return
 end
"""
# C driver template; %-formatted with candidate mangled identifiers.
MAIN_CODE="""
void %(dummy_func_nounder)s(void);
void %(dummy_func_under)s(void);
int %(main_func_name)s() {
 %(dummy_func_nounder)s();
 %(dummy_func_under)s();
 return 0;
}
"""
+ at feature('link_main_routines_func')
+ at before_method('process_source')
def link_main_routines_tg_method(self):
    """Generate the mini-project (C main + fortran routines) used to detect mangling."""
    def write_test_file(task):
        # materialize the source attached to the task generator
        task.outputs[0].write(task.generator.code)
    bld=self.bld
    bld(rule=write_test_file,target='main.c',code=MAIN_CODE%self.__dict__)
    bld(rule=write_test_file,target='test.f',code=ROUTINES_CODE)
    bld(features='fc fcstlib',source='test.f',target='test')
    bld(features='c fcprogram',source='main.c',target='app',use='test')
def mangling_schemes():
    """Yield (trailing underscore, extra underscore, case) candidates."""
    for u in ('_',''):
        for du in ('','_'):
            yield (u,du,'lower')
            yield (u,du,'upper')
def mangle_name(u,du,c,name):
    """Mangle *name* per one (u, du, c) scheme from mangling_schemes()."""
    base=getattr(name,c)()
    # du is appended only when the name itself contains an underscore
    extra=du if '_' in name else ''
    return base+u+extra
+ at conf
def check_fortran_mangling(self,*k,**kw):
    """Detect the fortran symbol-mangling scheme; stores env.FORTRAN_MANGLING."""
    if not self.env.CC:
        self.fatal('A c compiler is required for link_main_routines')
    if not self.env.FC:
        self.fatal('A fortran compiler is required for link_main_routines')
    if not self.env.FC_MAIN:
        self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)')
    self.start_msg('Getting fortran mangling scheme')
    for u,du,c in mangling_schemes():
        try:
            self.check_cc(compile_filename=[],features='link_main_routines_func',msg='nomsg',errmsg='nomsg',mandatory=True,dummy_func_nounder=mangle_name(u,du,c,"foobar"),dummy_func_under=mangle_name(u,du,c,"foo_bar"),main_func_name=self.env.FC_MAIN)
        except self.errors.ConfigurationError:
            continue
        self.end_msg("ok ('%s', '%s', '%s-case')"%(u,du,c))
        self.env.FORTRAN_MANGLING=(u,du,c)
        break
    else:
        self.end_msg(False)
        self.fatal('mangler not found')
    return (u,du,c)
+ at feature('pyext')
+ at before_method('propagate_uselib_vars','apply_link')
def set_lib_pat(self):
    """Make fortran shared libs follow the python-extension naming pattern."""
    pattern=self.env['pyext_PATTERN']
    self.env['fcshlib_PATTERN']=pattern
+ at conf
def detect_openmp(self):
    """Probe common OpenMP flags; the working one is stored under 'OPENMP'."""
    for flag in ['-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp']:
        try:
            self.check_fc(msg='Checking for OpenMP flag %s'%flag,fragment='program main\n call omp_get_num_threads()\nend program main',fcflags=flag,linkflags=flag,uselib_store='OPENMP')
        except self.errors.ConfigurationError:
            continue
        break
    else:
        self.fatal('Could not find OpenMP')
diff --git a/waflib/Tools/fc_scan.py b/waflib/Tools/fc_scan.py
new file mode 100644
index 0000000..48e06b5
--- /dev/null
+++ b/waflib/Tools/fc_scan.py
@@ -0,0 +1,68 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import re
+from waflib import Utils,Task,TaskGen,Logs
+from waflib.TaskGen import feature,before_method,after_method,extension
+from waflib.Configure import conf
# Regexes for fortran dependency scanning (case-insensitive):
# INCLUDE statements, USE of modules, and MODULE definitions.
INC_REGEX="""(?:^|['">]\s*;)\s*INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
USE_REGEX="""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
MOD_REGEX="""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
re_inc=re.compile(INC_REGEX,re.I)
re_use=re.compile(USE_REGEX,re.I)
re_mod=re.compile(MOD_REGEX,re.I)
class fortran_parser(object):
    """Scan fortran sources for INCLUDE files and USE/MODULE statements."""
    def __init__(self,incpaths):
        self.seen=[]        # include names already processed
        self.nodes=[]       # include file nodes resolved on disk
        self.names=[]       # unresolved names: raw includes and USE@/MOD@ markers
        self.incpaths=incpaths
    def find_deps(self,node):
        """Return ([includes], [used modules], [declared modules]) for *node*."""
        incs,uses,mods=[],[],[]
        for line in node.read().splitlines():
            for regex,bucket in ((re_inc,incs),(re_use,uses),(re_mod,mods)):
                m=regex.search(line)
                if m:
                    bucket.append(m.group(1))
        return (incs,uses,mods)
    def start(self,node):
        """Breadth-first dependency scan starting at *node*."""
        self.waiting=[node]
        while self.waiting:
            self.iter(self.waiting.pop(0))
    def iter(self,node):
        """Process one node's dependencies.

        (Removed the unused local `path=node.abspath()` from the original.)
        """
        incs,uses,mods=self.find_deps(node)
        for inc in incs:
            if inc not in self.seen:
                self.seen.append(inc)
                self.tryfind_header(inc)
        for use in uses:
            marker="USE@%s"%use
            if marker not in self.names:
                self.names.append(marker)
        for mod in mods:
            marker="MOD@%s"%mod
            if marker not in self.names:
                self.names.append(marker)
    def tryfind_header(self,filename):
        """Resolve an include against incpaths; queue it, or record the raw name."""
        for directory in self.incpaths:
            found=directory.find_resource(filename)
            if found:
                self.nodes.append(found)
                self.waiting.append(found)
                return
        if filename not in self.names:
            self.names.append(filename)
diff --git a/waflib/Tools/flex.py b/waflib/Tools/flex.py
new file mode 100644
index 0000000..13f6207
--- /dev/null
+++ b/waflib/Tools/flex.py
@@ -0,0 +1,32 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import waflib.TaskGen,os,re
def decide_ext(self,node):
    """Choose the generated source extension from the task-generator features."""
    return ['.lex.cc'] if 'cxx' in self.features else ['.lex.c']
def flexfun(tsk):
    """Run flex (with -t, writing to stdout) and save the generated source.

    The captured output is written to the first output node with line
    endings normalized to '\n'.
    """
    env=tsk.env
    bld=tsk.generator.bld
    cwd=bld.variant_dir
    def as_list(value):
        return [value] if isinstance(value,str) else value
    cmd=[]
    # last_cmd intentionally keeps the unfiltered argument list
    tsk.last_cmd=cmd
    cmd.extend(as_list(env['FLEX']))
    cmd.extend(as_list(env['FLEXFLAGS']))
    sources=[node.path_from(bld.bldnode) for node in tsk.inputs]
    if env.FLEX_MSYS:
        # msys flex requires forward-slash paths
        sources=[src.replace(os.sep,'/') for src in sources]
    cmd.extend(sources)
    filtered=[arg for arg in cmd if arg]
    output=bld.cmd_and_log(filtered,cwd=cwd,env=env.env or None,quiet=0)
    tsk.outputs[0].write(output.replace('\r\n','\n').replace('\r','\n'))
+waflib.TaskGen.declare_chain(name='flex',rule=flexfun,ext_in='.l',decider=decide_ext,)
def configure(conf):
    """Detect the flex program; -t writes generated code to stdout."""
    conf.find_program('flex',var='FLEX')
    env=conf.env
    env.FLEXFLAGS=['-t']
    if re.search(r"\\msys\\[0-9.]+\\bin\\flex.exe$",env.FLEX):
        # msys flex needs forward-slash input paths (see flexfun)
        env.FLEX_MSYS=True
diff --git a/waflib/Tools/g95.py b/waflib/Tools/g95.py
new file mode 100644
index 0000000..9bc331a
--- /dev/null
+++ b/waflib/Tools/g95.py
@@ -0,0 +1,55 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import re
+from waflib import Utils
+from waflib.Tools import fc,fc_config,fc_scan,ar
+from waflib.Configure import conf
+ at conf
def find_g95(conf):
    """Locate g95, record its version, and set FC_NAME.

    (Local renamed from `fc`, which shadowed the imported fc module.)
    """
    compiler=conf.find_program('g95',var='FC')
    compiler=conf.cmd_to_list(compiler)
    conf.get_g95_version(compiler)
    conf.env.FC_NAME='G95'
+ at conf
def g95_flags(conf):
    """g95-specific flag defaults (PIC, module path flag, debug flags)."""
    for key,val in (
        ('FCFLAGS_fcshlib',['-fPIC']),
        ('FORTRANMODFLAG',['-fmod=','']),
        ('FCFLAGS_DEBUG',['-Werror']),
    ):
        conf.env[key]=val
+ at conf
def g95_modifier_win32(conf):
    """win32 fortran settings shared across compilers (see fc_config)."""
    fc_config.fortran_modifier_win32(conf)
+ at conf
def g95_modifier_cygwin(conf):
    """Cygwin fortran settings shared across compilers (see fc_config)."""
    fc_config.fortran_modifier_cygwin(conf)
+ at conf
def g95_modifier_darwin(conf):
    """Darwin fortran settings shared across compilers (see fc_config)."""
    fc_config.fortran_modifier_darwin(conf)
+ at conf
def g95_modifier_platform(conf):
    """Dispatch to the g95 modifier matching the destination OS, if any."""
    dest_os=conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
    modifier=getattr(conf,'g95_modifier_'+dest_os,None)
    if modifier:
        modifier()
+ at conf
def get_g95_version(conf,fc):
    """Parse `g95 --version` output into env.FC_VERSION (major, minor)."""
    version_re=re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
    cmd=fc+['--version']
    out,err=fc_config.getoutput(conf,cmd,stdin=False)
    # some builds print the banner on stderr instead of stdout
    match=version_re(out) if out else version_re(err)
    if not match:
        conf.fatal('cannot determine g95 version')
    k=match.groupdict()
    conf.env['FC_VERSION']=(k['major'],k['minor'])
def configure(conf):
    """Configure the g95 fortran compiler: detect it, then set flags."""
    for step in (conf.find_g95,conf.find_ar,conf.fc_flags,conf.fc_add_flags,conf.g95_flags,conf.g95_modifier_platform):
        step()
diff --git a/waflib/Tools/gas.py b/waflib/Tools/gas.py
new file mode 100644
index 0000000..b714ca1
--- /dev/null
+++ b/waflib/Tools/gas.py
@@ -0,0 +1,12 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import waflib.Tools.asm
+from waflib.Tools import ar
def configure(conf):
    """Detect an assembler driver (gas or gcc) and load asm support."""
    conf.find_program(['gas','gcc'],var='AS')
    env=conf.env
    env.AS_TGT_F=['-c','-o']
    env.ASLNK_TGT_F=['-o']
    conf.find_ar()
    conf.load('asm')
diff --git a/waflib/Tools/gcc.py b/waflib/Tools/gcc.py
new file mode 100644
index 0000000..adf11da
--- /dev/null
+++ b/waflib/Tools/gcc.py
@@ -0,0 +1,97 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys
+from waflib import Configure,Options,Utils
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+ at conf
def find_gcc(conf):
    """Find gcc (or cc), verify it, and register it as the C compiler."""
    compiler=conf.find_program(['gcc','cc'],var='CC')
    compiler=conf.cmd_to_list(compiler)
    conf.get_cc_version(compiler,gcc=True)
    conf.env.CC_NAME='gcc'
    conf.env.CC=compiler
+ at conf
def gcc_common_flags(conf):
    """Flags and naming patterns shared by gcc-like C compilers."""
    v=conf.env
    v['CC_SRC_F']=[]
    v['CC_TGT_F']=['-c','-o']
    # default linker is the compiler driver
    if not v['LINK_CC']:
        v['LINK_CC']=v['CC']
    for key,val in (
        ('CCLNK_SRC_F',[]),
        ('CCLNK_TGT_F',['-o']),
        ('CPPPATH_ST','-I%s'),
        ('DEFINES_ST','-D%s'),
        ('LIB_ST','-l%s'),
        ('LIBPATH_ST','-L%s'),
        ('STLIB_ST','-l%s'),
        ('STLIBPATH_ST','-L%s'),
        ('RPATH_ST','-Wl,-rpath,%s'),
        ('SONAME_ST','-Wl,-h,%s'),
        ('SHLIB_MARKER','-Wl,-Bdynamic'),
        ('STLIB_MARKER','-Wl,-Bstatic'),
        ('cprogram_PATTERN','%s'),
        ('CFLAGS_cshlib',['-fPIC']),
        ('LINKFLAGS_cshlib',['-shared']),
        ('cshlib_PATTERN','lib%s.so'),
        ('LINKFLAGS_cstlib',['-Wl,-Bstatic']),
        ('cstlib_PATTERN','lib%s.a'),
        ('LINKFLAGS_MACBUNDLE',['-bundle','-undefined','dynamic_lookup']),
        ('CFLAGS_MACBUNDLE',['-fPIC']),
        ('macbundle_PATTERN','%s.bundle'),
    ):
        v[key]=val
+ at conf
def gcc_modifier_win32(conf):
    """Windows (MinGW-style) gcc overrides: .exe/.dll naming, auto-import."""
    v=conf.env
    v['cprogram_PATTERN']='%s.exe'
    v['cshlib_PATTERN']='%s.dll'
    v['implib_PATTERN']='lib%s.dll.a'
    v['IMPLIB_ST']='-Wl,--out-implib,%s'
    v['CFLAGS_cshlib']=[]
    v.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
+ at conf
def gcc_modifier_cygwin(conf):
    """Cygwin gcc overrides: win32 settings plus cyg* DLL naming."""
    gcc_modifier_win32(conf)
    env=conf.env
    env['cshlib_PATTERN']='cyg%s.dll'
    env.append_value('LINKFLAGS_cshlib',['-Wl,--enable-auto-image-base'])
    env['CFLAGS_cshlib']=[]
+ at conf
def gcc_modifier_darwin(conf):
    """Darwin gcc overrides: dylib naming, frameworks, no -Bstatic/-Bdynamic."""
    settings=(
        ('CFLAGS_cshlib',['-fPIC','-compatibility_version','1','-current_version','1']),
        ('LINKFLAGS_cshlib',['-dynamiclib']),
        ('cshlib_PATTERN','lib%s.dylib'),
        ('FRAMEWORKPATH_ST','-F%s'),
        ('FRAMEWORK_ST',['-framework']),
        ('ARCH_ST',['-arch']),
        ('LINKFLAGS_cstlib',[]),
        ('SHLIB_MARKER',[]),
        ('STLIB_MARKER',[]),
        ('SONAME_ST',[]),
    )
    env=conf.env
    for key,val in settings:
        env[key]=val
+ at conf
def gcc_modifier_aix(conf):
    """AIX gcc overrides: runtime linking (-brtl) and full exports."""
    env=conf.env
    env['LINKFLAGS_cprogram']=['-Wl,-brtl']
    env['LINKFLAGS_cshlib']=['-shared','-Wl,-brtl,-bexpfull']
    env['SHLIB_MARKER']=[]
+ at conf
def gcc_modifier_hpux(conf):
    """HP-UX gcc overrides: .sl shared libraries, -DPIC."""
    env=conf.env
    env['SHLIB_MARKER']=[]
    env['STLIB_MARKER']='-Bstatic'
    env['CFLAGS_cshlib']=['-fPIC','-DPIC']
    env['cshlib_PATTERN']='lib%s.sl'
+ at conf
def gcc_modifier_platform(conf):
    """Apply the OS-specific gcc tweaks selected by env.DEST_OS, if any."""
    modifier=getattr(conf,'gcc_modifier_'+conf.env.DEST_OS,None)
    if modifier:
        modifier()
def configure(conf):
    """Configure the gcc C compiler: detection, flags, then common C setup."""
    for step in (conf.find_gcc,conf.find_ar,conf.gcc_common_flags,conf.gcc_modifier_platform,conf.cc_load_tools,conf.cc_add_flags,conf.link_add_flags):
        step()
diff --git a/waflib/Tools/gdc.py b/waflib/Tools/gdc.py
new file mode 100644
index 0000000..da966ec
--- /dev/null
+++ b/waflib/Tools/gdc.py
@@ -0,0 +1,36 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import sys
+from waflib.Tools import ar,d
+from waflib.Configure import conf
+ at conf
def find_gdc(conf):
    """Find gdc and verify the detected program really is gdc."""
    conf.find_program('gdc',var='D')
    banner=conf.cmd_and_log([conf.env.D,'--version'])
    if "gdc " not in banner:
        conf.fatal("detected compiler is not gdc")
+ at conf
def common_flags_gdc(conf):
    """Default flags for the gdc D compiler."""
    env=conf.env
    env['DFLAGS']=[]
    # compile step
    env['D_SRC_F']=['-c']
    env['D_TGT_F']='-o%s'
    # link step (the compiler driver links)
    env['D_LINKER']=env['D']
    env['DLNK_SRC_F']=''
    env['DLNK_TGT_F']='-o%s'
    env['DINC_ST']='-I%s'
    env['DSHLIB_MARKER']=env['DSTLIB_MARKER']=''
    env['DSTLIB_ST']=env['DSHLIB_ST']='-l%s'
    env['DSTLIBPATH_ST']=env['DLIBPATH_ST']='-L%s'
    env['LINKFLAGS_dshlib']=['-shared']
    # D interface-file generation
    env['DHEADER_ext']='.di'
    env.DFLAGS_d_with_header='-fintfc'
    env['D_HDR_F']='-fintfc-file=%s'
def configure(conf):
    """Configure the gdc D compiler."""
    conf.find_gdc()
    conf.load('ar')
    conf.load('d')
    conf.common_flags_gdc()
    conf.d_platform_flags()
diff --git a/waflib/Tools/gfortran.py b/waflib/Tools/gfortran.py
new file mode 100644
index 0000000..854a93d
--- /dev/null
+++ b/waflib/Tools/gfortran.py
@@ -0,0 +1,69 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import re
+from waflib import Utils
+from waflib.Tools import fc,fc_config,fc_scan,ar
+from waflib.Configure import conf
+ at conf
def find_gfortran(conf):
    """Locate gfortran (or g77), record its version, and set FC_NAME."""
    compiler=conf.find_program(['gfortran','g77'],var='FC')
    compiler=conf.cmd_to_list(compiler)
    conf.get_gfortran_version(compiler)
    conf.env.FC_NAME='GFORTRAN'
+ at conf
def gfortran_flags(conf):
    """gfortran-specific flag defaults (PIC, -J module dir, debug flags)."""
    for key,val in (
        ('FCFLAGS_fcshlib',['-fPIC']),
        ('FORTRANMODFLAG',['-J','']),
        ('FCFLAGS_DEBUG',['-Werror']),
    ):
        conf.env[key]=val
+ at conf
def gfortran_modifier_win32(conf):
    """win32 fortran settings shared across compilers (see fc_config)."""
    fc_config.fortran_modifier_win32(conf)
+ at conf
def gfortran_modifier_cygwin(conf):
    """Cygwin fortran settings shared across compilers (see fc_config)."""
    fc_config.fortran_modifier_cygwin(conf)
+ at conf
def gfortran_modifier_darwin(conf):
    """Darwin fortran settings shared across compilers (see fc_config)."""
    fc_config.fortran_modifier_darwin(conf)
+ at conf
def gfortran_modifier_platform(conf):
    """Dispatch to the gfortran modifier matching the destination OS, if any."""
    dest_os=conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
    modifier=getattr(conf,'gfortran_modifier_'+dest_os,None)
    if modifier:
        modifier()
+ at conf
def get_gfortran_version(conf,fc):
    """Verify *fc* is GNU Fortran and store its version in env.FC_VERSION.

    The version is read from the preprocessor's predefined __GNUC__ macros.
    """
    import shlex
    version_re=re.compile(r"GNU\s*Fortran",re.I).search
    cmd=fc+['--version']
    out,err=fc_config.getoutput(conf,cmd,stdin=False)
    # the banner may land on stdout or stderr depending on the build
    match=version_re(out) if out else version_re(err)
    if not match:
        conf.fatal('Could not determine the compiler type')
    cmd=fc+['-dM','-E','-']
    out,err=fc_config.getoutput(conf,cmd,stdin=True)
    if out.find('__GNUC__')<0:
        conf.fatal('Could not determine the compiler type')
    defines={}
    for line in out.split('\n'):
        parts=shlex.split(line)
        # "#define NAME VALUE" -> parts[1]=NAME, parts[2]=VALUE
        if len(parts)>2:
            defines[parts[1]]=parts[2]
    # removed the unused local helpers isD/isT (dead code in the original)
    conf.env['FC_VERSION']=(defines['__GNUC__'],defines['__GNUC_MINOR__'],defines['__GNUC_PATCHLEVEL__'])
def configure(conf):
    """Configure the gfortran compiler: detect it, then set flags."""
    for step in (conf.find_gfortran,conf.find_ar,conf.fc_flags,conf.fc_add_flags,conf.gfortran_flags,conf.gfortran_modifier_platform):
        step()
diff --git a/waflib/Tools/glib2.py b/waflib/Tools/glib2.py
new file mode 100644
index 0000000..1d75510
--- /dev/null
+++ b/waflib/Tools/glib2.py
@@ -0,0 +1,173 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os
+from waflib import Task,Utils,Options,Errors,Logs
+from waflib.TaskGen import taskgen_method,before_method,after_method,feature
+ at taskgen_method
def add_marshal_file(self,filename,prefix):
    """Register *filename* for glib-genmarshal processing with *prefix*."""
    if not hasattr(self,'marshal_list'):
        # first call: create the list and schedule the processing method once
        self.marshal_list=[]
        self.meths.append('process_marshal')
    self.marshal_list.append((filename,prefix))
+ at before_method('process_source')
def process_marshal(self):
    """Create glib_genmarshal tasks for each registered marshal file.

    The generated .c file (of the last entry) is added to the sources so
    it gets compiled.
    """
    for filename,prefix in getattr(self,'marshal_list',[]):
        node=self.path.find_resource(filename)
        if not node:
            raise Errors.WafError('file not found %r'%filename)
        h_node=node.change_ext('.h')
        c_node=node.change_ext('.c')
        task=self.create_task('glib_genmarshal',node,[h_node,c_node])
        task.env.GLIB_GENMARSHAL_PREFIX=prefix
    self.source=self.to_nodes(getattr(self,'source',[]))
    self.source.append(c_node)
class glib_genmarshal(Task.Task):
    """Run glib-genmarshal twice: once for the header, once for the body."""
    vars=['GLIB_GENMARSHAL_PREFIX','GLIB_GENMARSHAL']
    color='BLUE'
    ext_out=['.h']
    def run(self):
        bld=self.inputs[0].__class__.ctx
        get=self.env.get_flat
        cmd1="%s %s --prefix=%s --header > %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[0].abspath())
        ret=bld.exec_command(cmd1)
        if ret:
            return ret
        # the generated body must include the generated header
        c='''#include "%s"\n'''%self.outputs[0].name
        self.outputs[1].write(c)
        cmd2="%s %s --prefix=%s --body >> %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[1].abspath())
        return bld.exec_command(cmd2)
+ at taskgen_method
def add_enums_from_template(self,source='',target='',template='',comments=''):
    """Register a glib-mkenums run driven by a template file."""
    if not hasattr(self,'enums_list'):
        self.enums_list=[]
        self.meths.append('process_enums')
    self.enums_list.append({'source':source,'target':target,'template':template,'file-head':'','file-prod':'','file-tail':'','enum-prod':'','value-head':'','value-prod':'','value-tail':'','comments':comments})
+ at taskgen_method
def add_enums(self,source='',target='',file_head='',file_prod='',file_tail='',enum_prod='',value_head='',value_prod='',value_tail='',comments=''):
    """Register a glib-mkenums run with inline production fragments."""
    if not hasattr(self,'enums_list'):
        self.enums_list=[]
        self.meths.append('process_enums')
    self.enums_list.append({'source':source,'template':'','target':target,'file-head':file_head,'file-prod':file_prod,'file-tail':file_tail,'enum-prod':enum_prod,'value-head':value_head,'value-prod':value_prod,'value-tail':value_tail,'comments':comments})
+ at before_method('process_source')
def process_enums(self):
    """Create one glib_mkenums task per registered enum description."""
    for spec in getattr(self,'enums_list',[]):
        task=self.create_task('glib_mkenums')
        env=task.env
        dep_nodes=[]
        src_names=self.to_list(spec['source'])
        if not src_names:
            raise Errors.WafError('missing source '+str(spec))
        src_nodes=[self.path.find_resource(k) for k in src_names]
        dep_nodes+=src_nodes
        env['GLIB_MKENUMS_SOURCE']=[k.abspath() for k in src_nodes]
        if not spec['target']:
            raise Errors.WafError('missing target '+str(spec))
        out_node=self.path.find_or_declare(spec['target'])
        if out_node.name.endswith('.c'):
            # generated C code must be compiled with the rest of the sources
            self.source.append(out_node)
        env['GLIB_MKENUMS_TARGET']=out_node.abspath()
        opts=[]
        if spec['template']:
            template_node=self.path.find_resource(spec['template'])
            opts.append('--template %s'%(template_node.abspath()))
            dep_nodes.append(template_node)
        flag_map={'file-head':'--fhead','file-prod':'--fprod','file-tail':'--ftail','enum-prod':'--eprod','value-head':'--vhead','value-prod':'--vprod','value-tail':'--vtail','comments':'--comments'}
        for key,flag in flag_map.items():
            if spec[key]:
                opts.append('%s %r'%(flag,spec[key]))
        env['GLIB_MKENUMS_OPTIONS']=' '.join(opts)
        task.set_inputs(dep_nodes)
        task.set_outputs(out_node)
class glib_mkenums(Task.Task):
    """Generate enum code with glib-mkenums; the shell redirection writes the target."""
    run_str='${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}'
    color='PINK'
    ext_out=['.h']
+ at taskgen_method
def add_settings_schemas(self,filename_list):
    """Register GSettings schema files (a string or a list) for processing."""
    if not hasattr(self,'settings_schema_files'):
        self.settings_schema_files=[]
    if not isinstance(filename_list,list):
        filename_list=[filename_list]
    self.settings_schema_files.extend(filename_list)
+ at taskgen_method
def add_settings_enums(self,namespace,filename_list):
    """Register enum headers to become a GSettings <namespace>.enums.xml file.

    May only be called once per task generator.
    """
    if hasattr(self,'settings_enum_namespace'):
        raise Errors.WafError("Tried to add gsettings enums to '%s' more than once"%self.name)
    self.settings_enum_namespace=namespace
    # bug fix: the original `type(filename_list)!='list'` compared a type
    # object to a string, which is always true, so list arguments were
    # wrapped again and produced [[...]]
    if not isinstance(filename_list,list):
        filename_list=[filename_list]
    self.settings_enum_files=filename_list
def r_change_ext(self,ext):
    """Like Node.change_ext, but resolves through find_or_declare."""
    name=self.name
    dot=name.rfind('.')
    # strip the existing extension when there is one
    stem=name[:dot] if dot>=0 else name
    return self.parent.find_or_declare([stem+ext])
+ at feature('glib2')
def process_settings(self):
    # Build tasks for GSettings enum generation and schema validation, and
    # (at install time) install the files and register a one-shot post-build
    # hook that recompiles the schema cache.
    enums_tgt_node=[]
    install_files=[]
    settings_schema_files=getattr(self,'settings_schema_files',[])
    if settings_schema_files and not self.env['GLIB_COMPILE_SCHEMAS']:
        raise Errors.WafError("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")
    # optional: generate <namespace>.enums.xml from the registered headers
    if hasattr(self,'settings_enum_files'):
        enums_task=self.create_task('glib_mkenums')
        source_list=self.settings_enum_files
        source_list=[self.path.find_resource(k)for k in source_list]
        enums_task.set_inputs(source_list)
        enums_task.env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list]
        target=self.settings_enum_namespace+'.enums.xml'
        tgt_node=self.path.find_or_declare(target)
        enums_task.set_outputs(tgt_node)
        enums_task.env['GLIB_MKENUMS_TARGET']=tgt_node.abspath()
        enums_tgt_node=[tgt_node]
        install_files.append(tgt_node)
        # NOTE(review): ` at EnumName` below looks like email-transport mangling
        # of `@EnumName` (`@` -> ` at `) — restore against upstream waf.
        options='--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead " <@type@ id=\\"%s. at EnumName@\\">" --vprod " <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail " </@type@>" --ftail "</schemalist>" '%(self.settings_enum_namespace)
        enums_task.env['GLIB_MKENUMS_OPTIONS']=options
    # one validation task per schema; generated enums xml is a dependency
    for schema in settings_schema_files:
        schema_task=self.create_task('glib_validate_schema')
        schema_node=self.path.find_resource(schema)
        if not schema_node:
            raise Errors.WafError("Cannot find the schema file '%s'"%schema)
        install_files.append(schema_node)
        source_list=enums_tgt_node+[schema_node]
        schema_task.set_inputs(source_list)
        schema_task.env['GLIB_COMPILE_SCHEMAS_OPTIONS']=[("--schema-file="+k.abspath())for k in source_list]
        target_node=r_change_ext(schema_node,'.xml.valid')
        schema_task.set_outputs(target_node)
        schema_task.env['GLIB_VALIDATE_SCHEMA_OUTPUT']=target_node.abspath()
    def compile_schemas_callback(bld):
        # post-build hook: refresh the installed schema cache
        if not bld.is_install:return
        Logs.pprint('YELLOW','Updating GSettings schema cache')
        command=Utils.subst_vars("${GLIB_COMPILE_SCHEMAS} ${GSETTINGSSCHEMADIR}",bld.env)
        ret=self.bld.exec_command(command)
    if self.bld.is_install:
        if not self.env['GSETTINGSSCHEMADIR']:
            raise Errors.WafError('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')
        if install_files:
            self.bld.install_files(self.env['GSETTINGSSCHEMADIR'],install_files)
            # register the cache-refresh hook only once per build
            if not hasattr(self.bld,'_compile_schemas_registered'):
                self.bld.add_post_fun(compile_schemas_callback)
                self.bld._compile_schemas_registered=True
class glib_validate_schema(Task.Task):
    """Dry-run glib-compile-schemas to validate; touch a stamp file on success."""
    run_str='rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
    color='PINK'
def configure(conf):
    """Find the glib code-generation tools and compute GSETTINGSSCHEMADIR."""
    conf.find_program('glib-genmarshal',var='GLIB_GENMARSHAL')
    conf.find_perl_program('glib-mkenums',var='GLIB_MKENUMS')
    conf.find_program('glib-compile-schemas',var='GLIB_COMPILE_SCHEMAS',mandatory=False)
    def getstr(varname):
        # command-line option wins, then a previously configured env value
        return getattr(Options.options,varname,getattr(conf.env,varname,''))
    gsettingsschemadir=getstr('GSETTINGSSCHEMADIR')
    if not gsettingsschemadir:
        datadir=getstr('DATADIR') or os.path.join(conf.env['PREFIX'],'share')
        gsettingsschemadir=os.path.join(datadir,'glib-2.0','schemas')
    conf.env['GSETTINGSSCHEMADIR']=gsettingsschemadir
def options(opt):
    """Add the --gsettingsschemadir command-line option."""
    opt.add_option('--gsettingsschemadir',help='GSettings schema location [Default: ${datadir}/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR')
diff --git a/waflib/Tools/gnu_dirs.py b/waflib/Tools/gnu_dirs.py
new file mode 100644
index 0000000..9c8a304
--- /dev/null
+++ b/waflib/Tools/gnu_dirs.py
@@ -0,0 +1,65 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os
+from waflib import Utils,Options,Context
# GNU-style installation directories: (name, description, default) triples.
_options=[x.split(', ')for x in'''
bindir, user executables, ${EXEC_PREFIX}/bin
sbindir, system admin executables, ${EXEC_PREFIX}/sbin
libexecdir, program executables, ${EXEC_PREFIX}/libexec
sysconfdir, read-only single-machine data, ${PREFIX}/etc
sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
localstatedir, modifiable single-machine data, ${PREFIX}/var
libdir, object code libraries, ${EXEC_PREFIX}/lib
includedir, C header files, ${PREFIX}/include
oldincludedir, C header files for non-gcc, /usr/include
datarootdir, read-only arch.-independent data root, ${PREFIX}/share
datadir, read-only architecture-independent data, ${DATAROOTDIR}
infodir, info documentation, ${DATAROOTDIR}/info
localedir, locale-dependent data, ${DATAROOTDIR}/locale
mandir, man documentation, ${DATAROOTDIR}/man
docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
htmldir, html documentation, ${DOCDIR}
dvidir, dvi documentation, ${DOCDIR}
pdfdir, pdf documentation, ${DOCDIR}
psdir, ps documentation, ${DOCDIR}
'''.split('\n')if x]
def configure(conf):
    """Resolve every GNU directory variable into conf.env.

    Substitution is iterated because defaults may reference each other
    (e.g. DATADIR -> DATAROOTDIR); fatal if some variable never resolves.
    """
    def get_param(varname,default):
        return getattr(Options.options,varname,'')or default
    env=conf.env
    env.LIBDIR=env.BINDIR=[]
    env.EXEC_PREFIX=get_param('EXEC_PREFIX',env.PREFIX)
    env.PACKAGE=getattr(Context.g_module,'APPNAME',None)or env.PACKAGE
    complete=False
    # renamed from `iter`, which shadowed the builtin
    attempts=0
    while not complete and attempts<len(_options)+1:
        attempts+=1
        complete=True
        for name,desc,default in _options:
            name=name.upper()
            if not env[name]:
                try:
                    env[name]=Utils.subst_vars(get_param(name,default).replace('/',os.sep),env)
                except TypeError:
                    # a referenced variable is not resolved yet; retry later
                    complete=False
    if not complete:
        lst=[name for name,_,_ in _options if not env[name.upper()]]
        raise conf.errors.WafError('Variable substitution failure %r'%lst)
def options(opt):
    """Add the GNU directory options (--bindir, --libdir, ...)."""
    inst_dir=opt.add_option_group('Installation directories','By default, "waf install" will put the files in\
 "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
 than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
    # move --prefix/--destdir into the installation group
    for k in('--prefix','--destdir'):
        option=opt.parser.get_option(k)
        if option:
            opt.parser.remove_option(k)
            inst_dir.add_option(option)
    inst_dir.add_option('--exec-prefix',help='installation prefix [Default: ${PREFIX}]',default='',dest='EXEC_PREFIX')
    dirs_options=opt.add_option_group('Pre-defined installation directories','')
    for name,desc,default in _options:
        option_name='--'+name
        str_help='%s [Default: %s]'%(desc,default)
        dirs_options.add_option(option_name,help=str_help,default='',dest=name.upper())
diff --git a/waflib/Tools/gxx.py b/waflib/Tools/gxx.py
new file mode 100644
index 0000000..8257017
--- /dev/null
+++ b/waflib/Tools/gxx.py
@@ -0,0 +1,97 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys
+from waflib import Configure,Options,Utils
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+ at conf
+def find_gxx(conf):
+ cxx=conf.find_program(['g++','c++'],var='CXX')
+ cxx=conf.cmd_to_list(cxx)
+ conf.get_cc_version(cxx,gcc=True)
+ conf.env.CXX_NAME='gcc'
+ conf.env.CXX=cxx
+ at conf
+def gxx_common_flags(conf):
+ v=conf.env
+ v['CXX_SRC_F']=[]
+ v['CXX_TGT_F']=['-c','-o']
+ if not v['LINK_CXX']:v['LINK_CXX']=v['CXX']
+ v['CXXLNK_SRC_F']=[]
+ v['CXXLNK_TGT_F']=['-o']
+ v['CPPPATH_ST']='-I%s'
+ v['DEFINES_ST']='-D%s'
+ v['LIB_ST']='-l%s'
+ v['LIBPATH_ST']='-L%s'
+ v['STLIB_ST']='-l%s'
+ v['STLIBPATH_ST']='-L%s'
+ v['RPATH_ST']='-Wl,-rpath,%s'
+ v['SONAME_ST']='-Wl,-h,%s'
+ v['SHLIB_MARKER']='-Wl,-Bdynamic'
+ v['STLIB_MARKER']='-Wl,-Bstatic'
+ v['cxxprogram_PATTERN']='%s'
+ v['CXXFLAGS_cxxshlib']=['-fPIC']
+ v['LINKFLAGS_cxxshlib']=['-shared']
+ v['cxxshlib_PATTERN']='lib%s.so'
+ v['LINKFLAGS_cxxstlib']=['-Wl,-Bstatic']
+ v['cxxstlib_PATTERN']='lib%s.a'
+ v['LINKFLAGS_MACBUNDLE']=['-bundle','-undefined','dynamic_lookup']
+ v['CXXFLAGS_MACBUNDLE']=['-fPIC']
+ v['macbundle_PATTERN']='%s.bundle'
+ at conf
+def gxx_modifier_win32(conf):
+ v=conf.env
+ v['cxxprogram_PATTERN']='%s.exe'
+ v['cxxshlib_PATTERN']='%s.dll'
+ v['implib_PATTERN']='lib%s.dll.a'
+ v['IMPLIB_ST']='-Wl,--out-implib,%s'
+ v['CXXFLAGS_cxxshlib']=[]
+ v.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
+ at conf
+def gxx_modifier_cygwin(conf):
+ gxx_modifier_win32(conf)
+ v=conf.env
+ v['cxxshlib_PATTERN']='cyg%s.dll'
+ v.append_value('LINKFLAGS_cxxshlib',['-Wl,--enable-auto-image-base'])
+ v['CXXFLAGS_cxxshlib']=[]
+ at conf
+def gxx_modifier_darwin(conf):
+ v=conf.env
+ v['CXXFLAGS_cxxshlib']=['-fPIC','-compatibility_version','1','-current_version','1']
+ v['LINKFLAGS_cxxshlib']=['-dynamiclib']
+ v['cxxshlib_PATTERN']='lib%s.dylib'
+ v['FRAMEWORKPATH_ST']='-F%s'
+ v['FRAMEWORK_ST']=['-framework']
+ v['ARCH_ST']=['-arch']
+ v['LINKFLAGS_cxxstlib']=[]
+ v['SHLIB_MARKER']=[]
+ v['STLIB_MARKER']=[]
+ v['SONAME_ST']=[]
+ at conf
+def gxx_modifier_aix(conf):
+ v=conf.env
+ v['LINKFLAGS_cxxprogram']=['-Wl,-brtl']
+ v['LINKFLAGS_cxxshlib']=['-shared','-Wl,-brtl,-bexpfull']
+ v['SHLIB_MARKER']=[]
+ at conf
+def gxx_modifier_hpux(conf):
+ v=conf.env
+ v['SHLIB_MARKER']=[]
+ v['STLIB_MARKER']='-Bstatic'
+ v['CFLAGS_cxxshlib']=['-fPIC','-DPIC']
+ v['cxxshlib_PATTERN']='lib%s.sl'
+ at conf
+def gxx_modifier_platform(conf):
+ gxx_modifier_func=getattr(conf,'gxx_modifier_'+conf.env.DEST_OS,None)
+ if gxx_modifier_func:
+ gxx_modifier_func()
+def configure(conf):
+ conf.find_gxx()
+ conf.find_ar()
+ conf.gxx_common_flags()
+ conf.gxx_modifier_platform()
+ conf.cxx_load_tools()
+ conf.cxx_add_flags()
+ conf.link_add_flags()
diff --git a/waflib/Tools/icc.py b/waflib/Tools/icc.py
new file mode 100644
index 0000000..7c75e18
--- /dev/null
+++ b/waflib/Tools/icc.py
@@ -0,0 +1,30 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys
+from waflib.Tools import ccroot,ar,gcc
+from waflib.Configure import conf
+ at conf
+def find_icc(conf):
+ if sys.platform=='cygwin':
+ conf.fatal('The Intel compiler does not work on Cygwin')
+ v=conf.env
+ cc=None
+ if v['CC']:cc=v['CC']
+ elif'CC'in conf.environ:cc=conf.environ['CC']
+ if not cc:cc=conf.find_program('icc',var='CC')
+ if not cc:cc=conf.find_program('ICL',var='CC')
+ if not cc:conf.fatal('Intel C Compiler (icc) was not found')
+ cc=conf.cmd_to_list(cc)
+ conf.get_cc_version(cc,icc=True)
+ v['CC']=cc
+ v['CC_NAME']='icc'
+def configure(conf):
+ conf.find_icc()
+ conf.find_ar()
+ conf.gcc_common_flags()
+ conf.gcc_modifier_platform()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
diff --git a/waflib/Tools/icpc.py b/waflib/Tools/icpc.py
new file mode 100644
index 0000000..14a5325
--- /dev/null
+++ b/waflib/Tools/icpc.py
@@ -0,0 +1,29 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys
+from waflib.Tools import ccroot,ar,gxx
+from waflib.Configure import conf
+ at conf
+def find_icpc(conf):
+ if sys.platform=='cygwin':
+ conf.fatal('The Intel compiler does not work on Cygwin')
+ v=conf.env
+ cxx=None
+ if v['CXX']:cxx=v['CXX']
+ elif'CXX'in conf.environ:cxx=conf.environ['CXX']
+ if not cxx:cxx=conf.find_program('icpc',var='CXX')
+ if not cxx:conf.fatal('Intel C++ Compiler (icpc) was not found')
+ cxx=conf.cmd_to_list(cxx)
+ conf.get_cc_version(cxx,icc=True)
+ v['CXX']=cxx
+ v['CXX_NAME']='icc'
+def configure(conf):
+ conf.find_icpc()
+ conf.find_ar()
+ conf.gxx_common_flags()
+ conf.gxx_modifier_platform()
+ conf.cxx_load_tools()
+ conf.cxx_add_flags()
+ conf.link_add_flags()
diff --git a/waflib/Tools/ifort.py b/waflib/Tools/ifort.py
new file mode 100644
index 0000000..a9f2528
--- /dev/null
+++ b/waflib/Tools/ifort.py
@@ -0,0 +1,49 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import re
+from waflib import Utils
+from waflib.Tools import fc,fc_config,fc_scan,ar
+from waflib.Configure import conf
+ at conf
+def find_ifort(conf):
+ fc=conf.find_program('ifort',var='FC')
+ fc=conf.cmd_to_list(fc)
+ conf.get_ifort_version(fc)
+ conf.env.FC_NAME='IFORT'
+ at conf
+def ifort_modifier_cygwin(conf):
+ raise NotImplementedError("Ifort on cygwin not yet implemented")
+ at conf
+def ifort_modifier_win32(conf):
+ fc_config.fortran_modifier_win32(conf)
+ at conf
+def ifort_modifier_darwin(conf):
+ fc_config.fortran_modifier_darwin(conf)
+ at conf
+def ifort_modifier_platform(conf):
+ dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform()
+ ifort_modifier_func=getattr(conf,'ifort_modifier_'+dest_os,None)
+ if ifort_modifier_func:
+ ifort_modifier_func()
+ at conf
+def get_ifort_version(conf,fc):
+ version_re=re.compile(r"ifort\s*\(IFORT\)\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
+ cmd=fc+['--version']
+ out,err=fc_config.getoutput(conf,cmd,stdin=False)
+ if out:
+ match=version_re(out)
+ else:
+ match=version_re(err)
+ if not match:
+ conf.fatal('cannot determine ifort version.')
+ k=match.groupdict()
+ conf.env['FC_VERSION']=(k['major'],k['minor'])
+def configure(conf):
+ conf.find_ifort()
+ conf.find_program('xiar',var='AR')
+ conf.env.ARFLAGS='rcs'
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.ifort_modifier_platform()
diff --git a/waflib/Tools/intltool.py b/waflib/Tools/intltool.py
new file mode 100644
index 0000000..d558674
--- /dev/null
+++ b/waflib/Tools/intltool.py
@@ -0,0 +1,77 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,re
+from waflib import Configure,TaskGen,Task,Utils,Runner,Options,Build,Logs
+import waflib.Tools.ccroot
+from waflib.TaskGen import feature,before_method
+from waflib.Logs import error
+ at before_method('process_source')
+ at feature('intltool_in')
+def apply_intltool_in_f(self):
+ try:self.meths.remove('process_source')
+ except ValueError:pass
+ if not self.env.LOCALEDIR:
+ self.env.LOCALEDIR=self.env.PREFIX+'/share/locale'
+ for i in self.to_list(self.source):
+ node=self.path.find_resource(i)
+ podir=getattr(self,'podir','po')
+ podirnode=self.path.find_dir(podir)
+ if not podirnode:
+ error("could not find the podir %r"%podir)
+ continue
+ cache=getattr(self,'intlcache','.intlcache')
+ self.env['INTLCACHE']=os.path.join(self.path.bldpath(),podir,cache)
+ self.env['INTLPODIR']=podirnode.bldpath()
+ self.env['INTLFLAGS']=getattr(self,'flags',['-q','-u','-c'])
+ task=self.create_task('intltool',node,node.change_ext(''))
+ inst=getattr(self,'install_path','${LOCALEDIR}')
+ if inst:
+ self.bld.install_files(inst,task.outputs)
+ at feature('intltool_po')
+def apply_intltool_po(self):
+ try:self.meths.remove('process_source')
+ except ValueError:pass
+ if not self.env.LOCALEDIR:
+ self.env.LOCALEDIR=self.env.PREFIX+'/share/locale'
+ appname=getattr(self,'appname','set_your_app_name')
+ podir=getattr(self,'podir','')
+ inst=getattr(self,'install_path','${LOCALEDIR}')
+ linguas=self.path.find_node(os.path.join(podir,'LINGUAS'))
+ if linguas:
+ file=open(linguas.abspath())
+ langs=[]
+ for line in file.readlines():
+ if not line.startswith('#'):
+ langs+=line.split()
+ file.close()
+ re_linguas=re.compile('[-a-zA-Z_ at .]+')
+ for lang in langs:
+ if re_linguas.match(lang):
+ node=self.path.find_resource(os.path.join(podir,re_linguas.match(lang).group()+'.po'))
+ task=self.create_task('po',node,node.change_ext('.mo'))
+ if inst:
+ filename=task.outputs[0].name
+ (langname,ext)=os.path.splitext(filename)
+ inst_file=inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+appname+'.mo'
+ self.bld.install_as(inst_file,task.outputs[0],chmod=getattr(self,'chmod',Utils.O644),env=task.env)
+ else:
+ Logs.pprint('RED',"Error no LINGUAS file found in po directory")
+class po(Task.Task):
+ run_str='${MSGFMT} -o ${TGT} ${SRC}'
+ color='BLUE'
+class intltool(Task.Task):
+ run_str='${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
+ color='BLUE'
+def configure(conf):
+ conf.find_program('msgfmt',var='MSGFMT')
+ conf.find_perl_program('intltool-merge',var='INTLTOOL')
+ prefix=conf.env.PREFIX
+ datadir=conf.env.DATADIR
+ if not datadir:
+ datadir=os.path.join(prefix,'share')
+ conf.define('LOCALEDIR',os.path.join(datadir,'locale').replace('\\','\\\\'))
+ conf.define('DATADIR',datadir.replace('\\','\\\\'))
+ if conf.env.CC or conf.env.CXX:
+ conf.check(header_name='locale.h')
diff --git a/waflib/Tools/irixcc.py b/waflib/Tools/irixcc.py
new file mode 100644
index 0000000..8dbdfca
--- /dev/null
+++ b/waflib/Tools/irixcc.py
@@ -0,0 +1,48 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os
+from waflib import Utils
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+ at conf
+def find_irixcc(conf):
+ v=conf.env
+ cc=None
+ if v['CC']:cc=v['CC']
+ elif'CC'in conf.environ:cc=conf.environ['CC']
+ if not cc:cc=conf.find_program('cc',var='CC')
+ if not cc:conf.fatal('irixcc was not found')
+ cc=conf.cmd_to_list(cc)
+ try:
+ conf.cmd_and_log(cc+['-version'])
+ except Exception:
+ conf.fatal('%r -version could not be executed'%cc)
+ v['CC']=cc
+ v['CC_NAME']='irix'
+ at conf
+def irixcc_common_flags(conf):
+ v=conf.env
+ v['CC_SRC_F']=''
+ v['CC_TGT_F']=['-c','-o']
+ v['CPPPATH_ST']='-I%s'
+ v['DEFINES_ST']='-D%s'
+ if not v['LINK_CC']:v['LINK_CC']=v['CC']
+ v['CCLNK_SRC_F']=''
+ v['CCLNK_TGT_F']=['-o']
+ v['LIB_ST']='-l%s'
+ v['LIBPATH_ST']='-L%s'
+ v['STLIB_ST']='-l%s'
+ v['STLIBPATH_ST']='-L%s'
+ v['cprogram_PATTERN']='%s'
+ v['cshlib_PATTERN']='lib%s.so'
+ v['cstlib_PATTERN']='lib%s.a'
+def configure(conf):
+ conf.find_irixcc()
+ conf.find_cpp()
+ conf.find_ar()
+ conf.irixcc_common_flags()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
diff --git a/waflib/Tools/javaw.py b/waflib/Tools/javaw.py
new file mode 100644
index 0000000..5750f40
--- /dev/null
+++ b/waflib/Tools/javaw.py
@@ -0,0 +1,311 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,re,tempfile,shutil
+from waflib import TaskGen,Task,Utils,Options,Build,Errors,Node,Logs
+from waflib.Configure import conf
+from waflib.TaskGen import feature,before_method,after_method
+from waflib.Tools import ccroot
+ccroot.USELIB_VARS['javac']=set(['CLASSPATH','JAVACFLAGS'])
+SOURCE_RE='**/*.java'
+JAR_RE='**/*'
+class_check_source='''
+public class Test {
+ public static void main(String[] argv) {
+ Class lib;
+ if (argv.length < 1) {
+ System.err.println("Missing argument");
+ System.exit(77);
+ }
+ try {
+ lib = Class.forName(argv[0]);
+ } catch (ClassNotFoundException e) {
+ System.err.println("ClassNotFoundException");
+ System.exit(1);
+ }
+ lib = null;
+ System.exit(0);
+ }
+}
+'''
+ at feature('javac')
+ at before_method('process_source')
+def apply_java(self):
+ Utils.def_attrs(self,jarname='',classpath='',sourcepath='.',srcdir='.',jar_mf_attributes={},jar_mf_classpath=[])
+ nodes_lst=[]
+ outdir=getattr(self,'outdir',None)
+ if outdir:
+ if not isinstance(outdir,Node.Node):
+ outdir=self.path.get_bld().make_node(self.outdir)
+ else:
+ outdir=self.path.get_bld()
+ outdir.mkdir()
+ self.outdir=outdir
+ self.env['OUTDIR']=outdir.abspath()
+ self.javac_task=tsk=self.create_task('javac')
+ tmp=[]
+ srcdir=getattr(self,'srcdir','')
+ if isinstance(srcdir,Node.Node):
+ srcdir=[srcdir]
+ for x in Utils.to_list(srcdir):
+ if isinstance(x,Node.Node):
+ y=x
+ else:
+ y=self.path.find_dir(x)
+ if not y:
+ self.bld.fatal('Could not find the folder %s from %s'%(x,self.path))
+ tmp.append(y)
+ tsk.srcdir=tmp
+ if getattr(self,'compat',None):
+ tsk.env.append_value('JAVACFLAGS',['-source',self.compat])
+ if hasattr(self,'sourcepath'):
+ fold=[isinstance(x,Node.Node)and x or self.path.find_dir(x)for x in self.to_list(self.sourcepath)]
+ names=os.pathsep.join([x.srcpath()for x in fold])
+ else:
+ names=[x.srcpath()for x in tsk.srcdir]
+ if names:
+ tsk.env.append_value('JAVACFLAGS',['-sourcepath',names])
+ at feature('javac')
+ at after_method('apply_java')
+def use_javac_files(self):
+ lst=[]
+ self.uselib=self.to_list(getattr(self,'uselib',[]))
+ names=self.to_list(getattr(self,'use',[]))
+ get=self.bld.get_tgen_by_name
+ for x in names:
+ try:
+ y=get(x)
+ except Exception:
+ self.uselib.append(x)
+ else:
+ y.post()
+ lst.append(y.jar_task.outputs[0].abspath())
+ self.javac_task.set_run_after(y.jar_task)
+ if lst:
+ self.env.append_value('CLASSPATH',lst)
+ at feature('javac')
+ at after_method('apply_java','propagate_uselib_vars','use_javac_files')
+def set_classpath(self):
+ self.env.append_value('CLASSPATH',getattr(self,'classpath',[]))
+ for x in self.tasks:
+ x.env.CLASSPATH=os.pathsep.join(self.env.CLASSPATH)+os.pathsep
+ at feature('jar')
+ at after_method('apply_java','use_javac_files')
+ at before_method('process_source')
+def jar_files(self):
+ destfile=getattr(self,'destfile','test.jar')
+ jaropts=getattr(self,'jaropts',[])
+ manifest=getattr(self,'manifest',None)
+ basedir=getattr(self,'basedir',None)
+ if basedir:
+ if not isinstance(self.basedir,Node.Node):
+ basedir=self.path.get_bld().make_node(basedir)
+ else:
+ basedir=self.path.get_bld()
+ if not basedir:
+ self.bld.fatal('Could not find the basedir %r for %r'%(self.basedir,self))
+ self.jar_task=tsk=self.create_task('jar_create')
+ if manifest:
+ jarcreate=getattr(self,'jarcreate','cfm')
+ node=self.path.find_node(manifest)
+ tsk.dep_nodes.append(node)
+ jaropts.insert(0,node.abspath())
+ else:
+ jarcreate=getattr(self,'jarcreate','cf')
+ if not isinstance(destfile,Node.Node):
+ destfile=self.path.find_or_declare(destfile)
+ if not destfile:
+ self.bld.fatal('invalid destfile %r for %r'%(destfile,self))
+ tsk.set_outputs(destfile)
+ tsk.basedir=basedir
+ jaropts.append('-C')
+ jaropts.append(basedir.bldpath())
+ jaropts.append('.')
+ tsk.env['JAROPTS']=jaropts
+ tsk.env['JARCREATE']=jarcreate
+ if getattr(self,'javac_task',None):
+ tsk.set_run_after(self.javac_task)
+ at feature('jar')
+ at after_method('jar_files')
+def use_jar_files(self):
+ lst=[]
+ self.uselib=self.to_list(getattr(self,'uselib',[]))
+ names=self.to_list(getattr(self,'use',[]))
+ get=self.bld.get_tgen_by_name
+ for x in names:
+ try:
+ y=get(x)
+ except Exception:
+ self.uselib.append(x)
+ else:
+ y.post()
+ self.jar_task.run_after.update(y.tasks)
+class jar_create(Task.Task):
+ color='GREEN'
+ run_str='${JAR} ${JARCREATE} ${TGT} ${JAROPTS}'
+ def runnable_status(self):
+ for t in self.run_after:
+ if not t.hasrun:
+ return Task.ASK_LATER
+ if not self.inputs:
+ global JAR_RE
+ try:
+ self.inputs=[x for x in self.basedir.ant_glob(JAR_RE,remove=False)if id(x)!=id(self.outputs[0])]
+ except Exception:
+ raise Errors.WafError('Could not find the basedir %r for %r'%(self.basedir,self))
+ return super(jar_create,self).runnable_status()
+class javac(Task.Task):
+ color='BLUE'
+ nocache=True
+ vars=['CLASSPATH','JAVACFLAGS','JAVAC','OUTDIR']
+ def runnable_status(self):
+ for t in self.run_after:
+ if not t.hasrun:
+ return Task.ASK_LATER
+ if not self.inputs:
+ global SOURCE_RE
+ self.inputs=[]
+ for x in self.srcdir:
+ self.inputs.extend(x.ant_glob(SOURCE_RE,remove=False))
+ return super(javac,self).runnable_status()
+ def run(self):
+ env=self.env
+ gen=self.generator
+ bld=gen.bld
+ wd=bld.bldnode.abspath()
+ def to_list(xx):
+ if isinstance(xx,str):return[xx]
+ return xx
+ cmd=[]
+ cmd.extend(to_list(env['JAVAC']))
+ cmd.extend(['-classpath'])
+ cmd.extend(to_list(env['CLASSPATH']))
+ cmd.extend(['-d'])
+ cmd.extend(to_list(env['OUTDIR']))
+ cmd.extend(to_list(env['JAVACFLAGS']))
+ files=[a.path_from(bld.bldnode)for a in self.inputs]
+ tmp=None
+ try:
+ if len(str(files))+len(str(cmd))>8192:
+ (fd,tmp)=tempfile.mkstemp(dir=bld.bldnode.abspath())
+ try:
+ os.write(fd,'\n'.join(files))
+ finally:
+ if tmp:
+ os.close(fd)
+ if Logs.verbose:
+ Logs.debug('runner: %r'%(cmd+files))
+ cmd.append('@'+tmp)
+ else:
+ cmd+=files
+ ret=self.exec_command(cmd,cwd=wd,env=env.env or None)
+ finally:
+ if tmp:
+ os.unlink(tmp)
+ return ret
+ def post_run(self):
+ for n in self.generator.outdir.ant_glob('**/*.class'):
+ n.sig=Utils.h_file(n.abspath())
+ self.generator.bld.task_sigs[self.uid()]=self.cache_sig
+ at feature('javadoc')
+ at after_method('process_rule')
+def create_javadoc(self):
+ tsk=self.create_task('javadoc')
+ tsk.classpath=getattr(self,'classpath',[])
+ self.javadoc_package=Utils.to_list(self.javadoc_package)
+ if not isinstance(self.javadoc_output,Node.Node):
+ self.javadoc_output=self.bld.path.find_or_declare(self.javadoc_output)
+class javadoc(Task.Task):
+ color='BLUE'
+ def __str__(self):
+ return'%s: %s -> %s\n'%(self.__class__.__name__,self.generator.srcdir,self.generator.javadoc_output)
+ def run(self):
+ env=self.env
+ bld=self.generator.bld
+ wd=bld.bldnode.abspath()
+ srcpath=self.generator.path.abspath()+os.sep+self.generator.srcdir
+ srcpath+=os.pathsep
+ srcpath+=self.generator.path.get_bld().abspath()+os.sep+self.generator.srcdir
+ classpath=env.CLASSPATH
+ classpath+=os.pathsep
+ classpath+=os.pathsep.join(self.classpath)
+ classpath="".join(classpath)
+ self.last_cmd=lst=[]
+ lst.extend(Utils.to_list(env['JAVADOC']))
+ lst.extend(['-d',self.generator.javadoc_output.abspath()])
+ lst.extend(['-sourcepath',srcpath])
+ lst.extend(['-classpath',classpath])
+ lst.extend(['-subpackages'])
+ lst.extend(self.generator.javadoc_package)
+ lst=[x for x in lst if x]
+ self.generator.bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0)
+ def post_run(self):
+ nodes=self.generator.javadoc_output.ant_glob('**')
+ for x in nodes:
+ x.sig=Utils.h_file(x.abspath())
+ self.generator.bld.task_sigs[self.uid()]=self.cache_sig
+def configure(self):
+ java_path=self.environ['PATH'].split(os.pathsep)
+ v=self.env
+ if'JAVA_HOME'in self.environ:
+ java_path=[os.path.join(self.environ['JAVA_HOME'],'bin')]+java_path
+ self.env['JAVA_HOME']=[self.environ['JAVA_HOME']]
+ for x in'javac java jar javadoc'.split():
+ self.find_program(x,var=x.upper(),path_list=java_path)
+ self.env[x.upper()]=self.cmd_to_list(self.env[x.upper()])
+ if'CLASSPATH'in self.environ:
+ v['CLASSPATH']=self.environ['CLASSPATH']
+ if not v['JAR']:self.fatal('jar is required for making java packages')
+ if not v['JAVAC']:self.fatal('javac is required for compiling java classes')
+ v['JARCREATE']='cf'
+ v['JAVACFLAGS']=[]
+ at conf
+def check_java_class(self,classname,with_classpath=None):
+ javatestdir='.waf-javatest'
+ classpath=javatestdir
+ if self.env['CLASSPATH']:
+ classpath+=os.pathsep+self.env['CLASSPATH']
+ if isinstance(with_classpath,str):
+ classpath+=os.pathsep+with_classpath
+ shutil.rmtree(javatestdir,True)
+ os.mkdir(javatestdir)
+ java_file=open(os.path.join(javatestdir,'Test.java'),'w')
+ java_file.write(class_check_source)
+ java_file.close()
+ self.exec_command(self.env['JAVAC']+[os.path.join(javatestdir,'Test.java')],shell=False)
+ cmd=self.env['JAVA']+['-cp',classpath,'Test',classname]
+ self.to_log("%s\n"%str(cmd))
+ found=self.exec_command(cmd,shell=False)
+ self.msg('Checking for java class %s'%classname,not found)
+ shutil.rmtree(javatestdir,True)
+ return found
+ at conf
+def check_jni_headers(conf):
+ if not conf.env.CC_NAME and not conf.env.CXX_NAME:
+ conf.fatal('load a compiler first (gcc, g++, ..)')
+ if not conf.env.JAVA_HOME:
+ conf.fatal('set JAVA_HOME in the system environment')
+ javaHome=conf.env['JAVA_HOME'][0]
+ dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/include')
+ if dir is None:
+ dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/../Headers')
+ if dir is None:
+ conf.fatal('JAVA_HOME does not seem to be set properly')
+ f=dir.ant_glob('**/(jni|jni_md).h')
+ incDirs=[x.parent.abspath()for x in f]
+ dir=conf.root.find_dir(conf.env.JAVA_HOME[0])
+ f=dir.ant_glob('**/*jvm.(so|dll|dylib)')
+ libDirs=[x.parent.abspath()for x in f]or[javaHome]
+ f=dir.ant_glob('**/*jvm.(lib)')
+ if f:
+ libDirs=[[x,y.parent.abspath()]for x in libDirs for y in f]
+ for d in libDirs:
+ try:
+ conf.check(header_name='jni.h',define_name='HAVE_JNI_H',lib='jvm',libpath=d,includes=incDirs,uselib_store='JAVA',uselib='JAVA')
+ except Exception:
+ pass
+ else:
+ break
+ else:
+ conf.fatal('could not find lib jvm in %r (see config.log)'%libDirs)
diff --git a/waflib/Tools/kde4.py b/waflib/Tools/kde4.py
new file mode 100644
index 0000000..cd51f5f
--- /dev/null
+++ b/waflib/Tools/kde4.py
@@ -0,0 +1,48 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys,re
+from waflib import Options,TaskGen,Task,Utils
+from waflib.TaskGen import feature,after_method
+ at feature('msgfmt')
+def apply_msgfmt(self):
+ for lang in self.to_list(self.langs):
+ node=self.path.find_resource(lang+'.po')
+ task=self.create_task('msgfmt',node,node.change_ext('.mo'))
+ langname=lang.split('/')
+ langname=langname[-1]
+ inst=getattr(self,'install_path','${KDE4_LOCALE_INSTALL_DIR}')
+ self.bld.install_as(inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+getattr(self,'appname','set_your_appname')+'.mo',task.outputs[0],chmod=getattr(self,'chmod',Utils.O644))
+class msgfmt(Task.Task):
+ color='BLUE'
+ run_str='${MSGFMT} ${SRC} -o ${TGT}'
+def configure(self):
+ kdeconfig=self.find_program('kde4-config')
+ prefix=self.cmd_and_log('%s --prefix'%kdeconfig).strip()
+ fname='%s/share/apps/cmake/modules/KDELibsDependencies.cmake'%prefix
+ try:os.stat(fname)
+ except OSError:
+ fname='%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake'%prefix
+ try:os.stat(fname)
+ except OSError:self.fatal('could not open %s'%fname)
+ try:
+ txt=Utils.readf(fname)
+ except(OSError,IOError):
+ self.fatal('could not read %s'%fname)
+ txt=txt.replace('\\\n','\n')
+ fu=re.compile('#(.*)\n')
+ txt=fu.sub('',txt)
+ setregexp=re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
+ found=setregexp.findall(txt)
+ for(_,key,val)in found:
+ self.env[key]=val
+ self.env['LIB_KDECORE']=['kdecore']
+ self.env['LIB_KDEUI']=['kdeui']
+ self.env['LIB_KIO']=['kio']
+ self.env['LIB_KHTML']=['khtml']
+ self.env['LIB_KPARTS']=['kparts']
+ self.env['LIBPATH_KDECORE']=[os.path.join(self.env.KDE4_LIB_INSTALL_DIR,'kde4','devel'),self.env.KDE4_LIB_INSTALL_DIR]
+ self.env['INCLUDES_KDECORE']=[self.env['KDE4_INCLUDE_INSTALL_DIR']]
+ self.env.append_value('INCLUDES_KDECORE',[self.env['KDE4_INCLUDE_INSTALL_DIR']+os.sep+'KDE'])
+ self.find_program('msgfmt',var='MSGFMT')
diff --git a/waflib/Tools/ldc2.py b/waflib/Tools/ldc2.py
new file mode 100644
index 0000000..25b99e5
--- /dev/null
+++ b/waflib/Tools/ldc2.py
@@ -0,0 +1,37 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import sys
+from waflib.Tools import ar,d
+from waflib.Configure import conf
+ at conf
+def find_ldc2(conf):
+ conf.find_program(['ldc2'],var='D')
+ out=conf.cmd_and_log([conf.env.D,'-version'])
+ if out.find("based on DMD v2.")==-1:
+ conf.fatal("detected compiler is not ldc2")
+ at conf
+def common_flags_ldc2(conf):
+ v=conf.env
+ v['D_SRC_F']=['-c']
+ v['D_TGT_F']='-of%s'
+ v['D_LINKER']=v['D']
+ v['DLNK_SRC_F']=''
+ v['DLNK_TGT_F']='-of%s'
+ v['DINC_ST']='-I%s'
+ v['DSHLIB_MARKER']=v['DSTLIB_MARKER']=''
+ v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s'
+ v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s'
+ v['LINKFLAGS_dshlib']=['-L-shared']
+ v['DHEADER_ext']='.di'
+ v['DFLAGS_d_with_header']=['-H','-Hf']
+ v['D_HDR_F']='%s'
+ v['LINKFLAGS']=[]
+ v['DFLAGS_dshlib']=['-relocation-model=pic']
+def configure(conf):
+ conf.find_ldc2()
+ conf.load('ar')
+ conf.load('d')
+ conf.common_flags_ldc2()
+ conf.d_platform_flags()
diff --git a/waflib/Tools/lua.py b/waflib/Tools/lua.py
new file mode 100644
index 0000000..a0a35fc
--- /dev/null
+++ b/waflib/Tools/lua.py
@@ -0,0 +1,18 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+from waflib.TaskGen import extension
+from waflib import Task,Utils
+ at extension('.lua')
+def add_lua(self,node):
+ tsk=self.create_task('luac',node,node.change_ext('.luac'))
+ inst_to=getattr(self,'install_path',self.env.LUADIR and'${LUADIR}'or None)
+ if inst_to:
+ self.bld.install_files(inst_to,tsk.outputs)
+ return tsk
+class luac(Task.Task):
+ run_str='${LUAC} -s -o ${TGT} ${SRC}'
+ color='PINK'
+def configure(conf):
+ conf.find_program('luac',var='LUAC')
diff --git a/waflib/Tools/msvc.py b/waflib/Tools/msvc.py
new file mode 100644
index 0000000..289cd47
--- /dev/null
+++ b/waflib/Tools/msvc.py
@@ -0,0 +1,726 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys,re,tempfile
+from waflib import Utils,Task,Logs,Options
+from waflib.Logs import debug,warn
+from waflib.TaskGen import after_method,feature
+from waflib.Configure import conf
+from waflib.Tools import ccroot,c,cxx,ar,winres
+g_msvc_systemlibs='''
+aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
+cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
+credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
+ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
+faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid
+gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
+kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
+mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
+msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
+netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
+odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32
+osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
+ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
+rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
+shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
+traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
+version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm
+wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
+'''.split()
+all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64')]
+all_wince_platforms=[('armv4','arm'),('armv4i','arm'),('mipsii','mips'),('mipsii_fp','mips'),('mipsiv','mips'),('mipsiv_fp','mips'),('sh4','sh'),('x86','cex86')]
+all_icl_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')]
def options(opt):
	"""Add the --msvc_version and --msvc_targets command-line options."""
	opt.add_option('--msvc_version', type='string', help='msvc version, eg: "msvc 10.0,msvc 9.0"', default='')
	opt.add_option('--msvc_targets', type='string', help='msvc targets, eg: "x64,arm"', default='')
def setup_msvc(conf, versions, arch=False):
	"""Select the first usable (version, target) pair from *versions*.

	User preferences come from --msvc_version/--msvc_targets (or the
	MSVC_VERSIONS/MSVC_TARGETS env values); returns
	(compiler, revision, bindirs, incdirs, libdirs[, arch]) — the trailing
	arch element only when *arch* is truthy.  Fatal error when nothing matches.
	"""
	platforms = getattr(Options.options, 'msvc_targets', '').split(',')
	if platforms == ['']:
		# no explicit target requested: accept every known platform
		platforms = Utils.to_list(conf.env['MSVC_TARGETS']) or [i for i, j in all_msvc_platforms + all_icl_platforms + all_wince_platforms]
	desired_versions = getattr(Options.options, 'msvc_version', '').split(',')
	if desired_versions == ['']:
		# default preference order: most recent detected version first
		desired_versions = conf.env['MSVC_VERSIONS'] or [v for v, _ in versions][::-1]
	versiondict = dict(versions)
	for version in desired_versions:
		try:
			targets = dict(versiondict[version])
			for target in platforms:
				try:
					# NOTE(review): this rebinds the *arch* parameter with the stored value
					arch, (p1, p2, p3) = targets[target]
					compiler, revision = version.rsplit(' ', 1)
					if arch:
						return compiler, revision, p1, p2, p3, arch
					else:
						return compiler, revision, p1, p2, p3
				except KeyError: continue
		except KeyError: continue
	conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
@conf
def get_msvc_version(conf, compiler, version, target, vcvars):
	"""Run the *vcvars* batch file for (*compiler*, *version*, *target*) and
	harvest the resulting PATH/INCLUDE/LIB environment.

	Returns (bin_dirs, include_dirs, lib_dirs); raises a fatal configuration
	error when the environment cannot be set up or the compiler cannot run.
	"""
	debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
	batfile = conf.bldnode.make_node('waf-print-msvc.bat')
	batfile.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%
""" % (vcvars, target))
	sout = conf.cmd_and_log(['cmd', '/E:on', '/V:on', '/C', batfile.abspath()])
	lines = sout.splitlines()
	if not lines[0]:
		lines.pop(0)
	if version == '11.0':
		if lines[0].startswith('Error'):
			conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_1)')
	else:
		# skip the banner line printed by the various vcvars scripts
		for x in ('Setting environment', 'Setting SDK environment', 'Intel(R) C++ Compiler', 'Intel Parallel Studio', 'Intel(R) Parallel Studio', 'Intel(R) Composer', 'Intel Corporation. All rights reserved.'):
			if lines[0].find(x) > -1:
				lines.pop(0)
				break
		else:
			debug('msvc: get_msvc_version: %r %r %r -> not found', compiler, version, target)
			conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_2)')
	MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
	for line in lines:
		if line.startswith('PATH='):
			path = line[5:]
			MSVC_PATH = path.split(';')
		elif line.startswith('INCLUDE='):
			MSVC_INCDIR = [i for i in line[8:].split(';') if i]
		elif line.startswith('LIB='):
			MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
	if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
		conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)')
	env = dict(os.environ)
	env.update(PATH=path)
	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
	cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
	cxx = conf.cmd_to_list(cxx)
	if 'CL' in env:
		# a CL environment variable would inject extra flags into the probe below
		del(env['CL'])
	try:
		try:
			conf.cmd_and_log(cxx + ['/help'], env=env)
		except Exception as e:
			# bugfix: was the Python2-only 'except Exception ,e:' syntax (SyntaxError on Python 3)
			debug('msvc: get_msvc_version: %r %r %r -> failure' % (compiler, version, target))
			debug(str(e))
			conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
		else:
			debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
	finally:
		conf.env[compiler_name] = ''
	return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
@conf
def gather_wsdk_versions(conf, versions):
	"""Scan the registry for Windows SDK installs; append ('wsdk <ver>', targets) entries to *versions*."""
	version_pattern = re.compile('^v..?.?\...?.?')
	try:
		all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
	except WindowsError:
		try:
			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
		except WindowsError:
			return
	index = 0
	while 1:
		try:
			version = Utils.winreg.EnumKey(all_versions, index)
		except WindowsError:
			break  # no more registry subkeys
		index = index + 1
		if not version_pattern.match(version):
			continue
		try:
			msvc_version = Utils.winreg.OpenKey(all_versions, version)
			path, type = Utils.winreg.QueryValueEx(msvc_version, 'InstallationFolder')
		except WindowsError:
			continue
		if os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
			targets = []
			for target, arch in all_msvc_platforms:
				try:
					targets.append((target, (arch, conf.get_msvc_version('wsdk', version, '/' + target, os.path.join(path, 'bin', 'SetEnv.cmd')))))
				except conf.errors.ConfigurationError:
					pass
			# strip the leading 'v' from the registry key name
			versions.append(('wsdk ' + version[1:], targets))
def gather_wince_supported_platforms():
	"""Scan the registry for installed Windows CE SDKs.

	Returns a list of (device, [(arch, compiler, include_dir, lib_dir)])
	entries for each SDK/architecture whose Lib directory exists.
	"""
	supported_wince_platforms = []
	try:
		ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
	except WindowsError:
		try:
			ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
		except WindowsError:
			ce_sdk = ''
	if not ce_sdk:
		return supported_wince_platforms
	ce_index = 0
	while 1:
		try:
			sdk_device = Utils.winreg.EnumKey(ce_sdk, ce_index)
		except WindowsError:
			break
		ce_index = ce_index + 1
		sdk = Utils.winreg.OpenKey(ce_sdk, sdk_device)
		try:
			path, type = Utils.winreg.QueryValueEx(sdk, 'SDKRootDir')
		except WindowsError:
			try:
				# fall back to the SDKInformation xml path and take its directory
				path, type = Utils.winreg.QueryValueEx(sdk, 'SDKInformation')
				path, xml = os.path.split(path)
			except WindowsError:
				continue
		path = str(path)
		path, device = os.path.split(path)
		if not device:
			# path had a trailing separator: split once more for the device name
			path, device = os.path.split(path)
		for arch, compiler in all_wince_platforms:
			platforms = []
			if os.path.isdir(os.path.join(path, device, 'Lib', arch)):
				platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch)))
			if platforms:
				supported_wince_platforms.append((device, platforms))
	return supported_wince_platforms
def gather_msvc_detected_versions():
	"""Enumerate Visual Studio / VC Express versions from the registry.

	Returns [(float_version, 'N.M[Exp]', registry_path), ...] sorted by
	the numeric version.
	"""
	version_pattern = re.compile('^(\d\d?\.\d\d?)(Exp)?$')
	detected_versions = []
	for vcver, vcvar in [('VCExpress', 'Exp'), ('VisualStudio', '')]:
		try:
			prefix = 'SOFTWARE\\Wow6432node\\Microsoft\\' + vcver
			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
		except WindowsError:
			try:
				prefix = 'SOFTWARE\\Microsoft\\' + vcver
				all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
			except WindowsError:
				continue
		index = 0
		while 1:
			try:
				version = Utils.winreg.EnumKey(all_versions, index)
			except WindowsError:
				break
			index = index + 1
			match = version_pattern.match(version)
			if not match:
				continue
			else:
				versionnumber = float(match.group(1))
			detected_versions.append((versionnumber, version + vcvar, prefix + "\\" + version))
	def fun(tup):
		# sort key: the numeric version component
		return tup[0]
	detected_versions.sort(key=fun)
	return detected_versions
@conf
def gather_msvc_targets(conf, versions, version, vc_path):
	"""Probe a Visual C++ install at *vc_path*; append ('msvc <version>', targets) to *versions*."""
	targets = []
	if os.path.isfile(os.path.join(vc_path, 'vcvarsall.bat')):
		# modern installs: one vcvarsall.bat handles every target architecture
		for target, realtarget in all_msvc_platforms[::-1]:
			try:
				targets.append((target, (realtarget, conf.get_msvc_version('msvc', version, target, os.path.join(vc_path, 'vcvarsall.bat')))))
			except conf.errors.ConfigurationError:
				pass
	elif os.path.isfile(os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')):
		# older installs: x86-only vsvars32.bat
		try:
			targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, 'x86', os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')))))
		except conf.errors.ConfigurationError:
			pass
	elif os.path.isfile(os.path.join(vc_path, 'Bin', 'vcvars32.bat')):
		try:
			targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat')))))
		except conf.errors.ConfigurationError:
			pass
	versions.append(('msvc ' + version, targets))
@conf
def gather_wince_targets(conf, versions, version, vc_path, vsvars, supported_platforms):
	"""Append ('<device> <version>', targets) entries for each Windows CE device SDK."""
	for device, platforms in supported_platforms:
		cetargets = []
		for platform, compiler, include, lib in platforms:
			winCEpath = os.path.join(vc_path, 'ce')
			if not os.path.isdir(winCEpath):
				continue
			try:
				# the CE cross tools reuse the x86 host bin directories
				common_bindirs, _1, _2 = conf.get_msvc_version('msvc', version, 'x86', vsvars)
			except conf.errors.ConfigurationError:
				continue
			if os.path.isdir(os.path.join(winCEpath, 'lib', platform)):
				bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_' + compiler)] + common_bindirs
				incdirs = [os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include'), include]
				libdirs = [os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform), lib]
				cetargets.append((platform, (platform, (bindirs, incdirs, libdirs))))
		if cetargets:
			versions.append((device + ' ' + version, cetargets))
@conf
def gather_msvc_versions(conf, versions):
	"""Look up Visual C++ product directories in the registry, then gather CE and desktop targets."""
	vc_paths = []
	for (v, version, reg) in gather_msvc_detected_versions():
		try:
			try:
				msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\VC")
			except WindowsError:
				msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\Microsoft Visual C++")
			path, type = Utils.winreg.QueryValueEx(msvc_version, 'ProductDir')
			vc_paths.append((version, os.path.abspath(str(path))))
		except WindowsError:
			continue
	wince_supported_platforms = gather_wince_supported_platforms()
	# Windows CE targets first...
	for version, vc_path in vc_paths:
		vs_path = os.path.dirname(vc_path)
		vsvars = os.path.join(vs_path, 'Common7', 'Tools', 'vsvars32.bat')
		if wince_supported_platforms and os.path.isfile(vsvars):
			conf.gather_wince_targets(versions, version, vc_path, vsvars, wince_supported_platforms)
	# ...then the desktop targets
	for version, vc_path in vc_paths:
		vs_path = os.path.dirname(vc_path)
		conf.gather_msvc_targets(versions, version, vc_path)
@conf
def gather_icl_versions(conf, versions):
	"""Scan the registry for Intel C++ compilers; append ('intel <major>', targets) to *versions*."""
	version_pattern = re.compile('^...?.?\....?.?')
	try:
		all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
	except WindowsError:
		try:
			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
		except WindowsError:
			return
	index = 0
	while 1:
		try:
			version = Utils.winreg.EnumKey(all_versions, index)
		except WindowsError:
			break
		index = index + 1
		if not version_pattern.match(version):
			continue
		targets = []
		for target, arch in all_icl_platforms:
			try:
				if target == 'intel64': targetDir = 'EM64T_NATIVE'
				else: targetDir = target
				Utils.winreg.OpenKey(all_versions, version + '\\' + targetDir)
				icl_version = Utils.winreg.OpenKey(all_versions, version)
				path, type = Utils.winreg.QueryValueEx(icl_version, 'ProductDir')
				batch_file = os.path.join(path, 'bin', 'iclvars.bat')
				if os.path.isfile(batch_file):
					try:
						targets.append((target, (arch, conf.get_msvc_version('intel', version, target, batch_file))))
					except conf.errors.ConfigurationError:
						pass
			except WindowsError:
				pass
		# NOTE(review): this second pass probes per-target subkeys directly and may
		# append duplicates for targets already found above — confirm this is intended
		for target, arch in all_icl_platforms:
			try:
				icl_version = Utils.winreg.OpenKey(all_versions, version + '\\' + target)
				path, type = Utils.winreg.QueryValueEx(icl_version, 'ProductDir')
				batch_file = os.path.join(path, 'bin', 'iclvars.bat')
				if os.path.isfile(batch_file):
					try:
						targets.append((target, (arch, conf.get_msvc_version('intel', version, target, batch_file))))
					except conf.errors.ConfigurationError:
						pass
			except WindowsError:
				continue
		major = version[0:2]
		versions.append(('intel ' + major, targets))
@conf
def gather_intel_composer_versions(conf, versions):
	"""Scan the registry for Intel Composer suites; append ('intel <major>', targets) to *versions*.

	Also warns about a known Intel 13 compilervars_arch.bat defect that makes
	configuration fail when only a non-Express Visual Studio is installed.
	"""
	version_pattern = re.compile('^...?.?\...?.?.?')
	try:
		all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Suites')
	except WindowsError:
		try:
			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Suites')
		except WindowsError:
			return
	index = 0
	while 1:
		try:
			version = Utils.winreg.EnumKey(all_versions, index)
		except WindowsError:
			break
		index = index + 1
		if not version_pattern.match(version):
			continue
		targets = []
		for target, arch in all_icl_platforms:
			try:
				if target == 'intel64': targetDir = 'EM64T_NATIVE'
				else: targetDir = target
				try:
					defaults = Utils.winreg.OpenKey(all_versions, version + '\\Defaults\\C++\\' + targetDir)
				except WindowsError:
					if targetDir == 'EM64T_NATIVE':
						# older suites register the key without the _NATIVE suffix
						defaults = Utils.winreg.OpenKey(all_versions, version + '\\Defaults\\C++\\EM64T')
					else:
						raise WindowsError
				uid, type = Utils.winreg.QueryValueEx(defaults, 'SubKey')
				Utils.winreg.OpenKey(all_versions, version + '\\' + uid + '\\C++\\' + targetDir)
				icl_version = Utils.winreg.OpenKey(all_versions, version + '\\' + uid + '\\C++')
				path, type = Utils.winreg.QueryValueEx(icl_version, 'ProductDir')
				batch_file = os.path.join(path, 'bin', 'iclvars.bat')
				if os.path.isfile(batch_file):
					try:
						targets.append((target, (arch, conf.get_msvc_version('intel', version, target, batch_file))))
					except conf.errors.ConfigurationError:
						# bugfix: was the Python2-only 'except ... ,e:' syntax; the binding was unused
						pass
				compilervars_warning_attr = '_compilervars_warning_key'
				if version[0:2] == '13' and getattr(conf, compilervars_warning_attr, True):
					setattr(conf, compilervars_warning_attr, False)
					patch_url = 'http://software.intel.com/en-us/forums/topic/328487'
					compilervars_arch = os.path.join(path, 'bin', 'compilervars_arch.bat')
					vs_express_path = os.environ['VS110COMNTOOLS'] + r'..\IDE\VSWinExpress.exe'
					dev_env_path = os.environ['VS110COMNTOOLS'] + r'..\IDE\devenv.exe'
					if (r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"' in Utils.readf(compilervars_arch) and not os.path.exists(vs_express_path) and not os.path.exists(dev_env_path)):
						# bugfix: the original concatenated literals produced '%ris patched' (missing space)
						Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU '
							'(VSWinExpress.exe) but it does not seem to be installed at %r. '
							'The intel command line set up will fail to configure unless the file %r '
							'is patched. See: %s') % (vs_express_path, compilervars_arch, patch_url))
			except WindowsError:
				pass
		major = version[0:2]
		versions.append(('intel ' + major, targets))
@conf
def get_msvc_versions(conf):
	"""Return the cached list of detected (version, targets) pairs, populating it on first use."""
	if not conf.env['MSVC_INSTALLED_VERSIONS']:
		found = []
		# detection order matters: icl, intel composer, wsdk, then msvc
		for gather in (conf.gather_icl_versions, conf.gather_intel_composer_versions, conf.gather_wsdk_versions, conf.gather_msvc_versions):
			gather(found)
		conf.env['MSVC_INSTALLED_VERSIONS'] = found
	return conf.env['MSVC_INSTALLED_VERSIONS']
@conf
def print_all_msvc_detected(conf):
	"""Log every detected compiler version together with its targets."""
	for version, targets in conf.env['MSVC_INSTALLED_VERSIONS']:
		Logs.info(version)
		for target, _unused in targets:
			Logs.info("\t" + target)
@conf
def detect_msvc(conf, arch=False):
	"""Detect installed MSVC-family compilers and return the selected setup tuple (see setup_msvc)."""
	versions = get_msvc_versions(conf)
	return setup_msvc(conf, versions, arch)
@conf
def find_lt_names_msvc(self, libname, is_static=False):
	"""Inspect libtool .la files on LIBPATH for *libname*.

	Returns (directory, library_name, is_static_flag), or (None, None, None)
	when no .la file matches.  Raises WafError on a malformed .la file.
	"""
	lt_names = ['lib%s.la' % libname, '%s.la' % libname, ]
	for path in self.env['LIBPATH']:
		for la in lt_names:
			laf = os.path.join(path, la)
			dll = None
			if os.path.exists(laf):
				ltdict = Utils.read_la_file(laf)
				lt_libdir = None
				if ltdict.get('libdir', ''):
					lt_libdir = ltdict['libdir']
				if not is_static and ltdict.get('library_names', ''):
					# prefer the shared library when one is recorded
					dllnames = ltdict['library_names'].split()
					dll = dllnames[0].lower()
					dll = re.sub('\.dll$', '', dll)
					return (lt_libdir, dll, False)
				elif ltdict.get('old_library', ''):
					olib = ltdict['old_library']
					if os.path.exists(os.path.join(path, olib)):
						return (path, olib, True)
					elif lt_libdir != '' and os.path.exists(os.path.join(lt_libdir, olib)):
						return (lt_libdir, olib, True)
					else:
						# recorded but not found on disk
						return (None, olib, True)
				else:
					raise self.errors.WafError('invalid libtool object file: %s' % laf)
	return (None, None, None)
@conf
def libname_msvc(self, libname, is_static=False):
	"""Map a unix-style library name to the MSVC .lib name to link.

	Order: known system libraries, libtool .la lookup, then the static/dynamic
	naming patterns on LIBPATH.  Fatal error when nothing matches.
	"""
	lib = libname.lower()
	lib = re.sub('\.lib$', '', lib)
	if lib in g_msvc_systemlibs:
		return lib
	lib = re.sub('^lib', '', lib)
	if lib == 'm':
		# there is no separate math library with MSVC
		return None
	(lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static)
	if lt_path != None and lt_libname != None:
		if lt_static == True:
			# existence already verified by find_lt_names_msvc
			return os.path.join(lt_path, lt_libname)
	if lt_path != None:
		_libpaths = [lt_path] + self.env['LIBPATH']
	else:
		_libpaths = self.env['LIBPATH']
	static_libs = ['lib%ss.lib' % lib, 'lib%s.lib' % lib, '%ss.lib' % lib, '%s.lib' % lib, ]
	dynamic_libs = ['lib%s.dll.lib' % lib, 'lib%s.dll.a' % lib, '%s.dll.lib' % lib, '%s.dll.a' % lib, 'lib%s_d.lib' % lib, '%s_d.lib' % lib, '%s.lib' % lib, ]
	libnames = static_libs
	if not is_static:
		# try import libraries before the static names
		libnames = dynamic_libs + static_libs
	for path in _libpaths:
		for libn in libnames:
			if os.path.exists(os.path.join(path, libn)):
				debug('msvc: lib found: %s' % os.path.join(path, libn))
				return re.sub('\.lib$', '', libn)
	self.fatal("The library %r could not be found" % libname)
	# normally unreachable: conf.fatal raises
	return re.sub('\.lib$', '', libname)
@conf
def check_lib_msvc(self, libname, is_static=False, uselib_store=None):
	"""Resolve *libname* and record it as LIB_<store> (store defaults to LIBNAME upper-cased)."""
	libn = self.libname_msvc(libname, is_static)
	if not uselib_store:
		uselib_store = libname.upper()
	if False and is_static:
		# deliberately disabled branch: static libs are also recorded under LIB_, not STLIB_
		self.env['STLIB_' + uselib_store] = [libn]
	else:
		self.env['LIB_' + uselib_store] = [libn]
@conf
def check_libs_msvc(self, libnames, is_static=False):
	"""Run check_lib_msvc for each name in *libnames* (space-separated string or list)."""
	for libname in Utils.to_list(libnames):
		self.check_lib_msvc(libname, is_static)
def configure(conf):
	"""Standard configuration entry point: detect MSVC, find the tools, set all flags."""
	conf.autodetect(True)
	conf.find_msvc()
	conf.msvc_common_flags()
	conf.cc_load_tools()
	conf.cxx_load_tools()
	conf.cc_add_flags()
	conf.cxx_add_flags()
	conf.link_add_flags()
	conf.visual_studio_add_flags()
@conf
def no_autodetect(conf):
	"""Run the normal configuration with compiler auto-detection disabled."""
	conf.env.NO_MSVC_DETECT = 1
	configure(conf)
@conf
def autodetect(conf, arch=False):
	"""Detect the MSVC installation and populate PATH/INCLUDES/LIBPATH plus compiler/version info."""
	v = conf.env
	if v.NO_MSVC_DETECT:
		return
	if arch:
		compiler, version, path, includes, libdirs, arch = conf.detect_msvc(True)
		v['DEST_CPU'] = arch
	else:
		compiler, version, path, includes, libdirs = conf.detect_msvc()
	v['PATH'] = path
	v['INCLUDES'] = includes
	v['LIBPATH'] = libdirs
	v['MSVC_COMPILER'] = compiler
	try:
		v['MSVC_VERSION'] = float(version)
	except Exception:
		# e.g. '11.0Exp': drop the 3-character 'Exp' suffix before parsing
		v['MSVC_VERSION'] = float(version[:-3])
+def _get_prog_names(conf,compiler):
+ if compiler=='intel':
+ compiler_name='ICL'
+ linker_name='XILINK'
+ lib_name='XILIB'
+ else:
+ compiler_name='CL'
+ linker_name='LINK'
+ lib_name='LIB'
+ return compiler_name,linker_name,lib_name
@conf
def find_msvc(conf):
	"""Locate the detected compiler/linker/archiver programs and verify the compiler runs."""
	if sys.platform == 'cygwin':
		conf.fatal('MSVC module does not work under cygwin Python!')
	v = conf.env
	path = v['PATH']
	compiler = v['MSVC_COMPILER']
	version = v['MSVC_VERSION']
	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
	# manifests are needed from VS2005 / WSDK 6 / Intel 11 onwards
	v.MSVC_MANIFEST = (compiler == 'msvc' and version >= 8) or (compiler == 'wsdk' and version >= 6) or (compiler == 'intel' and version >= 11)
	cxx = None
	if v['CXX']: cxx = v['CXX']
	elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
	cxx = conf.find_program(compiler_name, var='CXX', path_list=path)
	cxx = conf.cmd_to_list(cxx)
	env = dict(conf.environ)
	if path: env.update(PATH=';'.join(path))
	if not conf.cmd_and_log(cxx + ['/nologo', '/help'], env=env):
		conf.fatal('the msvc compiler could not be identified')
	# the same binary serves as C and C++ compiler
	v['CC'] = v['CXX'] = cxx
	v['CC_NAME'] = v['CXX_NAME'] = 'msvc'
	if not v['LINK_CXX']:
		link = conf.find_program(linker_name, path_list=path)
		if link: v['LINK_CXX'] = link
		else: conf.fatal('%s was not found (linker)' % linker_name)
	# NOTE(review): 'link' is only bound inside the branch above; if LINK_CXX was
	# already set this line raises NameError — confirm against upstream behaviour
	v['LINK'] = link
	if not v['LINK_CC']:
		v['LINK_CC'] = v['LINK_CXX']
	if not v['AR']:
		stliblink = conf.find_program(lib_name, path_list=path, var='AR')
		if not stliblink: return
		v['ARFLAGS'] = ['/NOLOGO']
	if v.MSVC_MANIFEST:
		conf.find_program('MT', path_list=path, var='MT')
		v['MTFLAGS'] = ['/NOLOGO']
	conf.load('winres')
	if not conf.env['WINRC']:
		warn('Resource compiler not found. Compiling resource file is disabled')
@conf
def visual_studio_add_flags(self):
	"""Prepend the INCLUDE and LIB environment variables to INCLUDES/LIBPATH."""
	v = self.env
	for envvar, destvar in (('INCLUDE', 'INCLUDES'), ('LIB', 'LIBPATH')):
		try:
			v.prepend_value(destvar, [entry for entry in self.environ[envvar].split(';') if entry])
		except Exception:
			# variable absent or unusable: ignore
			pass
@conf
def msvc_common_flags(conf):
	"""Fill conf.env with the flag formats and file patterns shared by all msvc variants."""
	v = conf.env
	v['DEST_BINFMT'] = 'pe'
	v.append_value('CFLAGS', ['/nologo'])
	v.append_value('CXXFLAGS', ['/nologo'])
	v['DEFINES_ST'] = '/D%s'
	# compiler invocation
	v['CC_SRC_F'] = ''
	v['CC_TGT_F'] = ['/c', '/Fo']
	if v['MSVC_VERSION'] >= 8:
		# /FC prints full source paths in diagnostics (available from VS2005)
		v['CC_TGT_F'] = ['/FC'] + v['CC_TGT_F']
	v['CXX_SRC_F'] = ''
	v['CXX_TGT_F'] = ['/c', '/Fo']
	if v['MSVC_VERSION'] >= 8:
		v['CXX_TGT_F'] = ['/FC'] + v['CXX_TGT_F']
	v['CPPPATH_ST'] = '/I%s'
	v['AR_TGT_F'] = v['CCLNK_TGT_F'] = v['CXXLNK_TGT_F'] = '/OUT:'
	# subsystem selection flags
	v['CFLAGS_CONSOLE'] = v['CXXFLAGS_CONSOLE'] = ['/SUBSYSTEM:CONSOLE']
	v['CFLAGS_NATIVE'] = v['CXXFLAGS_NATIVE'] = ['/SUBSYSTEM:NATIVE']
	v['CFLAGS_POSIX'] = v['CXXFLAGS_POSIX'] = ['/SUBSYSTEM:POSIX']
	v['CFLAGS_WINDOWS'] = v['CXXFLAGS_WINDOWS'] = ['/SUBSYSTEM:WINDOWS']
	v['CFLAGS_WINDOWSCE'] = v['CXXFLAGS_WINDOWSCE'] = ['/SUBSYSTEM:WINDOWSCE']
	# C runtime library selection
	v['CFLAGS_CRT_MULTITHREADED'] = v['CXXFLAGS_CRT_MULTITHREADED'] = ['/MT']
	v['CFLAGS_CRT_MULTITHREADED_DLL'] = v['CXXFLAGS_CRT_MULTITHREADED_DLL'] = ['/MD']
	v['CFLAGS_CRT_MULTITHREADED_DBG'] = v['CXXFLAGS_CRT_MULTITHREADED_DBG'] = ['/MTd']
	v['CFLAGS_CRT_MULTITHREADED_DLL_DBG'] = v['CXXFLAGS_CRT_MULTITHREADED_DLL_DBG'] = ['/MDd']
	# linker flag formats
	v['LIB_ST'] = '%s.lib'
	v['LIBPATH_ST'] = '/LIBPATH:%s'
	v['STLIB_ST'] = '%s.lib'
	v['STLIBPATH_ST'] = '/LIBPATH:%s'
	v.append_value('LINKFLAGS', ['/NOLOGO'])
	if v['MSVC_MANIFEST']:
		v.append_value('LINKFLAGS', ['/MANIFEST'])
	# output file patterns for shared/static libraries and programs
	v['CFLAGS_cshlib'] = []
	v['CXXFLAGS_cxxshlib'] = []
	v['LINKFLAGS_cshlib'] = v['LINKFLAGS_cxxshlib'] = ['/DLL']
	v['cshlib_PATTERN'] = v['cxxshlib_PATTERN'] = '%s.dll'
	v['implib_PATTERN'] = '%s.lib'
	v['IMPLIB_ST'] = '/IMPLIB:%s'
	v['LINKFLAGS_cstlib'] = []
	v['cstlib_PATTERN'] = v['cxxstlib_PATTERN'] = '%s.lib'
	v['cprogram_PATTERN'] = v['cxxprogram_PATTERN'] = '%s.exe'
@after_method('apply_link')
@feature('c', 'cxx')
def apply_flags_msvc(self):
	"""Add the /subsystem flag and register the .pdb as a link output when /debug is used."""
	if self.env.CC_NAME != 'msvc' or not getattr(self, 'link_task', None):
		return
	is_static = isinstance(self.link_task, ccroot.stlink_task)
	subsystem = getattr(self, 'subsystem', '')
	if subsystem:
		subsystem = '/subsystem:%s' % subsystem
	# note: appended unconditionally (empty string when no subsystem was requested)
	flags = is_static and 'ARFLAGS' or 'LINKFLAGS'
	self.env.append_value(flags, subsystem)
	if not is_static:
		for f in self.env.LINKFLAGS:
			d = f.lower()
			if d[1:] == 'debug':
				# matches '/debug' or '-debug': declare the .pdb and install it too
				pdbnode = self.link_task.outputs[0].change_ext('.pdb')
				self.link_task.outputs.append(pdbnode)
				try:
					self.install_task.source.append(pdbnode)
				except AttributeError:
					pass
				break
@feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib')
@after_method('apply_link')
def apply_manifest(self):
	"""Declare the .manifest file as a link output and mark the task for manifest embedding."""
	if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST and getattr(self, 'link_task', None):
		out_node = self.link_task.outputs[0]
		man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
		self.link_task.outputs.append(man_node)
		self.link_task.do_manifest = True
def exec_mf(self):
	"""Embed the generated manifest into the linked binary with mt.exe; returns the exit status."""
	env = self.env
	mtool = env['MT']
	if not mtool:
		return 0
	self.do_manifest = False
	outfile = self.outputs[0].abspath()
	manifest = None
	for out_node in self.outputs:
		if out_node.name.endswith('.manifest'):
			manifest = out_node.abspath()
			break
	if manifest is None:
		# no manifest declared: nothing to embed
		return 0
	# resource id: 1 for executables, 2 for dlls
	mode = ''
	if 'cprogram' in self.generator.features or 'cxxprogram' in self.generator.features:
		mode = '1'
	elif 'cshlib' in self.generator.features or 'cxxshlib' in self.generator.features:
		mode = '2'
	debug('msvc: embedding manifest in mode %r' % mode)
	lst = []
	lst.append(env['MT'])
	lst.extend(Utils.to_list(env['MTFLAGS']))
	lst.extend(['-manifest', manifest])
	lst.append('-outputresource:%s;%s' % (outfile, mode))
	lst = [lst]
	return self.exec_command(*lst)
def quote_response_command(self, flag):
	"""Quote *flag* for use in an @response file when it contains spaces."""
	if ' ' not in flag:
		return flag
	# quote only the value part of known path-taking options
	for prefix in ('/LIBPATH:', '/IMPLIB:', '/OUT:', '/I'):
		if flag.startswith(prefix):
			return '%s"%s"' % (prefix, flag[len(prefix):])
	return '"%s"' % flag
def exec_response_command(self, cmd, **kw):
	"""Execute *cmd*, switching to an @response file when the command line would exceed the Windows limit."""
	try:
		tmp = None
		if sys.platform.startswith('win') and isinstance(cmd, list) and len(' '.join(cmd)) >= 8192:
			program = cmd[0]
			cmd = [self.quote_response_command(x) for x in cmd]
			(fd, tmp) = tempfile.mkstemp()
			# NOTE(review): os.write with a str is Python-2 style; Python 3 requires bytes — confirm target interpreter
			os.write(fd, '\r\n'.join(i.replace('\\', '\\\\') for i in cmd[1:]))
			os.close(fd)
			cmd = [program, '@' + tmp]
		ret = self.generator.bld.exec_command(cmd, **kw)
	finally:
		# always remove the temporary response file
		if tmp:
			try:
				os.remove(tmp)
			except OSError:
				pass
	return ret
def exec_command_msvc(self, *k, **kw):
	"""Run an msvc command: merge split value-taking flags, fix PATH, then embed the manifest if requested."""
	assert self.env['CC_NAME'] == 'msvc'
	if isinstance(k[0], list):
		lst = []
		carry = ''
		for a in k[0]:
			# '/Fo', '/doc' and trailing-colon flags take their value as the following element
			if a == '/Fo' or a == '/doc' or a[-1] == ':':
				carry = a
			else:
				lst.append(carry + a)
				carry = ''
		k = [lst]
	if self.env['PATH']:
		env = dict(self.env.env or os.environ)
		env.update(PATH=';'.join(self.env['PATH']))
		kw['env'] = env
	bld = self.generator.bld
	try:
		if not kw.get('cwd', None):
			kw['cwd'] = bld.cwd
	except AttributeError:
		bld.cwd = kw['cwd'] = bld.variant_dir
	ret = self.exec_response_command(k[0], **kw)
	if not ret and getattr(self, 'do_manifest', None):
		ret = self.exec_mf()
	return ret
def wrap_class(class_name):
	"""Derive the registered task class *class_name* so that msvc builds route through the msvc exec methods."""
	cls = Task.classes.get(class_name, None)
	if not cls:
		return None
	derived_class = type(class_name, (cls,), {})
	def exec_command(self, *k, **kw):
		if self.env['CC_NAME'] == 'msvc':
			return self.exec_command_msvc(*k, **kw)
		else:
			# non-msvc toolchains keep the base class behaviour
			return super(derived_class, self).exec_command(*k, **kw)
	derived_class.exec_command = exec_command
	derived_class.exec_response_command = exec_response_command
	derived_class.quote_response_command = quote_response_command
	derived_class.exec_command_msvc = exec_command_msvc
	derived_class.exec_mf = exec_mf
	return derived_class
# wrap every compile/link task class so msvc builds use the handlers above
for k in 'c cxx cprogram cxxprogram cshlib cxxshlib cstlib cxxstlib'.split():
	wrap_class(k)
diff --git a/waflib/Tools/nasm.py b/waflib/Tools/nasm.py
new file mode 100644
index 0000000..e3126dd
--- /dev/null
+++ b/waflib/Tools/nasm.py
@@ -0,0 +1,14 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import waflib.Tools.asm
+from waflib.TaskGen import feature
@feature('asm')
def apply_nasm_vars(self):
	"""Append the task generator's nasm_flags attribute to ASFLAGS."""
	self.env.append_value('ASFLAGS', self.to_list(getattr(self, 'nasm_flags', [])))
def configure(conf):
	"""Find nasm (or yasm), set the output-flag formats and load the asm tool."""
	# find_program stores the result in conf.env.AS; the return value was
	# previously bound to an unused local variable
	conf.find_program(['nasm', 'yasm'], var='AS')
	conf.env.AS_TGT_F = ['-o']
	conf.env.ASLNK_TGT_F = ['-o']
	conf.load('asm')
diff --git a/waflib/Tools/perl.py b/waflib/Tools/perl.py
new file mode 100644
index 0000000..8b6c2f8
--- /dev/null
+++ b/waflib/Tools/perl.py
@@ -0,0 +1,80 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os
+from waflib import Task,Options,Utils
+from waflib.Configure import conf
+from waflib.TaskGen import extension,feature,before_method
@before_method('apply_incpaths', 'apply_link', 'propagate_uselib_vars')
@feature('perlext')
def init_perlext(self):
	"""Prepare a perl extension build: force the PERLEXT uselib and shared-library naming pattern."""
	self.uselib = self.to_list(getattr(self, 'uselib', []))
	if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT')
	# perl extensions use the dlext-based pattern rather than the platform default
	self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['perlext_PATTERN']
@extension('.xs')
def xsubpp_file(self, node):
	"""Convert a .xs source to C with xsubpp and feed the generated file back into the build."""
	outnode = node.change_ext('.c')
	self.create_task('xsubpp', node, outnode)
	self.source.append(outnode)
class xsubpp(Task.Task):
	"""Task running the xsubpp preprocessor to generate C from XS input."""
	run_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
	color = 'BLUE'
	ext_out = ['.h']
@conf
def check_perl_version(self, minver=None):
	"""Find perl and optionally enforce a minimum version given as a tuple of ints.

	Returns True when perl is found (and recent enough), otherwise False.
	"""
	res = True
	if minver:
		cver = '.'.join(map(str, minver))
	else:
		cver = ''
	self.start_msg('Checking for minimum perl version %s' % cver)
	perl = getattr(Options.options, 'perlbinary', None)
	if not perl:
		perl = self.find_program('perl', var='PERL')
	if not perl:
		self.end_msg("Perl not found", color="YELLOW")
		return False
	self.env['PERL'] = perl
	# %vd prints the version in dotted-decimal form, e.g. '5.18.2'
	version = self.cmd_and_log([perl, "-e", 'printf \"%vd\", $^V'])
	if not version:
		res = False
		version = "Unknown"
	elif not minver is None:
		# tuple comparison handles multi-component versions correctly
		ver = tuple(map(int, version.split(".")))
		if ver < minver:
			res = False
	self.end_msg(version, color=res and "GREEN" or "YELLOW")
	return res
@conf
def check_perl_module(self, module):
	"""Check that the perl module *module* can be loaded; return the command output, or None on failure."""
	cmd = [self.env['PERL'], '-e', 'use %s' % module]
	self.start_msg('perl module %s' % module)
	try:
		r = self.cmd_and_log(cmd)
	except Exception:
		self.end_msg(False)
		return None
	self.end_msg(r or True)
	return r
@conf
def check_perl_ext_devel(self):
	"""Query perl's Config module for the flags and paths needed to build XS extensions."""
	env = self.env
	perl = env.PERL
	if not perl:
		self.fatal('find perl first')
	def read_out(cmd):
		# NOTE(review): string concatenation assumes env.PERL is a string, not a list — confirm
		return Utils.to_list(self.cmd_and_log(perl + cmd))
	env['LINKFLAGS_PERLEXT'] = read_out(" -MConfig -e'print $Config{lddlflags}'")
	env['INCLUDES_PERLEXT'] = read_out(" -MConfig -e'print \"$Config{archlib}/CORE\"'")
	env['CFLAGS_PERLEXT'] = read_out(" -MConfig -e'print \"$Config{ccflags} $Config{cccdlflags}\"'")
	env['XSUBPP'] = read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}\"'")
	env['EXTUTILS_TYPEMAP'] = read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/typemap\"'")
	if not getattr(Options.options, 'perlarchdir', None):
		env['ARCHDIR_PERL'] = self.cmd_and_log(perl + " -MConfig -e'print $Config{sitearch}'")
	else:
		env['ARCHDIR_PERL'] = getattr(Options.options, 'perlarchdir')
	# shared-object name pattern built from perl's dynamic-library extension
	env['perlext_PATTERN'] = '%s.' + self.cmd_and_log(perl + " -MConfig -e'print $Config{dlext}'")
def options(opt):
	"""Add the --with-perl-binary and --with-perl-archdir command-line options."""
	opt.add_option('--with-perl-binary', type='string', dest='perlbinary', help='Specify alternate perl binary', default=None)
	opt.add_option('--with-perl-archdir', type='string', dest='perlarchdir', help='Specify directory where to install arch specific files', default=None)
diff --git a/waflib/Tools/python.py b/waflib/Tools/python.py
new file mode 100644
index 0000000..4bb911e
--- /dev/null
+++ b/waflib/Tools/python.py
@@ -0,0 +1,340 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys
+from waflib import Utils,Options,Errors,Logs
+from waflib.TaskGen import extension,before_method,after_method,feature
+from waflib.Configure import conf
# C fragment used by check_python_headers() to verify that a test program
# can be compiled and linked against the Python embedding libraries.
FRAG='''
#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
 void Py_Initialize(void);
 void Py_Finalize(void);
#ifdef __cplusplus
}
#endif
int main(int argc, char **argv)
{
 (void)argc; (void)argv;
 Py_Initialize();
 Py_Finalize();
 return 0;
}
'''
# Helper script run by install_pyfile() to byte-compile installed files:
# argv[1] = source, argv[2] = destination, argv[3] = name to record.
INST='''
import sys, py_compile
py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3])
'''
# Default imports executed by get_python_variables() before its queries.
DISTUTILS_IMP=['from distutils.sysconfig import get_config_var, get_python_lib']
@extension('.py')
def process_py(self, node):
    """Task generator method mapped to .py files: schedule installation.

    Does nothing unless an installation run is in progress and the task
    generator has a truthy install_path (defaults to '${PYTHONDIR}').
    The copy itself is deferred to a post-build function so that it runs
    after the build proper.
    """
    try:
        if not self.bld.is_install:
            return
    except AttributeError:
        return
    try:
        if not self.install_path:
            return
    except AttributeError:
        self.install_path = '${PYTHONDIR}'
    def inst_py(ctx):
        # resolve install_from lazily, at post-build time
        install_from = getattr(self, 'install_from', None)
        if install_from:
            install_from = self.path.find_dir(install_from)
        install_pyfile(self, node, install_from)
    self.bld.add_post_fun(inst_py)
def install_pyfile(self, node, install_from=None):
    """Install one .py file and manage its byte-compiled companions.

    On uninstall (bld.is_install < 0) the neighbouring .pyc/.pyo files are
    removed; on install (> 0) they are (re)generated through the INST
    helper script, unless an existing compiled file is already newer.
    """
    from_node = install_from or node.parent
    tsk = self.bld.install_as(self.install_path + '/' + node.path_from(from_node), node, postpone=False)
    path = tsk.get_install_path()
    if self.bld.is_install < 0:
        Logs.info("+ removing byte compiled python files")
        for x in 'co':  # 'c' and 'o' -> .pyc / .pyo suffixes
            try:
                os.remove(path + x)
            except OSError:
                pass
    if self.bld.is_install > 0:
        try:
            st1 = os.stat(path)
        except OSError:
            # NOTE(review): st1 stays unbound here, so the mtime comparison
            # below would raise NameError — confirm this path is unreachable
            Logs.error('The python file is missing, this should not happen')
        for x in ['c', 'o']:
            do_inst = self.env['PY' + x.upper()]  # PYC / PYO config flags
            try:
                st2 = os.stat(path + x)
            except OSError:
                pass
            else:
                if st1.st_mtime <= st2.st_mtime:
                    do_inst = False  # compiled file already up to date
            if do_inst:
                lst = (x == 'o') and [self.env['PYFLAGS_OPT']] or []  # -O for .pyo
                (a, b, c) = (path, path + x, tsk.get_install_path(destdir=False) + x)
                argv = self.env['PYTHON'] + lst + ['-c', INST, a, b, c]
                Logs.info('+ byte compiling %r' % (path + x))
                env = self.env.env or None
                ret = Utils.subprocess.Popen(argv, env=env).wait()
                if ret:
                    raise Errors.WafError('py%s compilation failed %r' % (x, path))
@feature('py')
def feature_py(self):
    """Placeholder for the 'py' feature; .py files are handled by process_py()."""
    pass
@feature('pyext')
@before_method('propagate_uselib_vars', 'apply_link')
@after_method('apply_bundle')
def init_pyext(self):
    """Prepare a task generator for building a Python extension module.

    Adds the PYEXT uselib variable, forces the shared-library patterns to
    the python extension pattern, and defaults the installation path to
    '${PYTHONARCHDIR}'.
    """
    uselib = self.to_list(getattr(self, 'uselib', []))
    if 'PYEXT' not in uselib:
        uselib.append('PYEXT')
    self.uselib = uselib
    pattern = self.env.pyext_PATTERN
    self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN = pattern
    self.env.fcshlib_PATTERN = self.env.dshlib_PATTERN = pattern
    try:
        if not self.install_path:
            return
    except AttributeError:
        self.install_path = '${PYTHONARCHDIR}'
@feature('pyext')
@before_method('apply_link', 'apply_bundle')
def set_bundle(self):
    """Build python extensions as mac bundles on OS X."""
    if 'darwin' == Utils.unversioned_sys_platform():
        self.mac_bundle = True
@before_method('propagate_uselib_vars')
@feature('pyembed')
def init_pyembed(self):
    """Add the PYEMBED uselib variable for programs embedding python."""
    uselib = self.to_list(getattr(self, 'uselib', []))
    if 'PYEMBED' not in uselib:
        uselib.append('PYEMBED')
    self.uselib = uselib
@conf
def get_python_variables(self, variables, imports=None):
    """Evaluate python expressions in a child interpreter and return their values.

    Each entry of *variables* is an expression whose repr() is printed by
    the configured python after executing *imports* (DISTUTILS_IMP by
    default).  Only None, quoted strings and integers are decoded from the
    output; the first unrecognized line stops the parsing.
    """
    if not imports:
        try:
            imports = self.python_imports
        except AttributeError:
            imports = DISTUTILS_IMP
    program = list(imports)
    program.append('')
    for v in variables:
        program.append("print(repr(%s))" % v)
    os_env = dict(os.environ)
    try:
        # this variable is known to confuse distutils on OS X
        del os_env['MACOSX_DEPLOYMENT_TARGET']
    except KeyError:
        pass
    try:
        out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env)
    except Errors.WafError:
        self.fatal('The distutils module is unusable: install "python-devel"?')
    self.to_log(out)
    return_values = []
    for s in out.split('\n'):
        s = s.strip()
        if not s:
            continue
        if s == 'None':
            return_values.append(None)
        elif (s[0] == "'" and s[-1] == "'") or (s[0] == '"' and s[-1] == '"'):
            # repr() of a string produced by our own child process
            return_values.append(eval(s))
        elif s[0].isdigit():
            return_values.append(int(s))
        else: break
    return return_values
@conf
def check_python_headers(conf):
    """Check for python headers and libraries for extensions/embedding.

    Fills the PYEXT and PYEMBED uselib variables (includes, library paths,
    flags) from distutils configuration values, probing several library
    locations in turn, then verifies everything by compiling FRAG.
    Requires a C or C++ compiler and the python version check.
    """
    env = conf.env
    if not env['CC_NAME'] and not env['CXX_NAME']:
        conf.fatal('load a compiler first (gcc, g++, ..)')
    if not env['PYTHON_VERSION']:
        conf.check_python_version()
    pybin = conf.env.PYTHON
    if not pybin:
        conf.fatal('Could not find the python executable')
    # distutils configuration values we need
    v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS'.split()
    try:
        lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v])
    except RuntimeError:
        conf.fatal("Python development headers not found (-v for details).")
    vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
    conf.to_log("Configuration returned from %r:\n%r\n" % (pybin, '\n'.join(vals)))
    dct = dict(zip(v, lst))
    x = 'MACOSX_DEPLOYMENT_TARGET'
    if dct[x]:
        conf.env[x] = conf.environ[x] = dct[x]
    env['pyext_PATTERN'] = '%s' + dct['SO']  # e.g. '%s.so'
    all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
    conf.parse_flags(all_flags, 'PYEMBED')
    all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS']
    conf.parse_flags(all_flags, 'PYEXT')
    result = None
    # try 'pythonX.Y' then 'pythonXY' in several candidate directories
    for name in ('python' + env['PYTHON_VERSION'], 'python' + env['PYTHON_VERSION'].replace('.', '')):
        if not result and env['LIBPATH_PYEMBED']:
            path = env['LIBPATH_PYEMBED']
            conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
            result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name)
        if not result and dct['LIBDIR']:
            path = [dct['LIBDIR']]
            conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
            result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBDIR' % name)
        if not result and dct['LIBPL']:
            path = [dct['LIBPL']]
            conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
            result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in python_LIBPL' % name)
        if not result:
            path = [os.path.join(dct['prefix'], "libs")]
            conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
            result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name)
        if result:
            break
    if result:
        env['LIBPATH_PYEMBED'] = path
        env.append_value('LIB_PYEMBED', [name])
    else:
        conf.to_log("\n\n### LIB NOT FOUND\n")
    if (Utils.is_win32 or sys.platform.startswith('os2') or dct['Py_ENABLE_SHARED']):
        # extensions link against the python library on these platforms
        env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED']
        env['LIB_PYEXT'] = env['LIB_PYEMBED']
    num = '.'.join(env['PYTHON_VERSION'].split('.')[:2])
    conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', mandatory=False)
    includes = []
    if conf.env.PYTHON_CONFIG:
        # prefer python-config for the include paths
        for incstr in conf.cmd_and_log([conf.env.PYTHON_CONFIG, '--includes']).strip().split():
            if (incstr.startswith('-I') or incstr.startswith('/I')):
                incstr = incstr[2:]
            if incstr not in includes:
                includes.append(incstr)
        conf.to_log("Include path for Python extensions (found via python-config --includes): %r\n" % (includes,))
        env['INCLUDES_PYEXT'] = includes
        env['INCLUDES_PYEMBED'] = includes
    else:
        conf.to_log("Include path for Python extensions ""(found via distutils module): %r\n" % (dct['INCLUDEPY'],))
        env['INCLUDES_PYEXT'] = [dct['INCLUDEPY']]
        env['INCLUDES_PYEMBED'] = [dct['INCLUDEPY']]
    if env['CC_NAME'] == 'gcc':
        env.append_value('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
        env.append_value('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
    if env['CXX_NAME'] == 'gcc':
        env.append_value('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
        env.append_value('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])
    if env.CC_NAME == "msvc":
        # reuse the flags computed by distutils for MSVC
        from distutils.msvccompiler import MSVCCompiler
        dist_compiler = MSVCCompiler()
        dist_compiler.initialize()
        env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
        env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
        env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)
    try:
        conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg=':-(')
    except conf.errors.ConfigurationError:
        # fall back entirely on python-config output
        xx = conf.env.CXX_NAME and 'cxx' or 'c'
        conf.check_cfg(msg='Asking python-config for the flags (pyembed)', path=conf.env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=['--cflags', '--libs', '--ldflags'])
        conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', msg='Getting pyembed flags from python-config', fragment=FRAG, errmsg='Could not build a python embedded interpreter', features='%s %sprogram pyembed' % (xx, xx))
        conf.check_cfg(msg='Asking python-config for the flags (pyext)', path=conf.env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=['--cflags', '--libs', '--ldflags'])
        conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', msg='Getting pyext flags from python-config', features='%s %sshlib pyext' % (xx, xx), fragment=FRAG, errmsg='Could not build python extensions')
@conf
def check_python_version(conf, minver=None):
    """Check that the configured python interpreter satisfies *minver*.

    minver: optional tuple such as (2, 6) or (2, 7, 3).
    On success sets env.PYTHON_VERSION ('major.minor') and the PYTHONDIR /
    PYTHONARCHDIR installation paths (also as defines when supported).
    Raises a configuration error when the interpreter is too old.
    """
    assert minver is None or isinstance(minver, tuple)
    pybin = conf.env['PYTHON']
    if not pybin:
        conf.fatal('could not find the python executable')
    # ask the interpreter itself: five lines, one per sys.version_info field
    cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))']
    Logs.debug('python: Running python command %r' % cmd)
    lines = conf.cmd_and_log(cmd).split()
    assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
    pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
    result = (minver is None) or (pyver_tuple >= minver)
    if result:
        pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
        conf.env['PYTHON_VERSION'] = pyver
        if 'PYTHONDIR' in conf.environ:
            pydir = conf.environ['PYTHONDIR']
        else:
            if Utils.is_win32:
                (python_LIBDEST, pydir) = conf.get_python_variables(["get_config_var('LIBDEST') or ''", "get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']])
            else:
                python_LIBDEST = None
                (pydir,) = conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']])
            if python_LIBDEST is None:
                if conf.env['LIBDIR']:
                    python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
                else:
                    python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)
        if 'PYTHONARCHDIR' in conf.environ:
            pyarchdir = conf.environ['PYTHONARCHDIR']
        else:
            (pyarchdir,) = conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']])
            if not pyarchdir:
                pyarchdir = pydir
        if hasattr(conf, 'define'):  # the C tool may not be loaded
            conf.define('PYTHONDIR', pydir)
            conf.define('PYTHONARCHDIR', pyarchdir)
        conf.env['PYTHONDIR'] = pydir
        conf.env['PYTHONARCHDIR'] = pyarchdir
    pyver_full = '.'.join(map(str, pyver_tuple[:3]))
    if minver is None:
        conf.msg('Checking for python version', pyver_full)
    else:
        minver_str = '.'.join(map(str, minver))
        # BUGFIX: the color argument used to be the expression
        # ">= %s"%(minver_str,) and 'GREEN' or 'YELLOW' — a non-empty string
        # is always truthy, so the line was GREEN even when the check
        # failed.  Key the color off the actual comparison result instead.
        conf.msg('Checking for python version >= %s' % minver_str, pyver_full, result and 'GREEN' or 'YELLOW')
    if not result:
        conf.fatal('The python version is too old, expecting %r' % (minver,))
# Script run by check_python_module(): prints the module's __version__
# or the literal string 'unknown version'.
PYTHON_MODULE_TEMPLATE='''
import %s as current_module
version = getattr(current_module, '__version__', None)
if version is not None:
 print(str(version))
else:
 print('unknown version')
'''
@conf
def check_python_module(conf, module_name, condition=''):
    """Check that a python module can be imported by the configured python.

    condition: optional expression on the module version, e.g.
    "ver >= num(0, 9)", where 'ver' is the detected version (LooseVersion)
    and num(...) builds one.  Fatal error when the module is missing or
    the condition does not hold.
    """
    msg = 'Python module %s' % module_name
    if condition:
        msg = '%s (%s)' % (msg, condition)
    conf.start_msg(msg)
    try:
        ret = conf.cmd_and_log(conf.env['PYTHON'] + ['-c', PYTHON_MODULE_TEMPLATE % module_name])
    except Exception:
        conf.end_msg(False)
        conf.fatal('Could not find the python module %r' % module_name)
    ret = ret.strip()
    if condition:
        conf.end_msg(ret)
        if ret == 'unknown version':
            conf.fatal('Could not check the %s version' % module_name)
        from distutils.version import LooseVersion
        def num(*k):
            # accept both num(1, 2, 3) and num('1.2.3')
            if isinstance(k[0], int):
                return LooseVersion('.'.join([str(x) for x in k]))
            else:
                return LooseVersion(k[0])
        d = {'num': num, 'ver': LooseVersion(ret)}
        # NOTE(review): *condition* is eval()'d — it comes from the build
        # script, which is already trusted code, but keep it that way
        ev = eval(condition, {}, d)
        if not ev:
            conf.fatal('The %s version does not satisfy the requirements' % module_name)
    else:
        if ret == 'unknown version':
            conf.end_msg(True)
        else:
            conf.end_msg(ret)
def configure(conf):
    """Detect the python interpreter and set the byte-compilation defaults."""
    try:
        conf.find_program('python', var='PYTHON')
    except conf.errors.ConfigurationError:
        Logs.warn("could not find a python executable, setting to sys.executable '%s'" % sys.executable)
        conf.env.PYTHON = sys.executable
    if conf.env.PYTHON != sys.executable:
        Logs.warn("python executable %r differs from system %r" % (conf.env.PYTHON, sys.executable))
    conf.env.PYTHON = conf.cmd_to_list(conf.env.PYTHON)
    env = conf.env
    for key, value in (
        ('PYCMD', '"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'),
        ('PYFLAGS', ''),
        ('PYFLAGS_OPT', '-O'),
    ):
        env[key] = value
    # install .pyc/.pyo unless --nopyc/--nopyo were given
    env['PYC'] = getattr(Options.options, 'pyc', 1)
    env['PYO'] = getattr(Options.options, 'pyo', 1)
def options(opt):
    """Add options to skip the installation of byte-compiled files."""
    for flag, dest, descr in (
        ('--nopyc', 'pyc', 'Do not install bytecode compiled .pyc files (configuration) [Default:install]'),
        ('--nopyo', 'pyo', 'Do not install optimised compiled .pyo files (configuration) [Default:install]'),
    ):
        opt.add_option(flag, action='store_false', default=1, help=descr, dest=dest)
diff --git a/waflib/Tools/qt4.py b/waflib/Tools/qt4.py
new file mode 100644
index 0000000..4300a57
--- /dev/null
+++ b/waflib/Tools/qt4.py
@@ -0,0 +1,437 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+try:
+ from xml.sax import make_parser
+ from xml.sax.handler import ContentHandler
+except ImportError:
+ has_xml=False
+ ContentHandler=object
+else:
+ has_xml=True
+import os,sys
+from waflib.Tools import c_preproc,cxx
+from waflib import Task,Utils,Options,Errors
+from waflib.TaskGen import feature,after_method,extension
+from waflib.Configure import conf
+from waflib import Logs
# Header extensions tried when resolving a "foo.moc" dependency to a header.
MOC_H=['.h','.hpp','.hxx','.hh']
# Qt resource and designer file extensions.
EXT_RCC=['.qrc']
EXT_UI=['.ui']
# C++ source extensions that may themselves include a .moc file.
EXT_QT4=['.cpp','.cc','.cxx','.C']
# Default Qt4 modules probed by find_qt4_libraries().
QT4_LIBS="QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative"
class qxx(cxx.cxx):
    """C++ compilation task that creates moc tasks on demand.

    A .cpp file may #include a .moc file generated from a matching header;
    those dependencies are only known after preprocessing, so the moc
    tasks are created lazily from runnable_status()/add_moc_tasks().
    """
    def __init__(self, *k, **kw):
        Task.Task.__init__(self, *k, **kw)
        self.moc_done = 0  # set once the moc tasks have been created
    def scan(self):
        """Re-use the C preprocessor scan, but move .moc entries from the
        node list to the name list: they may not exist yet."""
        (nodes, names) = c_preproc.scan(self)
        for x in nodes:
            if x.name.endswith('.moc'):
                nodes.remove(x)
                names.append(x.path_from(self.inputs[0].parent.get_bld()))
        return (nodes, names)
    def runnable_status(self):
        if self.moc_done:
            return Task.Task.runnable_status(self)
        else:
            # wait for the predecessors, then inject the moc tasks
            for t in self.run_after:
                if not t.hasrun:
                    return Task.ASK_LATER
            self.add_moc_tasks()
            return Task.Task.runnable_status(self)
    def add_moc_tasks(self):
        """Create one moc task for each .moc file in the raw dependencies."""
        node = self.inputs[0]
        bld = self.generator.bld
        try:
            # computing the signature fills the dependencies; discard the
            # cached value so it is recomputed after the moc tasks run
            self.signature()
        except KeyError:
            pass
        else:
            delattr(self, 'cache_sig')
        moctasks = []
        mocfiles = []
        try:
            tmp_lst = bld.raw_deps[self.uid()]
            bld.raw_deps[self.uid()] = []
        except KeyError:
            tmp_lst = []
        for d in tmp_lst:
            if not d.endswith('.moc'):
                continue
            if d in mocfiles:
                Logs.error("paranoia owns")  # duplicate dependency, skip
                continue
            mocfiles.append(d)
            # locate the header (or source) that this .moc comes from
            h_node = None
            try: ext = Options.options.qt_header_ext.split()
            except AttributeError: pass
            # NOTE(review): if the AttributeError branch was taken, 'ext'
            # is unbound here and the next line raises NameError — confirm
            # that qt_header_ext is always defined by options()
            if not ext: ext = MOC_H
            base2 = d[:-4]
            for x in [node.parent] + self.generator.includes_nodes:
                for e in ext:
                    h_node = x.find_node(base2 + e)
                    if h_node:
                        break
                if h_node:
                    m_node = h_node.change_ext('.moc')
                    break
            else:
                # no plain header: the moc input may be a source file
                for k in EXT_QT4:
                    if base2.endswith(k):
                        for x in [node.parent] + self.generator.includes_nodes:
                            h_node = x.find_node(base2)
                            if h_node:
                                break
                        if h_node:
                            m_node = h_node.change_ext(k + '.moc')
                            break
            if not h_node:
                raise Errors.WafError('no header found for %r which is a moc file' % d)
            bld.node_deps[(self.inputs[0].parent.abspath(), m_node.name)] = h_node
            task = Task.classes['moc'](env=self.env, generator=self.generator)
            task.set_inputs(h_node)
            task.set_outputs(m_node)
            # inject the new task into the scheduler
            gen = bld.producer
            gen.outstanding.insert(0, task)
            gen.total += 1
            moctasks.append(task)
        tmp_lst = bld.raw_deps[self.uid()] = mocfiles
        # also create moc tasks for already-resolved node dependencies
        lst = bld.node_deps.get(self.uid(), ())
        for d in lst:
            name = d.name
            if name.endswith('.moc'):
                task = Task.classes['moc'](env=self.env, generator=self.generator)
                task.set_inputs(bld.node_deps[(self.inputs[0].parent.abspath(), name)])
                task.set_outputs(d)
                gen = bld.producer
                gen.outstanding.insert(0, task)
                gen.total += 1
                moctasks.append(task)
        self.run_after.update(set(moctasks))
        self.moc_done = 1
    # compile exactly like a plain cxx task
    run = Task.classes['cxx'].__dict__['run']
class trans_update(Task.Task):
    """Update a .ts translation file from the sources with lupdate."""
    run_str='${QT_LUPDATE} ${SRC} -ts ${TGT}'
    color='BLUE'
# the .ts file is modified in place: mark the outputs as always produced
Task.update_outputs(trans_update)
class XMLHandler(ContentHandler):
    """SAX handler collecting the text of <file> elements from a .qrc file."""
    def __init__(self):
        self.buf = []    # text chunks of the element currently being read
        self.files = []  # file names collected so far
    def startElement(self, name, attrs):
        if name == 'file':
            self.buf = []
    def endElement(self, name):
        if name == 'file':
            self.files.append(str(''.join(self.buf)))
    def characters(self, cars):
        self.buf.append(cars)
@extension(*EXT_RCC)
def create_rcc_task(self, node):
    """Create the rcc and cxx tasks for a .qrc resource file; returns the cxx task."""
    cpp_node = node.change_ext('_rc.cpp')
    self.create_task('rcc', node, cpp_node)
    obj_task = self.create_task('cxx', cpp_node, cpp_node.change_ext('.o'))
    try:
        self.compiled_tasks.append(obj_task)
    except AttributeError:
        self.compiled_tasks = [obj_task]
    return obj_task
@extension(*EXT_UI)
def create_uic_task(self, node):
    """Create a ui4 task generating ui_*.h from a designer .ui file."""
    task = self.create_task('ui4', node)
    header = self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])
    task.outputs = [header]
@extension('.ts')
def add_lang(self, node):
    """Collect .ts translation nodes on self.lang for apply_qt4()."""
    current = self.to_list(getattr(self, 'lang', []))
    self.lang = current + [node]
@feature('qt4')
@after_method('apply_link')
def apply_qt4(self):
    """Task generator method for the 'qt4' feature.

    Creates ts2qm tasks for the translations in self.lang, optional
    trans_update tasks when --translate was given, embeds the .qm files
    through a generated .qrc when self.langname is set, and computes
    MOC_FLAGS from the -D/-I (or /D//I) entries of CXXFLAGS.
    """
    if getattr(self, 'lang', None):
        qmtasks = []
        for x in self.to_list(self.lang):
            if isinstance(x, str):
                x = self.path.find_resource(x + '.ts')
            qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))
        if getattr(self, 'update', None) and Options.options.trans_qt4:
            # feed both the compiled sources and the .ui files to lupdate
            cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')]
            for x in qmtasks:
                self.create_task('trans_update', cxxnodes, x.inputs)
        if getattr(self, 'langname', None):
            # bundle the .qm files into a resource linked into the target
            qmnodes = [x.outputs[0] for x in qmtasks]
            rcnode = self.langname
            if isinstance(rcnode, str):
                rcnode = self.path.find_or_declare(rcnode + '.qrc')
            t = self.create_task('qm2rcc', qmnodes, rcnode)
            k = create_rcc_task(self, t.outputs[0])
            self.link_task.inputs.append(k.outputs[0])
    lst = []
    for flag in self.to_list(self.env['CXXFLAGS']):
        if len(flag) < 2: continue
        f = flag[0:2]
        if f in ['-D', '-I', '/D', '/I']:
            if (f[0] == '/'):
                # normalize the MSVC style to the dash form for moc
                lst.append('-' + flag[1:])
            else:
                lst.append(flag)
    self.env['MOC_FLAGS'] = lst
@extension(*EXT_QT4)
def cxx_hook(self, node):
    """Compile C++ sources with the moc-aware qxx task instead of plain cxx."""
    return self.create_compiled_task('qxx', node)
class rcc(Task.Task):
    """Compile a .qrc resource file with rcc."""
    color='BLUE'
    run_str='${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
    ext_out=['.h']
    def scan(self):
        """Parse the .qrc XML and return the referenced files as dependencies."""
        node = self.inputs[0]
        if not has_xml:
            Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
            return ([], [])
        parser = make_parser()
        curHandler = XMLHandler()
        parser.setContentHandler(curHandler)
        fi = open(self.inputs[0].abspath(), 'r')
        try:
            parser.parse(fi)
        finally:
            fi.close()
        nodes = []
        names = []
        root = self.inputs[0].parent
        for x in curHandler.files:
            nd = root.find_resource(x)
            if nd: nodes.append(nd)
            else: names.append(x)  # unresolved entries become name deps
        return (nodes, names)
class moc(Task.Task):
    """Run moc on a header to produce the .moc file."""
    color='BLUE'
    run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
class ui4(Task.Task):
    """Run uic on a designer .ui file to produce the ui_*.h header."""
    color='BLUE'
    run_str='${QT_UIC} ${SRC} -o ${TGT}'
    ext_out=['.h']
class ts2qm(Task.Task):
    """Compile a .ts translation into a binary .qm file with lrelease."""
    color='BLUE'
    run_str='${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
class qm2rcc(Task.Task):
    """Write a .qrc file listing the compiled .qm translation files."""
    color='BLUE'
    after='ts2qm'
    def run(self):
        base = self.outputs[0].parent
        entries = ['<file>%s</file>' % node.path_from(base) for node in self.inputs]
        txt = '\n'.join(entries)
        code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
        self.outputs[0].write(code)
def configure(self):
    """Standard qt4 detection sequence: binaries, libraries, rpath cleanup."""
    for step in (self.find_qt4_binaries, self.set_qt4_libs_to_check,
                 self.find_qt4_libraries, self.add_qt4_rpath,
                 self.simplify_qt4_libs):
        step()
@conf
def find_qt4_binaries(self):
    """Find qmake, uic, moc, rcc, lrelease and lupdate for Qt4.

    Search order for qmake: --qtdir/--qtbin options, QT4_ROOT/QT4_BIN
    environment variables, PATH plus well-known Qt locations.  The newest
    qmake found wins; the other tools are then looked up next to it.
    Sets env.QMAKE, QT_UIC, QT_MOC, ... and the moc/uic flag patterns.
    """
    env = self.env
    opt = Options.options
    qtdir = getattr(opt, 'qtdir', '')
    qtbin = getattr(opt, 'qtbin', '')
    paths = []
    if qtdir:
        qtbin = os.path.join(qtdir, 'bin')
    if not qtdir:
        qtdir = os.environ.get('QT4_ROOT', '')
        qtbin = os.environ.get('QT4_BIN', None) or os.path.join(qtdir, 'bin')
    if qtbin:
        paths = [qtbin]
    if not qtdir:
        # no explicit location: search PATH and common install prefixes
        paths = os.environ.get('PATH', '').split(os.pathsep)
        paths.append('/usr/share/qt4/bin/')
        try:
            lst = Utils.listdir('/usr/local/Trolltech/')
        except OSError:
            pass
        else:
            if lst:
                # pick the most recent Trolltech release directory
                lst.sort()
                lst.reverse()
                qtdir = '/usr/local/Trolltech/%s/' % lst[0]
                qtbin = os.path.join(qtdir, 'bin')
                paths.append(qtbin)
    # keep the qmake reporting the highest QT_VERSION
    cand = None
    prev_ver = ['4', '0', '0']
    for qmk in ['qmake-qt4', 'qmake4', 'qmake']:
        try:
            qmake = self.find_program(qmk, path_list=paths)
        except self.errors.ConfigurationError:
            pass
        else:
            try:
                version = self.cmd_and_log([qmake, '-query', 'QT_VERSION']).strip()
            except self.errors.WafError:
                pass
            else:
                if version:
                    new_ver = version.split('.')
                    if new_ver > prev_ver:
                        cand = qmake
                        prev_ver = new_ver
    if cand:
        self.env.QMAKE = cand
    else:
        self.fatal('Could not find qmake for qt4')
    # from here on, look for the other tools next to qmake
    qtbin = self.cmd_and_log([self.env.QMAKE, '-query', 'QT_INSTALL_BINS']).strip() + os.sep
    def find_bin(lst, var):
        # try each candidate name in turn; keep the first one found
        if var in env:
            return
        for f in lst:
            try:
                ret = self.find_program(f, path_list=paths)
            except self.errors.ConfigurationError:
                pass
            else:
                env[var] = ret
                break
    find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
    find_bin(['uic-qt4', 'uic'], 'QT_UIC')
    if not env['QT_UIC']:
        self.fatal('cannot find the uic compiler for qt4')
    try:
        uicver = self.cmd_and_log(env['QT_UIC'] + " -version 2>&1").strip()
    except self.errors.ConfigurationError:
        self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')
    uicver = uicver.replace('Qt User Interface Compiler ', '').replace('User Interface Compiler for Qt', '')
    self.msg('Checking for uic version', '%s' % uicver)
    if uicver.find(' 3.') != -1:
        self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')
    find_bin(['moc-qt4', 'moc'], 'QT_MOC')
    find_bin(['rcc'], 'QT_RCC')
    find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
    find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')
    env['UIC3_ST'] = '%s -o %s'
    env['UIC_ST'] = '%s -o %s'
    env['MOC_ST'] = '-o'
    env['ui_PATTERN'] = 'ui_%s.h'
    env['QT_LRELEASE_FLAGS'] = ['-silent']
    env.MOCCPPPATH_ST = '-I%s'
    env.MOCDEFINES_ST = '-D%s'
@conf
def find_qt4_libraries(self):
    """Locate the Qt4 libraries and fill the per-module uselib variables.

    Prefers pkg-config when available (skipped when QT4_XCOMPILE is set);
    otherwise probes the library directory reported by qmake for
    frameworks (OS X), lib*.so/.a (unix) or *.lib/.a (win32, including
    the *_debug variants).
    """
    qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT4_LIBDIR", None)
    if not qtlibs:
        try:
            qtlibs = self.cmd_and_log([self.env.QMAKE, '-query', 'QT_INSTALL_LIBS']).strip()
        except Errors.WafError:
            qtdir = self.cmd_and_log([self.env.QMAKE, '-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
            qtlibs = os.path.join(qtdir, 'lib')
    self.msg('Found the Qt4 libraries in', qtlibs)
    qtincludes = os.environ.get("QT4_INCLUDES", None) or self.cmd_and_log([self.env.QMAKE, '-query', 'QT_INSTALL_HEADERS']).strip()
    env = self.env
    if not 'PKG_CONFIG_PATH' in os.environ:
        os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib' % (qtlibs, qtlibs)
    try:
        if os.environ.get("QT4_XCOMPILE", None):
            # cross-compiling: the host pkg-config must not be used
            raise self.errors.ConfigurationError()
        self.check_cfg(atleast_pkgconfig_version='0.1')
    except self.errors.ConfigurationError:
        # manual detection, one platform-specific branch per module
        for i in self.qt4_vars:
            uselib = i.upper()
            if Utils.unversioned_sys_platform() == "darwin":
                frameworkName = i + ".framework"
                qtDynamicLib = os.path.join(qtlibs, frameworkName, i)
                if os.path.exists(qtDynamicLib):
                    env.append_unique('FRAMEWORK_' + uselib, i)
                    self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
                else:
                    self.msg('Checking for %s' % i, False, 'YELLOW')
                env.append_unique('INCLUDES_' + uselib, os.path.join(qtlibs, frameworkName, 'Headers'))
            elif env.DEST_OS != "win32":
                qtDynamicLib = os.path.join(qtlibs, "lib" + i + ".so")
                qtStaticLib = os.path.join(qtlibs, "lib" + i + ".a")
                if os.path.exists(qtDynamicLib):
                    env.append_unique('LIB_' + uselib, i)
                    self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
                elif os.path.exists(qtStaticLib):
                    env.append_unique('LIB_' + uselib, i)
                    self.msg('Checking for %s' % i, qtStaticLib, 'GREEN')
                else:
                    self.msg('Checking for %s' % i, False, 'YELLOW')
                env.append_unique('LIBPATH_' + uselib, qtlibs)
                env.append_unique('INCLUDES_' + uselib, qtincludes)
                env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
            else:
                # win32: try the release naming schemes, then the debug ones
                for k in ("lib%s.a", "lib%s4.a", "%s.lib", "%s4.lib"):
                    lib = os.path.join(qtlibs, k % i)
                    if os.path.exists(lib):
                        env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2:k.find('.')])
                        self.msg('Checking for %s' % i, lib, 'GREEN')
                        break
                else:
                    self.msg('Checking for %s' % i, False, 'YELLOW')
                env.append_unique('LIBPATH_' + uselib, qtlibs)
                env.append_unique('INCLUDES_' + uselib, qtincludes)
                env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
                uselib = i.upper() + "_debug"
                for k in ("lib%sd.a", "lib%sd4.a", "%sd.lib", "%sd4.lib"):
                    lib = os.path.join(qtlibs, k % i)
                    if os.path.exists(lib):
                        env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2:k.find('.')])
                        self.msg('Checking for %s' % i, lib, 'GREEN')
                        break
                else:
                    self.msg('Checking for %s' % i, False, 'YELLOW')
                env.append_unique('LIBPATH_' + uselib, qtlibs)
                env.append_unique('INCLUDES_' + uselib, qtincludes)
                env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
    else:
        for i in self.qt4_vars_debug + self.qt4_vars:
            self.check_cfg(package=i, args='--cflags --libs', mandatory=False)
@conf
def simplify_qt4_libs(self):
    """Drop per-module library paths that QtCore already provides."""
    env = self.env
    def _strip(names, core_key):
        for name in names:
            key = name.upper()
            if key == 'QTCORE':
                continue
            paths = env['LIBPATH_' + key]
            if paths:
                core_paths = env[core_key]
                env['LIBPATH_' + key] = [p for p in paths if p not in core_paths]
    _strip(self.qt4_vars, 'LIBPATH_QTCORE')
    _strip(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
@conf
def add_qt4_rpath(self):
    """When --want-rpath is given, derive RPATH_* from the library paths.

    Paths already provided by QtCore are skipped for the other modules.
    """
    env = self.env
    if not Options.options.want_rpath:
        return
    def _rpaths(names, core_key):
        for name in names:
            key = name.upper()
            paths = env['LIBPATH_' + key]
            if not paths:
                continue
            core = env[core_key]
            flags = []
            for p in paths:
                if key != 'QTCORE' and p in core:
                    continue
                flags.append('-Wl,--rpath=' + p)
            env['RPATH_' + key] = flags
    _rpaths(self.qt4_vars, 'LIBPATH_QTCORE')
    _rpaths(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
@conf
def set_qt4_libs_to_check(self):
    """Initialize qt4_vars / qt4_vars_debug with the modules to probe."""
    if not hasattr(self, 'qt4_vars'):
        self.qt4_vars = QT4_LIBS
    self.qt4_vars = Utils.to_list(self.qt4_vars)
    if not hasattr(self, 'qt4_vars_debug'):
        self.qt4_vars_debug = [name + '_debug' for name in self.qt4_vars]
    self.qt4_vars_debug = Utils.to_list(self.qt4_vars_debug)
def options(opt):
    """Add the command-line options of the qt4 tool."""
    opt.add_option('--want-rpath', action='store_true', default=False,
        dest='want_rpath', help='enable the rpath for qt libraries')
    opt.add_option('--header-ext', type='string', default='',
        help='header extension for moc files', dest='qt_header_ext')
    for name in ('qtdir', 'qtbin', 'qtlibs'):
        opt.add_option('--' + name, type='string', default='', dest=name)
    opt.add_option('--translate', action='store_true',
        help='collect translation strings', dest='trans_qt4', default=False)
diff --git a/waflib/Tools/ruby.py b/waflib/Tools/ruby.py
new file mode 100644
index 0000000..04cddfb
--- /dev/null
+++ b/waflib/Tools/ruby.py
@@ -0,0 +1,103 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os
+from waflib import Task,Options,Utils
+from waflib.TaskGen import before_method,feature,after_method,Task,extension
+from waflib.Configure import conf
@feature('rubyext')
@before_method('apply_incpaths', 'apply_lib_vars', 'apply_bundle', 'apply_link')
def init_rubyext(self):
    """Prepare a ruby extension: install path and RUBY/RUBYEXT uselibs."""
    self.install_path = '${ARCHDIR_RUBY}'
    uselib = self.to_list(getattr(self, 'uselib', ''))
    for name in ('RUBY', 'RUBYEXT'):
        if name not in uselib:
            uselib.append(name)
    self.uselib = uselib
@feature('rubyext')
@before_method('apply_link', 'propagate_uselib')
def apply_ruby_so_name(self):
    """Name shared libraries according to the ruby extension pattern."""
    self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['rubyext_PATTERN']
@conf
def check_ruby_version(self, minver=()):
    """Detect the ruby interpreter and check its version against *minver*.

    Honours --with-ruby-binary; otherwise searches the PATH.  Sets
    env.RUBY and env.RUBY_VERSION; fatal error when the version is
    too old or cannot be parsed.
    """
    if Options.options.rubybinary:
        self.env.RUBY = Options.options.rubybinary
    else:
        self.find_program('ruby', var='RUBY')
    ruby = self.env.RUBY
    try:
        version = self.cmd_and_log([ruby, '-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
    except Exception:
        self.fatal('could not determine ruby version')
    self.env.RUBY_VERSION = version
    try:
        ver = tuple(map(int, version.split(".")))
    except Exception:
        self.fatal('unsupported ruby version %r' % version)
    cver = ''
    if minver:
        if ver < minver:
            self.fatal('ruby is too old %r' % ver)
        cver = '.'.join([str(x) for x in minver])
    else:
        cver = ver  # NOTE(review): a tuple here, a string in the other branch
    self.msg('Checking for ruby version %s' % str(minver or ''), cver)
@conf
def check_ruby_ext_devel(self):
    """Check for the headers and libraries needed to build ruby extensions.

    Requires check_ruby_version() and a C/C++ compiler to be configured.
    Fills INCLUDES/LIBPATH/CFLAGS/LINKFLAGS for the RUBYEXT uselib,
    rubyext_PATTERN and the ARCHDIR_RUBY / LIBDIR_RUBY install paths.
    """
    if not self.env.RUBY:
        self.fatal('ruby detection is required first')
    if not self.env.CC_NAME and not self.env.CXX_NAME:
        self.fatal('load a c/c++ compiler first')
    version = tuple(map(int, self.env.RUBY_VERSION.split(".")))
    def read_out(cmd):
        # run a ruby one-liner with rbconfig loaded, split its output
        return Utils.to_list(self.cmd_and_log([self.env.RUBY, '-rrbconfig', '-e', cmd]))
    def read_config(key):
        # NOTE(review): Config::CONFIG was removed in ruby 2.2 (RbConfig)
        return read_out('puts Config::CONFIG[%r]' % key)
    ruby = self.env['RUBY']
    archdir = read_config('archdir')
    cpppath = archdir
    if version >= (1, 9, 0):
        # ruby >= 1.9 moved the headers to rubyhdrdir/<arch>
        ruby_hdrdir = read_config('rubyhdrdir')
        cpppath += ruby_hdrdir
        cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]
    self.check(header_name='ruby.h', includes=cpppath, errmsg='could not find ruby header file')
    self.env.LIBPATH_RUBYEXT = read_config('libdir')
    self.env.LIBPATH_RUBYEXT += archdir
    self.env.INCLUDES_RUBYEXT = cpppath
    self.env.CFLAGS_RUBYEXT = read_config('CCDLFLAGS')
    self.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]
    # strip the leading compiler name (and a stray 'ppc' on old OS X)
    flags = read_config('LDSHARED')
    while flags and flags[0][0] != '-':
        flags = flags[1:]
    if len(flags) > 1 and flags[1] == "ppc":
        flags = flags[2:]
    self.env.LINKFLAGS_RUBYEXT = flags
    self.env.LINKFLAGS_RUBYEXT += read_config('LIBS')
    self.env.LINKFLAGS_RUBYEXT += read_config('LIBRUBYARG_SHARED')
    if Options.options.rubyarchdir:
        self.env.ARCHDIR_RUBY = Options.options.rubyarchdir
    else:
        self.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]
    if Options.options.rubylibdir:
        self.env.LIBDIR_RUBY = Options.options.rubylibdir
    else:
        self.env.LIBDIR_RUBY = read_config('sitelibdir')[0]
@conf
def check_ruby_module(self, module_name):
    """Check that a ruby module can be required; fatal error when missing."""
    self.start_msg('Ruby module %s' % module_name)
    script = 'require \'%s\';puts 1' % module_name
    try:
        self.cmd_and_log([self.env['RUBY'], '-e', script])
    except Exception:
        self.end_msg(False)
        self.fatal('Could not find the ruby module %r' % module_name)
    self.end_msg(True)
@extension('.rb')
def process(self, node):
    """Create a run_ruby task for each .rb source file."""
    tsk = self.create_task('run_ruby', node)
class run_ruby(Task.Task):
    """Execute a ruby script, with its own directory on the load path."""
    run_str='${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}'
def options(opt):
    """Add the command-line options of the ruby tool."""
    for flag, dest, descr in (
        ('--with-ruby-archdir', 'rubyarchdir', 'Specify directory where to install arch specific files'),
        ('--with-ruby-libdir', 'rubylibdir', 'Specify alternate ruby library path'),
        ('--with-ruby-binary', 'rubybinary', 'Specify alternate ruby binary'),
    ):
        opt.add_option(flag, type='string', dest=dest, help=descr)
diff --git a/waflib/Tools/suncc.py b/waflib/Tools/suncc.py
new file mode 100644
index 0000000..edd24cd
--- /dev/null
+++ b/waflib/Tools/suncc.py
@@ -0,0 +1,53 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os
+from waflib import Utils
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
@conf
def find_scc(conf):
    """Locate the Sun C compiler (env CC, $CC, or `cc` on PATH) and verify it via the -flags probe."""
    v=conf.env
    cc=v['CC'] or conf.environ.get('CC')
    if not cc:
        cc=conf.find_program('cc',var='CC')
    if not cc:
        conf.fatal('Could not find a Sun C compiler')
    cc=conf.cmd_to_list(cc)
    try:
        conf.cmd_and_log(cc+['-flags'])
    except Exception:
        conf.fatal('%r is not a Sun compiler'%cc)
    v['CC']=cc
    v['CC_NAME']='sun'
@conf
def scc_common_flags(conf):
    """Define the flag-name templates and file patterns used by the Sun C compiler."""
    env=conf.env
    env['CC_SRC_F']=[]
    env['CC_TGT_F']=['-c','-o']
    if not env['LINK_CC']:
        env['LINK_CC']=env['CC']
    env['CCLNK_SRC_F']=''
    env['CCLNK_TGT_F']=['-o']
    # argument templates for include paths, defines and libraries
    env['CPPPATH_ST']='-I%s'
    env['DEFINES_ST']='-D%s'
    env['LIB_ST']='-l%s'
    env['LIBPATH_ST']='-L%s'
    env['STLIB_ST']='-l%s'
    env['STLIBPATH_ST']='-L%s'
    env['SONAME_ST']='-Wl,-h,%s'
    env['SHLIB_MARKER']='-Bdynamic'
    env['STLIB_MARKER']='-Bstatic'
    # output naming patterns and shared/static library flags
    env['cprogram_PATTERN']='%s'
    env['CFLAGS_cshlib']=['-Kpic','-DPIC']
    env['LINKFLAGS_cshlib']=['-G']
    env['cshlib_PATTERN']='lib%s.so'
    env['LINKFLAGS_cstlib']=['-Bstatic']
    env['cstlib_PATTERN']='lib%s.a'
def configure(conf):
    """Configuration entry point for the suncc tool."""
    for step in(conf.find_scc,conf.find_ar,conf.scc_common_flags,conf.cc_load_tools,conf.cc_add_flags,conf.link_add_flags):
        step()
diff --git a/waflib/Tools/suncxx.py b/waflib/Tools/suncxx.py
new file mode 100644
index 0000000..4b8b931
--- /dev/null
+++ b/waflib/Tools/suncxx.py
@@ -0,0 +1,54 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os
+from waflib import Utils
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
@conf
def find_sxx(conf):
    """Locate the Sun C++ compiler (env CXX, $CXX, `CC` or `c++` on PATH) and sanity-check it."""
    v=conf.env
    cc=v['CXX'] or conf.environ.get('CXX')
    if not cc:
        cc=conf.find_program('CC',var='CXX')
    if not cc:
        cc=conf.find_program('c++',var='CXX')
    if not cc:
        conf.fatal('Could not find a Sun C++ compiler')
    cc=conf.cmd_to_list(cc)
    try:
        conf.cmd_and_log(cc+['-flags'])
    except Exception:
        conf.fatal('%r is not a Sun compiler'%cc)
    v['CXX']=cc
    v['CXX_NAME']='sun'
@conf
def sxx_common_flags(conf):
    """Define the flag-name templates and file patterns used by the Sun C++ compiler."""
    env=conf.env
    env['CXX_SRC_F']=[]
    env['CXX_TGT_F']=['-c','-o']
    if not env['LINK_CXX']:
        env['LINK_CXX']=env['CXX']
    env['CXXLNK_SRC_F']=[]
    env['CXXLNK_TGT_F']=['-o']
    # argument templates for include paths, defines and libraries
    env['CPPPATH_ST']='-I%s'
    env['DEFINES_ST']='-D%s'
    env['LIB_ST']='-l%s'
    env['LIBPATH_ST']='-L%s'
    env['STLIB_ST']='-l%s'
    env['STLIBPATH_ST']='-L%s'
    env['SONAME_ST']='-Wl,-h,%s'
    env['SHLIB_MARKER']='-Bdynamic'
    env['STLIB_MARKER']='-Bstatic'
    # output naming patterns and shared/static library flags
    env['cxxprogram_PATTERN']='%s'
    env['CXXFLAGS_cxxshlib']=['-Kpic','-DPIC']
    env['LINKFLAGS_cxxshlib']=['-G']
    env['cxxshlib_PATTERN']='lib%s.so'
    env['LINKFLAGS_cxxstlib']=['-Bstatic']
    env['cxxstlib_PATTERN']='lib%s.a'
def configure(conf):
    """Configuration entry point for the suncxx tool."""
    for step in(conf.find_sxx,conf.find_ar,conf.sxx_common_flags,conf.cxx_load_tools,conf.cxx_add_flags,conf.link_add_flags):
        step()
diff --git a/waflib/Tools/tex.py b/waflib/Tools/tex.py
new file mode 100644
index 0000000..4448381
--- /dev/null
+++ b/waflib/Tools/tex.py
@@ -0,0 +1,250 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,re
+from waflib import Utils,Task,Errors,Logs
+from waflib.TaskGen import feature,before_method
+re_bibunit=re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M)
def bibunitscan(self):
    """Scan the task input for \\putbib[...] references and return the bib nodes found."""
    node=self.inputs[0]
    nodes=[]
    if not node:
        return nodes
    text=node.read()
    for match in re_bibunit.finditer(text):
        path=match.group('file')
        if not path:
            continue
        for suffix in('','.bib'):
            Logs.debug('tex: trying %s%s'%(path,suffix))
            candidate=node.parent.find_resource(path+suffix)
            if candidate:
                nodes.append(candidate)
            else:
                Logs.debug('tex: could not find %s'%path)
    Logs.debug("tex: found the following bibunit files: %s"%nodes)
    return nodes
# candidate extensions tried when resolving a file referenced from a tex source
exts_deps_tex=['','.ltx','.tex','.bib','.pdf','.png','.eps','.ps']
# extensions treated as tex sources and scanned recursively
exts_tex=['.ltx','.tex']
# matches the tex commands that pull in other files (\include, \bibliography, ...)
re_tex=re.compile(r'\\(?P<type>include|bibliography|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
# an aux file containing 'bibdata' indicates that bibtex must be run
g_bibtex_re=re.compile('bibdata',re.M)
class tex(Task.Task):
    """Base class for the latex/pdflatex/xelatex tasks.

    Runs the tool repeatedly (at most 10 passes) until the .aux checksums
    stabilise, invoking bibtex/makeindex between passes as needed.
    """
    # helpers pre-compiled from command templates; they use self.env at call time
    bibtex_fun,_=Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}',shell=False)
    bibtex_fun.__doc__="""
    Execute the program **bibtex**
    """
    makeindex_fun,_=Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}',shell=False)
    makeindex_fun.__doc__="""
    Execute the program **makeindex**
    """
    def exec_command(self,cmd,**kw):
        # Run *cmd* in the task cwd (falling back to the variant dir) and wait.
        bld=self.generator.bld
        try:
            if not kw.get('cwd',None):
                kw['cwd']=bld.cwd
        except AttributeError:
            bld.cwd=kw['cwd']=bld.variant_dir
        return Utils.subprocess.Popen(cmd,**kw).wait()
    def scan_aux(self,node):
        # Recursively collect the aux nodes referenced through \@input{...}.
        nodes=[node]
        re_aux=re.compile(r'\\@input{(?P<file>[^{}]*)}',re.M)
        def parse_node(node):
            code=node.read()
            for match in re_aux.finditer(code):
                path=match.group('file')
                found=node.parent.find_or_declare(path)
                if found and found not in nodes:
                    Logs.debug('tex: found aux node '+found.abspath())
                    nodes.append(found)
                    parse_node(found)
        parse_node(node)
        return nodes
    def scan(self):
        # Find the dependencies of the main tex file.
        # Returns (nodes, names): resolved nodes plus unresolved raw names.
        node=self.inputs[0]
        nodes=[]
        names=[]
        seen=[]
        if not node:return(nodes,names)
        def parse_node(node):
            if node in seen:
                return
            seen.append(node)
            code=node.read()
            global re_tex
            for match in re_tex.finditer(code):
                for path in match.group('file').split(','):
                    if path:
                        add_name=True
                        found=None
                        for k in exts_deps_tex:
                            Logs.debug('tex: trying %s%s'%(path,k))
                            found=node.parent.find_resource(path+k)
                            if found and not found in self.outputs:
                                nodes.append(found)
                                add_name=False
                                for ext in exts_tex:
                                    # recurse into other tex sources
                                    if found.name.endswith(ext):
                                        parse_node(found)
                                        break
                        if add_name:
                            names.append(path)
        parse_node(node)
        for x in nodes:
            # corresponding build directories may not exist yet
            x.parent.get_bld().mkdir()
        Logs.debug("tex: found the following : %s and names %s"%(nodes,names))
        return(nodes,names)
    def check_status(self,msg,retcode):
        # Raise a WafError when a subprocess exits with a non-zero status.
        if retcode!=0:
            raise Errors.WafError("%r command exit status %r"%(msg,retcode))
    def bibfile(self):
        # Run bibtex when any aux file references \bibdata.
        need_bibtex=False
        try:
            for aux_node in self.aux_nodes:
                ct=aux_node.read()
                if g_bibtex_re.findall(ct):
                    need_bibtex=True
                    break
        except(OSError,IOError):
            Logs.error('error bibtex scan')
        else:
            if need_bibtex:
                Logs.warn('calling bibtex')
                self.env.env={}
                self.env.env.update(os.environ)
                self.env.env.update({'BIBINPUTS':self.TEXINPUTS,'BSTINPUTS':self.TEXINPUTS})
                # bibtex expects the aux file name without its .aux extension
                self.env.SRCFILE=self.aux_nodes[0].name[:-4]
                self.check_status('error when calling bibtex',self.bibtex_fun())
    def bibunits(self):
        # Run bibtex once per bibunit file (bu1, bu2, ...) found by bibunitscan.
        try:
            bibunits=bibunitscan(self)
        except OSError:
            Logs.error('error bibunitscan')
        else:
            if bibunits:
                fn=['bu'+str(i)for i in xrange(1,len(bibunits)+1)]
                if fn:
                    Logs.warn('calling bibtex on bibunits')
                    for f in fn:
                        self.env.env={'BIBINPUTS':self.TEXINPUTS,'BSTINPUTS':self.TEXINPUTS}
                        self.env.SRCFILE=f
                        self.check_status('error when calling bibtex',self.bibtex_fun())
    def makeindex(self):
        # Run makeindex when the .idx file exists.
        try:
            idx_path=self.idx_node.abspath()
            os.stat(idx_path)
        except OSError:
            Logs.warn('index file %s absent, not calling makeindex'%idx_path)
        else:
            Logs.warn('calling makeindex')
            self.env.SRCFILE=self.idx_node.name
            self.env.env={}
            self.check_status('error when calling makeindex %s'%idx_path,self.makeindex_fun())
    def run(self):
        """Run the tex tool, then bibtex/makeindex, then re-run until stable."""
        env=self.env
        if not env['PROMPT_LATEX']:
            env.append_value('LATEXFLAGS','-interaction=batchmode')
            env.append_value('PDFLATEXFLAGS','-interaction=batchmode')
            env.append_value('XELATEXFLAGS','-interaction=batchmode')
        fun=self.texfun
        node=self.inputs[0]
        srcfile=node.abspath()
        texinputs=self.env.TEXINPUTS or''
        self.TEXINPUTS=node.parent.get_bld().abspath()+os.pathsep+node.parent.get_src().abspath()+os.pathsep+texinputs+os.pathsep
        self.cwd=self.inputs[0].parent.get_bld().abspath()
        Logs.warn('first pass on %s'%self.__class__.__name__)
        self.env.env={}
        self.env.env.update(os.environ)
        self.env.env.update({'TEXINPUTS':self.TEXINPUTS})
        self.env.SRCFILE=srcfile
        self.check_status('error when calling latex',fun())
        self.aux_nodes=self.scan_aux(node.change_ext('.aux'))
        self.idx_node=node.change_ext('.idx')
        self.bibfile()
        self.bibunits()
        self.makeindex()
        hash=''
        for i in range(10):
            # re-run until the aux checksums stop changing (fixed point)
            prev_hash=hash
            try:
                hashes=[Utils.h_file(x.abspath())for x in self.aux_nodes]
                hash=Utils.h_list(hashes)
            except(OSError,IOError):
                Logs.error('could not read aux.h')
                pass
            if hash and hash==prev_hash:
                break
            Logs.warn('calling %s'%self.__class__.__name__)
            self.env.env={}
            self.env.env.update(os.environ)
            self.env.env.update({'TEXINPUTS':self.TEXINPUTS})
            self.env.SRCFILE=srcfile
            self.check_status('error when calling %s'%self.__class__.__name__,fun())
class latex(tex):
    # tex subclass driving ${LATEX}; produces .dvi output
    texfun,vars=Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}',shell=False)
class pdflatex(tex):
    # tex subclass driving ${PDFLATEX}; produces .pdf output
    texfun,vars=Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}',shell=False)
class xelatex(tex):
    # tex subclass driving ${XELATEX}; produces .pdf output
    texfun,vars=Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}',shell=False)
class dvips(Task.Task):
    # convert .dvi to postscript; must run after the tex tasks
    run_str='${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}'
    color='BLUE'
    after=['latex','pdflatex','xelatex']
class dvipdf(Task.Task):
    # convert .dvi to pdf; must run after the tex tasks
    run_str='${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}'
    color='BLUE'
    after=['latex','pdflatex','xelatex']
class pdf2ps(Task.Task):
    # convert .pdf to postscript; must run after the tex tasks
    run_str='${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}'
    color='BLUE'
    after=['latex','pdflatex','xelatex']
@feature('tex')
@before_method('process_source')
def apply_tex(self):
    """Create tex tasks for the source files, plus optional ps/pdf conversions.

    Attributes honoured on the task generator: type ('latex'/'pdflatex'/'xelatex',
    default 'pdflatex'), outs ('ps' and/or 'pdf'), prompt, deps.
    """
    if not getattr(self,'type',None)in['latex','pdflatex','xelatex']:
        self.type='pdflatex'
    tree=self.bld
    outs=Utils.to_list(getattr(self,'outs',[]))
    self.env['PROMPT_LATEX']=getattr(self,'prompt',1)
    deps_lst=[]
    if getattr(self,'deps',None):
        # manual dependencies declared on the task generator
        deps=self.to_list(self.deps)
        for filename in deps:
            n=self.path.find_resource(filename)
            if not n:
                self.bld.fatal('Could not find %r for %r'%(filename,self))
            if not n in deps_lst:
                deps_lst.append(n)
    for node in self.to_nodes(self.source):
        if self.type=='latex':
            task=self.create_task('latex',node,node.change_ext('.dvi'))
        elif self.type=='pdflatex':
            task=self.create_task('pdflatex',node,node.change_ext('.pdf'))
        elif self.type=='xelatex':
            task=self.create_task('xelatex',node,node.change_ext('.pdf'))
        task.env=self.env
        if deps_lst:
            # merge the manual dependencies into the stored node deps
            try:
                lst=tree.node_deps[task.uid()]
                for n in deps_lst:
                    if not n in lst:
                        lst.append(n)
            except KeyError:
                tree.node_deps[task.uid()]=deps_lst
        if self.type=='latex':
            if'ps'in outs:
                tsk=self.create_task('dvips',task.outputs,node.change_ext('.ps'))
                tsk.env.env={'TEXINPUTS':node.parent.abspath()+os.pathsep+self.path.abspath()+os.pathsep+self.path.get_bld().abspath()}
            if'pdf'in outs:
                tsk=self.create_task('dvipdf',task.outputs,node.change_ext('.pdf'))
                tsk.env.env={'TEXINPUTS':node.parent.abspath()+os.pathsep+self.path.abspath()+os.pathsep+self.path.get_bld().abspath()}
        elif self.type=='pdflatex':
            if'ps'in outs:
                self.create_task('pdf2ps',task.outputs,node.change_ext('.ps'))
    # the sources were all consumed by the tasks above
    self.source=[]
def configure(self):
    """Detect the tex-related programs; each one is optional."""
    v=self.env
    programs='tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'
    for name in programs.split():
        try:
            self.find_program(name,var=name.upper())
        except self.errors.ConfigurationError:
            pass  # missing tools simply stay unset
    v['DVIPSFLAGS']='-Ppdf'
diff --git a/waflib/Tools/vala.py b/waflib/Tools/vala.py
new file mode 100644
index 0000000..96248c1
--- /dev/null
+++ b/waflib/Tools/vala.py
@@ -0,0 +1,201 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os.path,shutil,re
+from waflib import Context,Task,Utils,Logs,Options,Errors
+from waflib.TaskGen import extension,taskgen_method
+from waflib.Configure import conf
class valac(Task.Task):
    """Compile vala files into C sources; the C tool chain then builds those."""
    vars=["VALAC","VALAC_VERSION","VALAFLAGS"]
    ext_out=['.h']
    def run(self):
        cmd=[self.env['VALAC']]+self.env['VALAFLAGS']
        cmd.extend([a.abspath()for a in self.inputs])
        ret=self.exec_command(cmd,cwd=self.outputs[0].parent.abspath())
        if ret:
            return ret
        # valac writes everything next to the first output: relocate files
        # that belong to another directory
        for x in self.outputs:
            if id(x.parent)!=id(self.outputs[0].parent):
                shutil.move(self.outputs[0].parent.abspath()+os.sep+x.name,x.abspath())
        if self.generator.dump_deps_node:
            self.generator.dump_deps_node.write('\n'.join(self.generator.packages))
        return ret
# keep the task signature in sync with the files actually produced
valac=Task.update_outputs(valac)
@taskgen_method
def init_vala_task(self):
    """Configure the single valac task of this task generator.

    Reads the optional attributes profile, threading, gir, vala_target_glib,
    vala_defines, packages, packages_private, vapi_dirs, use, install_path,
    header_path, vapi_path, pkg_name and install_binding.
    """
    self.profile=getattr(self,'profile','gobject')
    if self.profile=='gobject':
        self.uselib=Utils.to_list(getattr(self,'uselib',[]))
        if not'GOBJECT'in self.uselib:
            self.uselib.append('GOBJECT')
    def addflags(flags):
        # accumulate valac command-line flags on the environment
        self.env.append_value('VALAFLAGS',flags)
    if self.profile:
        addflags('--profile=%s'%self.profile)
    if hasattr(self,'threading'):
        # threading support is only meaningful with the gobject profile
        if self.profile=='gobject':
            if not'GTHREAD'in self.uselib:
                self.uselib.append('GTHREAD')
        else:
            Logs.warn("Profile %s means no threading support"%self.profile)
            self.threading=False
        if self.threading:
            addflags('--threading')
    valatask=self.valatask
    self.is_lib='cprogram'not in self.features
    if self.is_lib:
        # libraries also produce a header, a vapi and possibly a gir file
        addflags('--library=%s'%self.target)
        h_node=self.path.find_or_declare('%s.h'%self.target)
        valatask.outputs.append(h_node)
        addflags('--header=%s'%h_node.name)
        valatask.outputs.append(self.path.find_or_declare('%s.vapi'%self.target))
        if getattr(self,'gir',None):
            gir_node=self.path.find_or_declare('%s.gir'%self.gir)
            addflags('--gir=%s'%gir_node.name)
            valatask.outputs.append(gir_node)
    self.vala_target_glib=getattr(self,'vala_target_glib',getattr(Options.options,'vala_target_glib',None))
    if self.vala_target_glib:
        addflags('--target-glib=%s'%self.vala_target_glib)
    addflags(['--define=%s'%x for x in getattr(self,'vala_defines',[])])
    packages_private=Utils.to_list(getattr(self,'packages_private',[]))
    addflags(['--pkg=%s'%x for x in packages_private])
    def _get_api_version():
        # derive the API version ("0.x" or "x.0") from the wscript's API_VERSION
        api_version='1.0'
        if hasattr(Context.g_module,'API_VERSION'):
            version=Context.g_module.API_VERSION.split(".")
            if version[0]=="0":
                api_version="0."+version[1]
            else:
                api_version=version[0]+".0"
        return api_version
    self.includes=Utils.to_list(getattr(self,'includes',[]))
    self.uselib=self.to_list(getattr(self,'uselib',[]))
    valatask.install_path=getattr(self,'install_path','')
    valatask.vapi_path=getattr(self,'vapi_path','${DATAROOTDIR}/vala/vapi')
    valatask.pkg_name=getattr(self,'pkg_name',self.env['PACKAGE'])
    valatask.header_path=getattr(self,'header_path','${INCLUDEDIR}/%s-%s'%(valatask.pkg_name,_get_api_version()))
    valatask.install_binding=getattr(self,'install_binding',True)
    self.packages=packages=Utils.to_list(getattr(self,'packages',[]))
    self.vapi_dirs=vapi_dirs=Utils.to_list(getattr(self,'vapi_dirs',[]))
    includes=[]
    if hasattr(self,'use'):
        # walk the 'use' graph to collect packages/vapi dirs from used vala libs
        local_packages=Utils.to_list(self.use)[:]
        seen=[]
        while len(local_packages)>0:
            package=local_packages.pop()
            if package in seen:
                continue
            seen.append(package)
            try:
                package_obj=self.bld.get_tgen_by_name(package)
            except Errors.WafError:
                continue
            package_name=package_obj.target
            package_node=package_obj.path
            package_dir=package_node.path_from(self.path)
            for task in package_obj.tasks:
                for output in task.outputs:
                    if output.name==package_name+".vapi":
                        valatask.set_run_after(task)
                        if package_name not in packages:
                            packages.append(package_name)
                        if package_dir not in vapi_dirs:
                            vapi_dirs.append(package_dir)
                        if package_dir not in includes:
                            includes.append(package_dir)
            if hasattr(package_obj,'use'):
                # queue transitive uses, preserving traversal order
                lst=self.to_list(package_obj.use)
                lst.reverse()
                local_packages=[pkg for pkg in lst if pkg not in seen]+local_packages
    addflags(['--pkg=%s'%p for p in packages])
    for vapi_dir in vapi_dirs:
        v_node=self.path.find_dir(vapi_dir)
        if not v_node:
            Logs.warn('Unable to locate Vala API directory: %r'%vapi_dir)
        else:
            addflags('--vapidir=%s'%v_node.abspath())
            addflags('--vapidir=%s'%v_node.get_bld().abspath())
    self.dump_deps_node=None
    if self.is_lib and self.packages:
        # the .deps file lists the packages the generated binding depends on
        self.dump_deps_node=self.path.find_or_declare('%s.deps'%self.target)
        valatask.outputs.append(self.dump_deps_node)
    self.includes.append(self.bld.srcnode.abspath())
    self.includes.append(self.bld.bldnode.abspath())
    for include in includes:
        try:
            self.includes.append(self.path.find_dir(include).abspath())
            self.includes.append(self.path.find_dir(include).get_bld().abspath())
        except AttributeError:
            Logs.warn("Unable to locate include directory: '%s'"%include)
    if self.is_lib and valatask.install_binding:
        # schedule installation of the header, vapi/deps and gir outputs
        headers_list=[o for o in valatask.outputs if o.suffix()==".h"]
        try:
            self.install_vheader.source=headers_list
        except AttributeError:
            self.install_vheader=self.bld.install_files(valatask.header_path,headers_list,self.env)
        vapi_list=[o for o in valatask.outputs if(o.suffix()in(".vapi",".deps"))]
        try:
            self.install_vapi.source=vapi_list
        except AttributeError:
            self.install_vapi=self.bld.install_files(valatask.vapi_path,vapi_list,self.env)
        gir_list=[o for o in valatask.outputs if o.suffix()=='.gir']
        try:
            self.install_gir.source=gir_list
        except AttributeError:
            self.install_gir=self.bld.install_files(getattr(self,'gir_path','${DATAROOTDIR}/gir-1.0'),gir_list,self.env)
@extension('.vala','.gs')
def vala_file(self,node):
    """Register a .vala/.gs source with the (single, lazily created) valac task."""
    if not hasattr(self,'valatask'):
        self.valatask=self.create_task('valac')
        self.init_vala_task()
    valatask=self.valatask
    valatask.inputs.append(node)
    c_node=node.change_ext('.c')
    valatask.outputs.append(c_node)
    # the generated C file is compiled by the C tool chain
    self.source.append(c_node)
@conf
def find_valac(self,valac_name,min_version):
    """Find the program *valac_name* and check its version against *min_version*.

    Stores the detected version tuple (or None) in VALAC_VERSION and returns
    the program. Fails fatally when the detected version is too old.
    """
    valac=self.find_program(valac_name,var='VALAC')
    try:
        output=self.cmd_and_log(valac+' --version')
    except Exception:
        valac_version=None
    else:
        # the dots must be escaped to match literally, and the search may fail
        m=re.search(r'\d+\.\d+\.\d+',output)
        if m:
            valac_version=tuple([int(x)for x in m.group(0).split('.')])
        else:
            valac_version=None
    self.msg('Checking for %s version >= %r'%(valac_name,min_version),valac_version,valac_version and valac_version>=min_version)
    # guard against comparing None to a tuple (TypeError on Python 3)
    if valac and valac_version and valac_version<min_version:
        self.fatal("%s version %r is too old, need >= %r"%(valac_name,valac_version,min_version))
    self.env['VALAC_VERSION']=valac_version
    return valac
@conf
def check_vala(self,min_version=(0,8,0),branch=None):
    """Locate a suitable valac, trying the branch-suffixed name before the plain one."""
    branch=branch or min_version[:2]
    try:
        find_valac(self,'valac-%d.%d'%(branch[0],branch[1]),min_version)
    except self.errors.ConfigurationError:
        find_valac(self,'valac',min_version)
@conf
def check_vala_deps(self):
    """Check the gobject/gthread pkg-config dependencies required by vala code."""
    for have,package,store in(('HAVE_GOBJECT','gobject-2.0','GOBJECT'),('HAVE_GTHREAD','gthread-2.0','GTHREAD')):
        if not self.env[have]:
            pkg_args={'package':package,'uselib_store':store,'args':'--cflags --libs'}
            if getattr(Options.options,'vala_target_glib',None):
                pkg_args['atleast_version']=Options.options.vala_target_glib
            self.check_cfg(**pkg_args)
def configure(self):
    """Configure the vala tool: installation dirs, dependencies, compiler."""
    self.load('gnu_dirs')
    self.check_vala_deps()
    self.check_vala()
    # always produce C code and keep valac quiet
    self.env.VALAFLAGS=['-C','--quiet']
def options(opt):
    """Add the vala-specific command-line options."""
    opt.load('gnu_dirs')
    valaopts=opt.add_option_group('Vala Compiler Options')
    valaopts.add_option('--vala-target-glib',default=None,dest='vala_target_glib',metavar='MAJOR.MINOR',help='Target version of glib for Vala GObject code generation')
diff --git a/waflib/Tools/waf_unit_test.py b/waflib/Tools/waf_unit_test.py
new file mode 100644
index 0000000..f461be1
--- /dev/null
+++ b/waflib/Tools/waf_unit_test.py
@@ -0,0 +1,95 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys
+from waflib.TaskGen import feature,after_method
+from waflib import Utils,Task,Logs,Options
+testlock=Utils.threading.Lock()
@feature('test')
@after_method('apply_link')
def make_test(self):
    """Create a utest task for the binary produced by this task generator."""
    link=getattr(self,'link_task',None)
    if link:
        self.create_task('utest',link.outputs)
class utest(Task.Task):
    """Execute a unit test binary and record (filename, returncode, stdout, stderr)."""
    color='PINK'
    after=['vnum','inst']
    vars=[]
    def runnable_status(self):
        # --notests skips everything; --alltests forces even up-to-date tests.
        if getattr(Options.options,'no_tests',False):
            return Task.SKIP_ME
        ret=super(utest,self).runnable_status()
        if ret==Task.SKIP_ME:
            if getattr(Options.options,'all_tests',False):
                return Task.RUN_ME
        return ret
    def run(self):
        # Run the binary with an augmented library search path; the result is
        # appended to bld.utest_results under a lock (tasks run in parallel).
        filename=self.inputs[0].abspath()
        self.ut_exec=getattr(self.generator,'ut_exec',[filename])
        if getattr(self.generator,'ut_fun',None):
            self.generator.ut_fun(self)
        try:
            fu=getattr(self.generator.bld,'all_test_paths')
        except AttributeError:
            # build the test environment once and cache it on the build context
            fu=os.environ.copy()
            lst=[]
            for g in self.generator.bld.groups:
                for tg in g:
                    if getattr(tg,'link_task',None):
                        lst.append(tg.link_task.outputs[0].parent.abspath())
            def add_path(dct,path,var):
                dct[var]=os.pathsep.join(Utils.to_list(path)+[os.environ.get(var,'')])
            if Utils.is_win32:
                add_path(fu,lst,'PATH')
            elif Utils.unversioned_sys_platform()=='darwin':
                add_path(fu,lst,'DYLD_LIBRARY_PATH')
                add_path(fu,lst,'LD_LIBRARY_PATH')
            else:
                add_path(fu,lst,'LD_LIBRARY_PATH')
            self.generator.bld.all_test_paths=fu
        cwd=getattr(self.generator,'ut_cwd','')or self.inputs[0].parent.abspath()
        testcmd=getattr(Options.options,'testcmd',False)
        if testcmd:
            # wrap the test binary in the user-supplied command (e.g. valgrind)
            self.ut_exec=(testcmd%self.ut_exec[0]).split(' ')
        proc=Utils.subprocess.Popen(self.ut_exec,cwd=cwd,env=fu,stderr=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE)
        (stdout,stderr)=proc.communicate()
        tup=(filename,proc.returncode,stdout,stderr)
        self.generator.utest_result=tup
        testlock.acquire()
        try:
            bld=self.generator.bld
            Logs.debug("ut: %r",tup)
            try:
                bld.utest_results.append(tup)
            except AttributeError:
                bld.utest_results=[tup]
        finally:
            testlock.release()
def summary(bld):
    """Print a pass/fail summary of the unit-test results collected on *bld*."""
    results=getattr(bld,'utest_results',[])
    if not results:
        return
    Logs.pprint('CYAN','execution summary')
    total=len(results)
    tfail=len([x for x in results if x[1]])
    Logs.pprint('CYAN',' tests that pass %d/%d'%(total-tfail,total))
    for(f,code,out,err)in results:
        if not code:
            Logs.pprint('CYAN',' %s'%f)
    Logs.pprint('CYAN',' tests that fail %d/%d'%(tfail,total))
    for(f,code,out,err)in results:
        if code:
            Logs.pprint('CYAN',' %s'%f)
def set_exit_code(bld):
    """Abort the build with the captured output if any unit test failed."""
    for(f,code,out,err)in getattr(bld,'utest_results',[]):
        if not code:
            continue
        parts=[]
        if out:
            parts.append('stdout:%s%s'%(os.linesep,out.decode('utf-8')))
        if err:
            parts.append('stderr:%s%s'%(os.linesep,err.decode('utf-8')))
        bld.fatal(os.linesep.join(parts))
def options(opt):
    """Add the command-line options controlling unit-test execution."""
    opt.add_option('--notests',action='store_true',default=False,help='Exec no unit tests',dest='no_tests')
    opt.add_option('--alltests',action='store_true',default=False,help='Exec all unit tests',dest='all_tests')
    opt.add_option('--testcmd',action='store',default=False,help='Run the unit tests using the test-cmd string'' example "--test-cmd="valgrind --error-exitcode=1'' %s" to run under valgrind',dest='testcmd')
diff --git a/waflib/Tools/winres.py b/waflib/Tools/winres.py
new file mode 100644
index 0000000..88904af
--- /dev/null
+++ b/waflib/Tools/winres.py
@@ -0,0 +1,85 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import re,traceback
+from waflib import Task,Logs,Utils
+from waflib.TaskGen import extension
+from waflib.Tools import c_preproc
@extension('.rc')
def rc_file(self,node):
    """Create a winrc task for a .rc file and register its output for linking."""
    obj_ext='.res' if self.env['WINRC_TGT_F']=='/fo' else '.rc.o'
    rctask=self.create_task('winrc',node,node.change_ext(obj_ext))
    if hasattr(self,'compiled_tasks'):
        self.compiled_tasks.append(rctask)
    else:
        self.compiled_tasks=[rctask]
+re_lines=re.compile('(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|''(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',re.IGNORECASE|re.MULTILINE)
class rc_parser(c_preproc.c_parser):
    """Dependency parser for .rc files, reusing the C preprocessor machinery."""
    def filter_comments(self,filepath):
        # Return (directive, argument) pairs for the preprocessor directives and
        # file-referencing resource statements found in *filepath*.
        code=Utils.readf(filepath)
        if c_preproc.use_trigraphs:
            for(a,b)in c_preproc.trig_def:code=code.split(a).join(b)
        code=c_preproc.re_nl.sub('',code)
        code=c_preproc.re_cpp.sub(c_preproc.repl,code)
        ret=[]
        for m in re.finditer(re_lines,code):
            if m.group(2):
                # preprocessor directive (#include, #define, ...)
                ret.append((m.group(2),m.group(3)))
            else:
                # resource statement referencing a file: treat as an include
                ret.append(('include',m.group(5)))
        return ret
    def addlines(self,node):
        # Push the parsed lines of *node* onto the stack, using the parse cache.
        self.currentnode_stack.append(node.parent)
        filepath=node.abspath()
        self.count_files+=1
        if self.count_files>c_preproc.recursion_limit:
            raise c_preproc.PreprocError("recursion limit exceeded")
        pc=self.parse_cache
        Logs.debug('preproc: reading file %r',filepath)
        try:
            lns=pc[filepath]
        except KeyError:
            pass
        else:
            self.lines.extend(lns)
            return
        try:
            lines=self.filter_comments(filepath)
            lines.append((c_preproc.POPFILE,''))
            lines.reverse()
            pc[filepath]=lines
            self.lines.extend(lines)
        except IOError:
            raise c_preproc.PreprocError("could not read the file %s"%filepath)
        except Exception:
            # parsing problems are not fatal for dependency scanning
            if Logs.verbose>0:
                Logs.error("parsing %s failed"%filepath)
                traceback.print_exc()
class winrc(Task.Task):
    """Compile a windows resource file with windres (gcc) or RC (msvc)."""
    run_str='${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
    color='BLUE'
    def scan(self):
        # Use the rc-specific parser to find included/referenced files.
        tmp=rc_parser(self.generator.includes_nodes)
        tmp.start(self.inputs[0],self.env)
        nodes=tmp.nodes
        names=tmp.names
        if Logs.verbose:
            Logs.debug('deps: deps for %s: %r; unresolved %r'%(str(self),nodes,names))
        return(nodes,names)
def configure(conf):
    """Detect the resource compiler: RC when using msvc, windres otherwise."""
    env=conf.env
    env['WINRC_TGT_F']='-o'
    env['WINRC_SRC_F']='-i'
    if not env.WINRC:
        if env.CC_NAME=='msvc':
            conf.find_program('RC',var='WINRC',path_list=env['PATH'])
            # RC uses a different argument style than windres
            env['WINRC_TGT_F']='/fo'
            env['WINRC_SRC_F']=''
        else:
            conf.find_program('windres',var='WINRC',path_list=env['PATH'])
    if not env.WINRC:
        conf.fatal('winrc was not found!')
    env['WINRCFLAGS']=[]
diff --git a/waflib/Tools/xlc.py b/waflib/Tools/xlc.py
new file mode 100644
index 0000000..fbf0fcf
--- /dev/null
+++ b/waflib/Tools/xlc.py
@@ -0,0 +1,45 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
@conf
def find_xlc(conf):
    """Locate the IBM xlc compiler (thread-safe variant preferred) and record its version."""
    cc=conf.cmd_to_list(conf.find_program(['xlc_r','xlc'],var='CC'))
    conf.get_xlc_version(cc)
    conf.env.CC_NAME='xlc'
    conf.env.CC=cc
@conf
def xlc_common_flags(conf):
    """Define the flag-name templates and file patterns used by xlc."""
    env=conf.env
    env['CC_SRC_F']=[]
    env['CC_TGT_F']=['-c','-o']
    if not env['LINK_CC']:
        env['LINK_CC']=env['CC']
    env['CCLNK_SRC_F']=[]
    env['CCLNK_TGT_F']=['-o']
    # argument templates for include paths, defines and libraries
    env['CPPPATH_ST']='-I%s'
    env['DEFINES_ST']='-D%s'
    env['LIB_ST']='-l%s'
    env['LIBPATH_ST']='-L%s'
    env['STLIB_ST']='-l%s'
    env['STLIBPATH_ST']='-L%s'
    env['RPATH_ST']='-Wl,-rpath,%s'
    env['SONAME_ST']=[]
    env['SHLIB_MARKER']=[]
    env['STLIB_MARKER']=[]
    # output naming patterns and shared/static library flags
    env['LINKFLAGS_cprogram']=['-Wl,-brtl']
    env['cprogram_PATTERN']='%s'
    env['CFLAGS_cshlib']=['-fPIC']
    env['LINKFLAGS_cshlib']=['-G','-Wl,-brtl,-bexpfull']
    env['cshlib_PATTERN']='lib%s.so'
    env['LINKFLAGS_cstlib']=[]
    env['cstlib_PATTERN']='lib%s.a'
def configure(conf):
    """Configuration entry point for the xlc tool."""
    for step in(conf.find_xlc,conf.find_ar,conf.xlc_common_flags,conf.cc_load_tools,conf.cc_add_flags,conf.link_add_flags):
        step()
diff --git a/waflib/Tools/xlcxx.py b/waflib/Tools/xlcxx.py
new file mode 100644
index 0000000..b7efb23
--- /dev/null
+++ b/waflib/Tools/xlcxx.py
@@ -0,0 +1,45 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
@conf
def find_xlcxx(conf):
    """Locate the IBM xlc++ compiler (thread-safe variant preferred) and record its version."""
    cxx=conf.cmd_to_list(conf.find_program(['xlc++_r','xlc++'],var='CXX'))
    conf.get_xlc_version(cxx)
    conf.env.CXX_NAME='xlc++'
    conf.env.CXX=cxx
@conf
def xlcxx_common_flags(conf):
    """Define the flag-name templates and file patterns used by xlc++."""
    env=conf.env
    env['CXX_SRC_F']=[]
    env['CXX_TGT_F']=['-c','-o']
    if not env['LINK_CXX']:
        env['LINK_CXX']=env['CXX']
    env['CXXLNK_SRC_F']=[]
    env['CXXLNK_TGT_F']=['-o']
    # argument templates for include paths, defines and libraries
    env['CPPPATH_ST']='-I%s'
    env['DEFINES_ST']='-D%s'
    env['LIB_ST']='-l%s'
    env['LIBPATH_ST']='-L%s'
    env['STLIB_ST']='-l%s'
    env['STLIBPATH_ST']='-L%s'
    env['RPATH_ST']='-Wl,-rpath,%s'
    env['SONAME_ST']=[]
    env['SHLIB_MARKER']=[]
    env['STLIB_MARKER']=[]
    # output naming patterns and shared/static library flags
    env['LINKFLAGS_cxxprogram']=['-Wl,-brtl']
    env['cxxprogram_PATTERN']='%s'
    env['CXXFLAGS_cxxshlib']=['-fPIC']
    env['LINKFLAGS_cxxshlib']=['-G','-Wl,-brtl,-bexpfull']
    env['cxxshlib_PATTERN']='lib%s.so'
    env['LINKFLAGS_cxxstlib']=[]
    env['cxxstlib_PATTERN']='lib%s.a'
def configure(conf):
    """Configuration entry point for the xlc++ tool."""
    for step in(conf.find_xlcxx,conf.find_ar,conf.xlcxx_common_flags,conf.cxx_load_tools,conf.cxx_add_flags,conf.link_add_flags):
        step()
diff --git a/waflib/Utils.py b/waflib/Utils.py
new file mode 100644
index 0000000..9532a3b
--- /dev/null
+++ b/waflib/Utils.py
@@ -0,0 +1,412 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys,errno,traceback,inspect,re,shutil,datetime,gc
+import subprocess
+try:
+ from collections import deque
+except ImportError:
+ class deque(list):
+ def popleft(self):
+ return self.pop(0)
+try:
+ import _winreg as winreg
+except ImportError:
+ try:
+ import winreg
+ except ImportError:
+ winreg=None
+from waflib import Errors
+try:
+ from collections import UserDict
+except ImportError:
+ from UserDict import UserDict
+try:
+ from hashlib import md5
+except ImportError:
+ try:
+ from md5 import md5
+ except ImportError:
+ pass
+try:
+ import threading
+except ImportError:
+ class threading(object):
+ pass
+ class Lock(object):
+ def acquire(self):
+ pass
+ def release(self):
+ pass
+ threading.Lock=threading.Thread=Lock
+else:
+ run_old=threading.Thread.run
+ def run(*args,**kwargs):
+ try:
+ run_old(*args,**kwargs)
+ except(KeyboardInterrupt,SystemExit):
+ raise
+ except Exception:
+ sys.excepthook(*sys.exc_info())
+ threading.Thread.run=run
+SIG_NIL='iluvcuteoverload'
+O644=420
+O755=493
+rot_chr=['\\','|','/','-']
+rot_idx=0
+try:
+ from collections import defaultdict
+except ImportError:
+ class defaultdict(dict):
+ def __init__(self,default_factory):
+ super(defaultdict,self).__init__()
+ self.default_factory=default_factory
+ def __getitem__(self,key):
+ try:
+ return super(defaultdict,self).__getitem__(key)
+ except KeyError:
+ value=self.default_factory()
+ self[key]=value
+ return value
+is_win32=sys.platform in('win32','cli')
+indicator='\x1b[K%s%s%s\r'
+if is_win32 and'NOCOLOR'in os.environ:
+ indicator='%s%s%s\r'
+def readf(fname,m='r',encoding='ISO8859-1'):
+ if sys.hexversion>0x3000000 and not'b'in m:
+ m+='b'
+ f=open(fname,m)
+ try:
+ txt=f.read()
+ finally:
+ f.close()
+ txt=txt.decode(encoding)
+ else:
+ f=open(fname,m)
+ try:
+ txt=f.read()
+ finally:
+ f.close()
+ return txt
+def writef(fname,data,m='w',encoding='ISO8859-1'):
+ if sys.hexversion>0x3000000 and not'b'in m:
+ data=data.encode(encoding)
+ m+='b'
+ f=open(fname,m)
+ try:
+ f.write(data)
+ finally:
+ f.close()
+def h_file(fname):
+ f=open(fname,'rb')
+ m=md5()
+ try:
+ while fname:
+ fname=f.read(200000)
+ m.update(fname)
+ finally:
+ f.close()
+ return m.digest()
+if hasattr(os,'O_NOINHERIT'):
+ def readf_win32(f,m='r',encoding='ISO8859-1'):
+ flags=os.O_NOINHERIT|os.O_RDONLY
+ if'b'in m:
+ flags|=os.O_BINARY
+ if'+'in m:
+ flags|=os.O_RDWR
+ try:
+ fd=os.open(f,flags)
+ except OSError:
+ raise IOError('Cannot read from %r'%f)
+ if sys.hexversion>0x3000000 and not'b'in m:
+ m+='b'
+ f=os.fdopen(fd,m)
+ try:
+ txt=f.read()
+ finally:
+ f.close()
+ txt=txt.decode(encoding)
+ else:
+ f=os.fdopen(fd,m)
+ try:
+ txt=f.read()
+ finally:
+ f.close()
+ return txt
+ def writef_win32(f,data,m='w',encoding='ISO8859-1'):
+ if sys.hexversion>0x3000000 and not'b'in m:
+ data=data.encode(encoding)
+ m+='b'
+ flags=os.O_CREAT|os.O_TRUNC|os.O_WRONLY|os.O_NOINHERIT
+ if'b'in m:
+ flags|=os.O_BINARY
+ if'+'in m:
+ flags|=os.O_RDWR
+ try:
+ fd=os.open(f,flags)
+ except OSError:
+ raise IOError('Cannot write to %r'%f)
+ f=os.fdopen(fd,m)
+ try:
+ f.write(data)
+ finally:
+ f.close()
+ def h_file_win32(fname):
+ try:
+ fd=os.open(fname,os.O_BINARY|os.O_RDONLY|os.O_NOINHERIT)
+ except OSError:
+ raise IOError('Cannot read from %r'%fname)
+ f=os.fdopen(fd,'rb')
+ m=md5()
+ try:
+ while fname:
+ fname=f.read(200000)
+ m.update(fname)
+ finally:
+ f.close()
+ return m.digest()
+ readf_old=readf
+ writef_old=writef
+ h_file_old=h_file
+ readf=readf_win32
+ writef=writef_win32
+ h_file=h_file_win32
+try:
+ x=''.encode('hex')
+except LookupError:
+ import binascii
+ def to_hex(s):
+ ret=binascii.hexlify(s)
+ if not isinstance(ret,str):
+ ret=ret.decode('utf-8')
+ return ret
+else:
+ def to_hex(s):
+ return s.encode('hex')
+to_hex.__doc__="""
+Return the hexadecimal representation of a string
+
+:param s: string to convert
+:type s: string
+"""
+listdir=os.listdir
+if is_win32:
+ def listdir_win32(s):
+ if not s:
+ try:
+ import ctypes
+ except ImportError:
+ return[x+':\\'for x in list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')]
+ else:
+ dlen=4
+ maxdrives=26
+ buf=ctypes.create_string_buffer(maxdrives*dlen)
+ ndrives=ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen,ctypes.byref(buf))
+ return[str(buf.raw[4*i:4*i+2].decode('ascii'))for i in range(int(ndrives/dlen))]
+ if len(s)==2 and s[1]==":":
+ s+=os.sep
+ if not os.path.isdir(s):
+ e=OSError('%s is not a directory'%s)
+ e.errno=errno.ENOENT
+ raise e
+ return os.listdir(s)
+ listdir=listdir_win32
+def num2ver(ver):
+ if isinstance(ver,str):
+ ver=tuple(ver.split('.'))
+ if isinstance(ver,tuple):
+ ret=0
+ for i in range(4):
+ if i<len(ver):
+ ret+=256**(3-i)*int(ver[i])
+ return ret
+ return ver
+def ex_stack():
+ exc_type,exc_value,tb=sys.exc_info()
+ exc_lines=traceback.format_exception(exc_type,exc_value,tb)
+ return''.join(exc_lines)
+def to_list(sth):
+ if isinstance(sth,str):
+ return sth.split()
+ else:
+ return sth
+re_nl=re.compile('\r*\n',re.M)
+def str_to_dict(txt):
+ tbl={}
+ lines=re_nl.split(txt)
+ for x in lines:
+ x=x.strip()
+ if not x or x.startswith('#')or x.find('=')<0:
+ continue
+ tmp=x.split('=')
+ tbl[tmp[0].strip()]='='.join(tmp[1:]).strip()
+ return tbl
+def split_path(path):
+ return path.split('/')
+def split_path_cygwin(path):
+ if path.startswith('//'):
+ ret=path.split('/')[2:]
+ ret[0]='/'+ret[0]
+ return ret
+ return path.split('/')
+re_sp=re.compile('[/\\\\]')
+def split_path_win32(path):
+ if path.startswith('\\\\'):
+ ret=re.split(re_sp,path)[2:]
+ ret[0]='\\'+ret[0]
+ return ret
+ return re.split(re_sp,path)
+if sys.platform=='cygwin':
+ split_path=split_path_cygwin
+elif is_win32:
+ split_path=split_path_win32
+split_path.__doc__="""
+Split a path by / or \\. This function is not like os.path.split
+
+:type path: string
+:param path: path to split
+:return: list of strings
+"""
+def check_dir(path):
+ if not os.path.isdir(path):
+ try:
+ os.makedirs(path)
+ except OSError ,e:
+ if not os.path.isdir(path):
+ raise Errors.WafError('Cannot create the folder %r'%path,ex=e)
+def def_attrs(cls,**kw):
+ for k,v in kw.items():
+ if not hasattr(cls,k):
+ setattr(cls,k,v)
+def quote_define_name(s):
+ fu=re.compile("[^a-zA-Z0-9]").sub("_",s)
+ fu=fu.upper()
+ return fu
+def h_list(lst):
+ m=md5()
+ m.update(str(lst))
+ return m.digest()
+def h_fun(fun):
+ try:
+ return fun.code
+ except AttributeError:
+ try:
+ h=inspect.getsource(fun)
+ except IOError:
+ h="nocode"
+ try:
+ fun.code=h
+ except AttributeError:
+ pass
+ return h
+reg_subst=re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
+def subst_vars(expr,params):
+ def repl_var(m):
+ if m.group(1):
+ return'\\'
+ if m.group(2):
+ return'$'
+ try:
+ return params.get_flat(m.group(3))
+ except AttributeError:
+ return params[m.group(3)]
+ return reg_subst.sub(repl_var,expr)
+def destos_to_binfmt(key):
+ if key=='darwin':
+ return'mac-o'
+ elif key in('win32','cygwin','uwin','msys'):
+ return'pe'
+ return'elf'
+def unversioned_sys_platform():
+ s=sys.platform
+ if s=='java':
+ from java.lang import System
+ s=System.getProperty('os.name')
+ if s=='Mac OS X':
+ return'darwin'
+ elif s.startswith('Windows '):
+ return'win32'
+ elif s=='OS/2':
+ return'os2'
+ elif s=='HP-UX':
+ return'hpux'
+ elif s in('SunOS','Solaris'):
+ return'sunos'
+ else:s=s.lower()
+ if s=='powerpc':
+ return'darwin'
+ if s=='win32'or s.endswith('os2')and s!='sunos2':return s
+ return re.split('\d+$',s)[0]
+def nada(*k,**kw):
+ pass
+class Timer(object):
+ def __init__(self):
+ self.start_time=datetime.datetime.utcnow()
+ def __str__(self):
+ delta=datetime.datetime.utcnow()-self.start_time
+ days=int(delta.days)
+ hours=delta.seconds//3600
+ minutes=(delta.seconds-hours*3600)//60
+ seconds=delta.seconds-hours*3600-minutes*60+float(delta.microseconds)/1000/1000
+ result=''
+ if days:
+ result+='%dd'%days
+ if days or hours:
+ result+='%dh'%hours
+ if days or hours or minutes:
+ result+='%dm'%minutes
+ return'%s%.3fs'%(result,seconds)
+if is_win32:
+ old=shutil.copy2
+ def copy2(src,dst):
+ old(src,dst)
+ shutil.copystat(src,dst)
+ setattr(shutil,'copy2',copy2)
+if os.name=='java':
+ try:
+ gc.disable()
+ gc.enable()
+ except NotImplementedError:
+ gc.disable=gc.enable
+def read_la_file(path):
+ sp=re.compile(r'^([^=]+)=\'(.*)\'$')
+ dc={}
+ for line in readf(path).splitlines():
+ try:
+ _,left,right,_=sp.split(line.strip())
+ dc[left]=right
+ except ValueError:
+ pass
+ return dc
+def nogc(fun):
+ def f(*k,**kw):
+ try:
+ gc.disable()
+ ret=fun(*k,**kw)
+ finally:
+ gc.enable()
+ return ret
+ f.__doc__=fun.__doc__
+ return f
+def run_once(fun):
+ cache={}
+ def wrap(k):
+ try:
+ return cache[k]
+ except KeyError:
+ ret=fun(k)
+ cache[k]=ret
+ return ret
+ wrap.__cache__=cache
+ return wrap
+def get_registry_app_path(key,filename):
+ if not winreg:
+ return None
+ try:
+ result=winreg.QueryValue(key,"Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe"%filename[0])
+ except WindowsError:
+ pass
+ else:
+ if os.path.isfile(result):
+ return result
diff --git a/waflib/__init__.py b/waflib/__init__.py
new file mode 100644
index 0000000..efeed79
--- /dev/null
+++ b/waflib/__init__.py
@@ -0,0 +1,4 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
diff --git a/waflib/ansiterm.py b/waflib/ansiterm.py
new file mode 100644
index 0000000..0de6171
--- /dev/null
+++ b/waflib/ansiterm.py
@@ -0,0 +1,177 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import sys,os
+try:
+ if not(sys.stderr.isatty()and sys.stdout.isatty()):
+ raise ValueError('not a tty')
+ from ctypes import*
+ class COORD(Structure):
+ _fields_=[("X",c_short),("Y",c_short)]
+ class SMALL_RECT(Structure):
+ _fields_=[("Left",c_short),("Top",c_short),("Right",c_short),("Bottom",c_short)]
+ class CONSOLE_SCREEN_BUFFER_INFO(Structure):
+ _fields_=[("Size",COORD),("CursorPosition",COORD),("Attributes",c_short),("Window",SMALL_RECT),("MaximumWindowSize",COORD)]
+ class CONSOLE_CURSOR_INFO(Structure):
+ _fields_=[('dwSize',c_ulong),('bVisible',c_int)]
+ sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
+ csinfo=CONSOLE_CURSOR_INFO()
+ hconsole=windll.kernel32.GetStdHandle(-11)
+ windll.kernel32.GetConsoleScreenBufferInfo(hconsole,byref(sbinfo))
+ if sbinfo.Size.X<9 or sbinfo.Size.Y<9:raise ValueError('small console')
+ windll.kernel32.GetConsoleCursorInfo(hconsole,byref(csinfo))
+except Exception:
+ pass
+else:
+ import re,threading
+ is_vista=getattr(sys,"getwindowsversion",None)and sys.getwindowsversion()[0]>=6
+ try:
+ _type=unicode
+ except NameError:
+ _type=str
+ to_int=lambda number,default:number and int(number)or default
+ wlock=threading.Lock()
+ STD_OUTPUT_HANDLE=-11
+ STD_ERROR_HANDLE=-12
+ class AnsiTerm(object):
+ def __init__(self):
+ self.encoding=sys.stdout.encoding
+ self.hconsole=windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
+ self.cursor_history=[]
+ self.orig_sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
+ self.orig_csinfo=CONSOLE_CURSOR_INFO()
+ windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self.orig_sbinfo))
+ windll.kernel32.GetConsoleCursorInfo(hconsole,byref(self.orig_csinfo))
+ def screen_buffer_info(self):
+ sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
+ windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(sbinfo))
+ return sbinfo
+ def clear_line(self,param):
+ mode=param and int(param)or 0
+ sbinfo=self.screen_buffer_info()
+ if mode==1:
+ line_start=COORD(0,sbinfo.CursorPosition.Y)
+ line_length=sbinfo.Size.X
+ elif mode==2:
+ line_start=COORD(sbinfo.CursorPosition.X,sbinfo.CursorPosition.Y)
+ line_length=sbinfo.Size.X-sbinfo.CursorPosition.X
+ else:
+ line_start=sbinfo.CursorPosition
+ line_length=sbinfo.Size.X-sbinfo.CursorPosition.X
+ chars_written=c_int()
+ windll.kernel32.FillConsoleOutputCharacterA(self.hconsole,c_wchar(' '),line_length,line_start,byref(chars_written))
+ windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,line_length,line_start,byref(chars_written))
+ def clear_screen(self,param):
+ mode=to_int(param,0)
+ sbinfo=self.screen_buffer_info()
+ if mode==1:
+ clear_start=COORD(0,0)
+ clear_length=sbinfo.CursorPosition.X*sbinfo.CursorPosition.Y
+ elif mode==2:
+ clear_start=COORD(0,0)
+ clear_length=sbinfo.Size.X*sbinfo.Size.Y
+ windll.kernel32.SetConsoleCursorPosition(self.hconsole,clear_start)
+ else:
+ clear_start=sbinfo.CursorPosition
+ clear_length=((sbinfo.Size.X-sbinfo.CursorPosition.X)+sbinfo.Size.X*(sbinfo.Size.Y-sbinfo.CursorPosition.Y))
+ chars_written=c_int()
+ windll.kernel32.FillConsoleOutputCharacterA(self.hconsole,c_wchar(' '),clear_length,clear_start,byref(chars_written))
+ windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,clear_length,clear_start,byref(chars_written))
+ def push_cursor(self,param):
+ sbinfo=self.screen_buffer_info()
+ self.cursor_history.append(sbinfo.CursorPosition)
+ def pop_cursor(self,param):
+ if self.cursor_history:
+ old_pos=self.cursor_history.pop()
+ windll.kernel32.SetConsoleCursorPosition(self.hconsole,old_pos)
+ def set_cursor(self,param):
+ y,sep,x=param.partition(';')
+ x=to_int(x,1)-1
+ y=to_int(y,1)-1
+ sbinfo=self.screen_buffer_info()
+ new_pos=COORD(min(max(0,x),sbinfo.Size.X),min(max(0,y),sbinfo.Size.Y))
+ windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos)
+ def set_column(self,param):
+ x=to_int(param,1)-1
+ sbinfo=self.screen_buffer_info()
+ new_pos=COORD(min(max(0,x),sbinfo.Size.X),sbinfo.CursorPosition.Y)
+ windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos)
+ def move_cursor(self,x_offset=0,y_offset=0):
+ sbinfo=self.screen_buffer_info()
+ new_pos=COORD(min(max(0,sbinfo.CursorPosition.X+x_offset),sbinfo.Size.X),min(max(0,sbinfo.CursorPosition.Y+y_offset),sbinfo.Size.Y))
+ windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos)
+ def move_up(self,param):
+ self.move_cursor(y_offset=-to_int(param,1))
+ def move_down(self,param):
+ self.move_cursor(y_offset=to_int(param,1))
+ def move_left(self,param):
+ self.move_cursor(x_offset=-to_int(param,1))
+ def move_right(self,param):
+ self.move_cursor(x_offset=to_int(param,1))
+ def next_line(self,param):
+ sbinfo=self.screen_buffer_info()
+ self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=to_int(param,1))
+ def prev_line(self,param):
+ sbinfo=self.screen_buffer_info()
+ self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=-to_int(param,1))
+ def rgb2bgr(self,c):
+ return((c&1)<<2)|(c&2)|((c&4)>>2)
+ def set_color(self,param):
+ cols=param.split(';')
+ sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
+ windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(sbinfo))
+ attr=sbinfo.Attributes
+ for c in cols:
+ if is_vista:
+ c=int(c)
+ else:
+ c=to_int(c,0)
+ if c in range(30,38):
+ attr=(attr&0xfff0)|self.rgb2bgr(c-30)
+ elif c in range(40,48):
+ attr=(attr&0xff0f)|(self.rgb2bgr(c-40)<<4)
+ elif c==0:
+ attr=self.orig_sbinfo.Attributes
+ elif c==1:
+ attr|=0x08
+ elif c==4:
+ attr|=0x80
+ elif c==7:
+ attr=(attr&0xff88)|((attr&0x70)>>4)|((attr&0x07)<<4)
+ windll.kernel32.SetConsoleTextAttribute(self.hconsole,attr)
+ def show_cursor(self,param):
+ csinfo.bVisible=1
+ windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo))
+ def hide_cursor(self,param):
+ csinfo.bVisible=0
+ windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo))
+ ansi_command_table={'A':move_up,'B':move_down,'C':move_right,'D':move_left,'E':next_line,'F':prev_line,'G':set_column,'H':set_cursor,'f':set_cursor,'J':clear_screen,'K':clear_line,'h':show_cursor,'l':hide_cursor,'m':set_color,'s':push_cursor,'u':pop_cursor,}
+ ansi_tokens=re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
+ def write(self,text):
+ try:
+ wlock.acquire()
+ for param,cmd,txt in self.ansi_tokens.findall(text):
+ if cmd:
+ cmd_func=self.ansi_command_table.get(cmd)
+ if cmd_func:
+ cmd_func(self,param)
+ else:
+ self.writeconsole(txt)
+ finally:
+ wlock.release()
+ def writeconsole(self,txt):
+ chars_written=c_int()
+ writeconsole=windll.kernel32.WriteConsoleA
+ if isinstance(txt,_type):
+ writeconsole=windll.kernel32.WriteConsoleW
+ TINY_STEP=3000
+ for x in range(0,len(txt),TINY_STEP):
+ tiny=txt[x:x+TINY_STEP]
+ writeconsole(self.hconsole,tiny,len(tiny),byref(chars_written),None)
+ def flush(self):
+ pass
+ def isatty(self):
+ return True
+ sys.stderr=sys.stdout=AnsiTerm()
+ os.environ['TERM']='vt100'
diff --git a/waflib/extras/__init__.py b/waflib/extras/__init__.py
new file mode 100644
index 0000000..efeed79
--- /dev/null
+++ b/waflib/extras/__init__.py
@@ -0,0 +1,4 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
diff --git a/waflib/extras/compat15.py b/waflib/extras/compat15.py
new file mode 100644
index 0000000..3343afc
--- /dev/null
+++ b/waflib/extras/compat15.py
@@ -0,0 +1,220 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import sys
+from waflib import ConfigSet,Logs,Options,Scripting,Task,Build,Configure,Node,Runner,TaskGen,Utils,Errors,Context
+sys.modules['Environment']=ConfigSet
+ConfigSet.Environment=ConfigSet.ConfigSet
+sys.modules['Logs']=Logs
+sys.modules['Options']=Options
+sys.modules['Scripting']=Scripting
+sys.modules['Task']=Task
+sys.modules['Build']=Build
+sys.modules['Configure']=Configure
+sys.modules['Node']=Node
+sys.modules['Runner']=Runner
+sys.modules['TaskGen']=TaskGen
+sys.modules['Utils']=Utils
+from waflib.Tools import c_preproc
+sys.modules['preproc']=c_preproc
+from waflib.Tools import c_config
+sys.modules['config_c']=c_config
+ConfigSet.ConfigSet.copy=ConfigSet.ConfigSet.derive
+ConfigSet.ConfigSet.set_variant=Utils.nada
+Build.BuildContext.add_subdirs=Build.BuildContext.recurse
+Build.BuildContext.new_task_gen=Build.BuildContext.__call__
+Build.BuildContext.is_install=0
+Node.Node.relpath_gen=Node.Node.path_from
+def name_to_obj(self,s,env=None):
+ Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"')
+ return self.get_tgen_by_name(s)
+Build.BuildContext.name_to_obj=name_to_obj
+def env_of_name(self,name):
+ try:
+ return self.all_envs[name]
+ except KeyError:
+ Logs.error('no such environment: '+name)
+ return None
+Build.BuildContext.env_of_name=env_of_name
+def set_env_name(self,name,env):
+ self.all_envs[name]=env
+ return env
+Configure.ConfigurationContext.set_env_name=set_env_name
+def retrieve(self,name,fromenv=None):
+ try:
+ env=self.all_envs[name]
+ except KeyError:
+ env=ConfigSet.ConfigSet()
+ self.prepare_env(env)
+ self.all_envs[name]=env
+ else:
+ if fromenv:Logs.warn("The environment %s may have been configured already"%name)
+ return env
+Configure.ConfigurationContext.retrieve=retrieve
+Configure.ConfigurationContext.sub_config=Configure.ConfigurationContext.recurse
+Configure.ConfigurationContext.check_tool=Configure.ConfigurationContext.load
+Configure.conftest=Configure.conf
+Configure.ConfigurationError=Errors.ConfigurationError
+Options.OptionsContext.sub_options=Options.OptionsContext.recurse
+Options.OptionsContext.tool_options=Context.Context.load
+Options.Handler=Options.OptionsContext
+Task.simple_task_type=Task.task_type_from_func=Task.task_factory
+Task.TaskBase.classes=Task.classes
+def setitem(self,key,value):
+ if key.startswith('CCFLAGS'):
+ key=key[1:]
+ self.table[key]=value
+ConfigSet.ConfigSet.__setitem__=setitem
+@TaskGen.feature('d')
+@TaskGen.before('apply_incpaths')
+def old_importpaths(self):
+ if getattr(self,'importpaths',[]):
+ self.includes=self.importpaths
+from waflib import Context
+eld=Context.load_tool
+def load_tool(*k,**kw):
+ ret=eld(*k,**kw)
+ if'set_options'in ret.__dict__:
+ Logs.warn('compat: rename "set_options" to options')
+ ret.options=ret.set_options
+ if'detect'in ret.__dict__:
+ Logs.warn('compat: rename "detect" to "configure"')
+ ret.configure=ret.detect
+ return ret
+Context.load_tool=load_tool
+rev=Context.load_module
+def load_module(path):
+ ret=rev(path)
+ if'set_options'in ret.__dict__:
+ Logs.warn('compat: rename "set_options" to "options" (%r)'%path)
+ ret.options=ret.set_options
+ if'srcdir'in ret.__dict__:
+ Logs.warn('compat: rename "srcdir" to "top" (%r)'%path)
+ ret.top=ret.srcdir
+ if'blddir'in ret.__dict__:
+ Logs.warn('compat: rename "blddir" to "out" (%r)'%path)
+ ret.out=ret.blddir
+ return ret
+Context.load_module=load_module
+old_post=TaskGen.task_gen.post
+def post(self):
+ self.features=self.to_list(self.features)
+ if'cc'in self.features:
+ Logs.warn('compat: the feature cc does not exist anymore (use "c")')
+ self.features.remove('cc')
+ self.features.append('c')
+ if'cstaticlib'in self.features:
+ Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
+ self.features.remove('cstaticlib')
+ self.features.append(('cxx'in self.features)and'cxxstlib'or'cstlib')
+ if getattr(self,'ccflags',None):
+ Logs.warn('compat: "ccflags" was renamed to "cflags"')
+ self.cflags=self.ccflags
+ return old_post(self)
+TaskGen.task_gen.post=post
+def waf_version(*k,**kw):
+ Logs.warn('wrong version (waf_version was removed in waf 1.6)')
+Utils.waf_version=waf_version
+import os
+@TaskGen.feature('c','cxx','d')
+@TaskGen.before('apply_incpaths','propagate_uselib_vars')
+@TaskGen.after('apply_link','process_source')
+def apply_uselib_local(self):
+ env=self.env
+ from waflib.Tools.ccroot import stlink_task
+ self.uselib=self.to_list(getattr(self,'uselib',[]))
+ self.includes=self.to_list(getattr(self,'includes',[]))
+ names=self.to_list(getattr(self,'uselib_local',[]))
+ get=self.bld.get_tgen_by_name
+ seen=set([])
+ tmp=Utils.deque(names)
+ if tmp:
+ Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
+ while tmp:
+ lib_name=tmp.popleft()
+ if lib_name in seen:
+ continue
+ y=get(lib_name)
+ y.post()
+ seen.add(lib_name)
+ if getattr(y,'uselib_local',None):
+ for x in self.to_list(getattr(y,'uselib_local',[])):
+ obj=get(x)
+ obj.post()
+ if getattr(obj,'link_task',None):
+ if not isinstance(obj.link_task,stlink_task):
+ tmp.append(x)
+ if getattr(y,'link_task',None):
+ link_name=y.target[y.target.rfind(os.sep)+1:]
+ if isinstance(y.link_task,stlink_task):
+ env.append_value('STLIB',[link_name])
+ else:
+ env.append_value('LIB',[link_name])
+ self.link_task.set_run_after(y.link_task)
+ self.link_task.dep_nodes+=y.link_task.outputs
+ tmp_path=y.link_task.outputs[0].parent.bldpath()
+ if not tmp_path in env['LIBPATH']:
+ env.prepend_value('LIBPATH',[tmp_path])
+ for v in self.to_list(getattr(y,'uselib',[])):
+ if not env['STLIB_'+v]:
+ if not v in self.uselib:
+ self.uselib.insert(0,v)
+ if getattr(y,'export_includes',None):
+ self.includes.extend(y.to_incnodes(y.export_includes))
+@TaskGen.feature('cprogram','cxxprogram','cstlib','cxxstlib','cshlib','cxxshlib','dprogram','dstlib','dshlib')
+@TaskGen.after('apply_link')
+def apply_objdeps(self):
+ names=getattr(self,'add_objects',[])
+ if not names:
+ return
+ names=self.to_list(names)
+ get=self.bld.get_tgen_by_name
+ seen=[]
+ while names:
+ x=names[0]
+ if x in seen:
+ names=names[1:]
+ continue
+ y=get(x)
+ if getattr(y,'add_objects',None):
+ added=0
+ lst=y.to_list(y.add_objects)
+ lst.reverse()
+ for u in lst:
+ if u in seen:continue
+ added=1
+ names=[u]+names
+ if added:continue
+ y.post()
+ seen.append(x)
+ for t in getattr(y,'compiled_tasks',[]):
+ self.link_task.inputs.extend(t.outputs)
+@TaskGen.after('apply_link')
+def process_obj_files(self):
+ if not hasattr(self,'obj_files'):
+ return
+ for x in self.obj_files:
+ node=self.path.find_resource(x)
+ self.link_task.inputs.append(node)
+@TaskGen.taskgen_method
+def add_obj_file(self,file):
+ if not hasattr(self,'obj_files'):self.obj_files=[]
+ if not'process_obj_files'in self.meths:self.meths.append('process_obj_files')
+ self.obj_files.append(file)
+old_define=Configure.ConfigurationContext.__dict__['define']
+@Configure.conf
+def define(self,key,val,quote=True):
+ old_define(self,key,val,quote)
+ if key.startswith('HAVE_'):
+ self.env[key]=1
+old_undefine=Configure.ConfigurationContext.__dict__['undefine']
+@Configure.conf
+def undefine(self,key):
+ old_undefine(self,key)
+ if key.startswith('HAVE_'):
+ self.env[key]=0
+def set_incdirs(self,val):
+ Logs.warn('compat: change "export_incdirs" by "export_includes"')
+ self.export_includes=val
+TaskGen.task_gen.export_incdirs=property(None,set_incdirs)
diff --git a/waflib/fixpy2.py b/waflib/fixpy2.py
new file mode 100644
index 0000000..98f7036
--- /dev/null
+++ b/waflib/fixpy2.py
@@ -0,0 +1,53 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os
+all_modifs={}
+def fixdir(dir):
+ global all_modifs
+ for k in all_modifs:
+ for v in all_modifs[k]:
+ modif(os.path.join(dir,'waflib'),k,v)
+def modif(dir,name,fun):
+ if name=='*':
+ lst=[]
+ for y in'. Tools extras'.split():
+ for x in os.listdir(os.path.join(dir,y)):
+ if x.endswith('.py'):
+ lst.append(y+os.sep+x)
+ for x in lst:
+ modif(dir,x,fun)
+ return
+ filename=os.path.join(dir,name)
+ f=open(filename,'r')
+ try:
+ txt=f.read()
+ finally:
+ f.close()
+ txt=fun(txt)
+ f=open(filename,'w')
+ try:
+ f.write(txt)
+ finally:
+ f.close()
+def subst(*k):
+ def do_subst(fun):
+ global all_modifs
+ for x in k:
+ try:
+ all_modifs[x].append(fun)
+ except KeyError:
+ all_modifs[x]=[fun]
+ return fun
+ return do_subst
+@subst('*')
+def r1(code):
+ code=code.replace(',e:',',e:')
+ code=code.replace("",'')
+ code=code.replace('','')
+ return code
+@subst('Runner.py')
+def r4(code):
+ code=code.replace('next(self.biter)','self.biter.next()')
+ return code
diff --git a/wscript b/wscript
new file mode 100644
index 0000000..50f098c
--- /dev/null
+++ b/wscript
@@ -0,0 +1,148 @@
+APPNAME = 'sprai'
+VERSION = '0.9.9.12'
+
+srcdir = '.'
+blddir = 'build'
+
+def options(opt):
+ opt.load('compiler_c perl')
+
+def configure(conf):
+ conf.load('compiler_c perl')
+
+# if conf.check_cc(lib = 'z'):
+# pass
+# else:
+# conf.fatal("zlib does not exist.")
+
+ conf.check_perl_version()
+
+# if conf.check_perl_module('Compress::Zlib') is None:
+# conf.fatal("Perl module 'Compress::Zlib' does not exist.")
+# else:
+# pass
+
+ conf.check_perl_module('Statistics::Descriptive')
+
+def build(bld):
+ bld.install_files(
+ '${PREFIX}/bin', [
+ 'ca_ikki_v5.pl',
+ 'dumbbell_filter.pl',
+# 'ezez4qsub_v9.pl',
+# 'ezez4qsub_v8_iter.pl',
+ 'ezez4qsub_vx1.pl',
+# 'ezez_v8.pl',
+# 'ezez_v7_iter.pl',
+ 'ezez_vx1.pl',
+ 'fa2fq.pl',
+ 'fq2fa.pl',
+ 'fq2idfq.pl',
+ 'fqfilt.pl',
+ 'get_top_20x_fa.pl',
+# 'mira_ikki.pl',
+ 'partition_fa.pl',
+# 'ezez4makefile_v4.pl',
+ 'get_target_fasta_records.pl',
+ 'dfq2fq_v2.pl',
+ 'extract_fq.pl',
+ 'check_redundancy.pl',
+ 'check_circularity.pl',
+ 'bfmtx2m4.pl',
+# 'sprai_dagcon_v3.py',
+# 'sprai_dagcon.cfg',
+# 're2cons.pl',
+# 'count_chars.pl'
+ ], chmod=0755)
+
+ bld.program(
+ source='bfmt72s.c',
+ target='bfmt72s',
+
+ includes='.',
+
+# lib=['z'],
+
+ install_path = '${PREFIX}/bin',
+ cflags = ['-O3', '-Wall'],
+ dflags = ['-g'],
+ )
+
+ bld.program(
+ source='nss2v_v3.c',
+ target='nss2v_v3',
+
+ includes='.',
+
+ install_path = '${PREFIX}/bin',
+ cflags = ['-O3', '-Wall'],
+ dflags = ['-g'],
+ )
+
+ bld.program(
+ source='myrealigner.c',
+ target='myrealigner',
+
+ includes='.',
+
+ install_path = '${PREFIX}/bin',
+ cflags = ['-O3', '-Wall'],
+ dflags = ['-g'],
+ )
+
+ bld.program(
+ source='m52bfmt7.c',
+ target='m52bfmt7',
+
+ includes='.',
+
+ install_path = '${PREFIX}/bin',
+ cflags = ['-O3', '-Wall'],
+ dflags = ['-g'],
+ )
+
+def dist(ctx):
+ ctx.algo = 'tar.gz'
+ ctx.files = ctx.path.ant_glob([
+ 'ca_ikki_v5.pl',
+ 'dfq2fq_v2.pl',
+ 'dumbbell_filter.pl',
+# 'ezez4qsub_v9.pl',
+# 'ezez4qsub_v8_iter.pl',
+ 'ezez4qsub_vx1.pl',
+# 'ezez_v8.pl',
+# 'ezez_v7_iter.pl',
+ 'ezez_vx1.pl',
+ 'fa2fq.pl',
+ 'fq2fa.pl',
+ 'fq2idfq.pl',
+ 'fqfilt.pl',
+ 'get_top_20x_fa.pl',
+# 'mira_ikki.pl',
+ 'partition_fa.pl',
+# 'ezez4makefile_v4.pl',
+ 'get_target_fasta_records.pl',
+ 'doc/_build/html/**',
+ 'configure',
+ 'pbasm.spec',
+# 'myasm.spec',
+ 'ec.spec',
+# 'ec_iter.spec',
+ 'bfmt72s.c',
+ 'col2fqcell.h',
+ 'LICENSE.txt',
+ 'myrealigner.c',
+ 'nss2v_v3.c',
+ 'waf',
+ 'wscript',
+ 'm52bfmt7.c',
+ 'bfmtx2m4.pl',
+ 'extract_fq.pl',
+ 'check_redundancy.pl',
+ 'check_circularity.pl',
+# 'count_chars.pl',
+# 'sprai_dagcon_v3.py',
+# 'sprai_dagcon.cfg',
+# 're2cons.pl',
+ 'makefile'
+ ])
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/sprai.git
More information about the debian-med-commit
mailing list