[med-svn] [sprai] 01/01: Imported Upstream version 0.9.9.14
Afif Elghraoui
afif at moszumanska.debian.org
Wed Apr 27 03:00:16 UTC 2016
This is an automated email from the git hooks/post-receive script.
afif pushed a commit to annotated tag upstream/0.9.9.14
in repository sprai.
commit 9db4193ba140b67615019c196c04aac6a8e01ee8
Author: Afif Elghraoui <afif at ghraoui.name>
Date: Tue Apr 26 18:44:33 2016 -0700
Imported Upstream version 0.9.9.14
---
ChangeLog.txt | 1 +
ca_ikki_v5.pl | 4 +-
doc/_build/html/_sources/Download.txt | 4 +
doc/_build/html/_sources/Example.txt | 28 ++
doc/_build/html/_sources/README.txt | 37 +-
doc/_build/html/_sources/index.txt | 4 +
ezez4qsub_vx1.pl | 120 +++---
ezez_vx1.pl | 25 +-
makefile | 2 +-
waf | Bin 4002 -> 90798 bytes
waflib/Build.py | 769 ----------------------------------
waflib/ConfigSet.py | 152 -------
waflib/Configure.py | 317 --------------
waflib/Context.py | 319 --------------
waflib/Errors.py | 37 --
waflib/Logs.py | 176 --------
waflib/Node.py | 466 --------------------
waflib/Options.py | 135 ------
waflib/Runner.py | 197 ---------
waflib/Scripting.py | 373 -----------------
waflib/Task.py | 677 ------------------------------
waflib/TaskGen.py | 400 ------------------
waflib/Tools/__init__.py | 4 -
waflib/Tools/ar.py | 11 -
waflib/Tools/asm.py | 25 --
waflib/Tools/bison.py | 28 --
waflib/Tools/c.py | 24 --
waflib/Tools/c_aliases.py | 55 ---
waflib/Tools/c_config.py | 728 --------------------------------
waflib/Tools/c_osx.py | 120 ------
waflib/Tools/c_preproc.py | 604 --------------------------
waflib/Tools/c_tests.py | 153 -------
waflib/Tools/ccroot.py | 391 -----------------
waflib/Tools/compiler_c.py | 39 --
waflib/Tools/compiler_cxx.py | 39 --
waflib/Tools/compiler_d.py | 29 --
waflib/Tools/compiler_fc.py | 43 --
waflib/Tools/cs.py | 132 ------
waflib/Tools/cxx.py | 26 --
waflib/Tools/d.py | 54 ---
waflib/Tools/d_config.py | 52 ---
waflib/Tools/d_scan.py | 133 ------
waflib/Tools/dbus.py | 29 --
waflib/Tools/dmd.py | 51 ---
waflib/Tools/errcheck.py | 161 -------
waflib/Tools/fc.py | 116 -----
waflib/Tools/fc_config.py | 285 -------------
waflib/Tools/fc_scan.py | 68 ---
waflib/Tools/flex.py | 32 --
waflib/Tools/g95.py | 55 ---
waflib/Tools/gas.py | 12 -
waflib/Tools/gcc.py | 97 -----
waflib/Tools/gdc.py | 36 --
waflib/Tools/gfortran.py | 69 ---
waflib/Tools/glib2.py | 173 --------
waflib/Tools/gnu_dirs.py | 65 ---
waflib/Tools/gxx.py | 97 -----
waflib/Tools/icc.py | 30 --
waflib/Tools/icpc.py | 29 --
waflib/Tools/ifort.py | 49 ---
waflib/Tools/intltool.py | 77 ----
waflib/Tools/irixcc.py | 48 ---
waflib/Tools/javaw.py | 311 --------------
waflib/Tools/kde4.py | 48 ---
waflib/Tools/ldc2.py | 37 --
waflib/Tools/lua.py | 18 -
waflib/Tools/msvc.py | 726 --------------------------------
waflib/Tools/nasm.py | 14 -
waflib/Tools/perl.py | 80 ----
waflib/Tools/python.py | 340 ---------------
waflib/Tools/qt4.py | 437 -------------------
waflib/Tools/ruby.py | 103 -----
waflib/Tools/suncc.py | 53 ---
waflib/Tools/suncxx.py | 54 ---
waflib/Tools/tex.py | 250 -----------
waflib/Tools/vala.py | 201 ---------
waflib/Tools/waf_unit_test.py | 95 -----
waflib/Tools/winres.py | 85 ----
waflib/Tools/xlc.py | 45 --
waflib/Tools/xlcxx.py | 45 --
waflib/Utils.py | 412 ------------------
waflib/__init__.py | 4 -
waflib/ansiterm.py | 177 --------
waflib/extras/__init__.py | 4 -
waflib/extras/compat15.py | 220 ----------
waflib/fixpy2.py | 53 ---
wscript | 3 +-
87 files changed, 164 insertions(+), 12163 deletions(-)
diff --git a/ChangeLog.txt b/ChangeLog.txt
new file mode 100644
index 0000000..dd03243
--- /dev/null
+++ b/ChangeLog.txt
@@ -0,0 +1 @@
+Please see doc/_build/html/index.html
diff --git a/ca_ikki_v5.pl b/ca_ikki_v5.pl
index b382c09..e4d6e2a 100755
--- a/ca_ikki_v5.pl
+++ b/ca_ikki_v5.pl
@@ -19,11 +19,11 @@ my @msg=(
"USAGE: <this> <asm.spec> estimated_genome_size",
#"[-from integer]",
#"[-to integer ]",
-"[-d directory in which fin.fq.gzs exist (default: $fastqdir)]",
+"[-d directory in which fin.idfq.gzs exist (default: $fastqdir)]",
"[-ca_path /path/to/your/wgs/Linux-amd64/bin (default: $ca_path)]",
#"[-tmp_dir temporary directory (default: $tmp_dir)]",
"[-out_dir output directory (default: $out_dir)]",
-"[-sprai_path the path to sprai installed (default: $sprai_path)]",
+"[-sprai_path the path to get_top_20x_fa.pl installed (default: $sprai_path)]",
"[-coverage int : use longer than N(coverage) reads for assembly (default: $coverage)]",
"",
"[-raw_fastq in.fq : use all reads in in.fq (default: off)]",
diff --git a/doc/_build/html/_sources/Download.txt b/doc/_build/html/_sources/Download.txt
index 3c584c0..3edb882 100644
--- a/doc/_build/html/_sources/Download.txt
+++ b/doc/_build/html/_sources/Download.txt
@@ -1,6 +1,10 @@
========
Download
========
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.14.tar.gz
+
+http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.13.tar.gz
+
http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.12.tar.gz
http://zombie.cb.k.u-tokyo.ac.jp/sprai/dist/sprai-0.9.9.11.tar.gz
diff --git a/doc/_build/html/_sources/Example.txt b/doc/_build/html/_sources/Example.txt
index 165b3b0..dcf1860 100644
--- a/doc/_build/html/_sources/Example.txt
+++ b/doc/_build/html/_sources/Example.txt
@@ -18,6 +18,9 @@ Go to `pacbiotoca wiki <http://sourceforge.net/apps/mediawiki/wgs-assembler/inde
tar xvzf sampleData.tar.gz
Convert fasta to fastq. You can use a fa2fq.pl script in Sprai.
+
+(Sprai version 0.9.9.13 or newer can be fed both fastq and fasta format. So you can skip converting fasta to fastq.)
+
::
fa2fq.pl sampleData/pacbio.filtered_subreads.fasta > pacbio.filtered_subreads.fq
@@ -61,6 +64,8 @@ If you do not know it, set 0.
*trim* is the number of nucleotides Sprai cut from both sides of alignments.
+*ca_path* is the path to your wgs-assembler (Celera Assembler) installed.
+
*word_size* is used by blastn.
Correct errors & assemble
@@ -83,6 +88,29 @@ Sprai extracts longest 20X reads of the *estimated_genome_size* from *c01.fin.id
And feed them to Celera Assembler.
Celera Assembler outputs files into *CA* directory.
+If you only correct errors and don't assemble, do
+
+::
+
+ ezez_vx1.pl ec.spec -ec_only > log.txt 2>&1 &
+
+or
+
+::
+
+ ezez_vx1.pl ec.spec > log.txt 2>&1 &
+
+After error correction, if you want to assemble corrected reads using Celera Assembler, do
+
+::
+
+ ca_ikki_v5.pl pbasm.spec estimated_genome_size \
+ -d directory in which fin.idfq.gzs exist \
+ -ca_path /path/to/your/wgs/Linux-amd64/bin \
+ -sprai_path the path to get_top_20x_fa.pl installed
+
+
+
Find contigs
===================
You will find contigs in a *CA/9-terminator/asm.ctg.fasta* file.
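As a concrete illustration of the two-step workflow documented above (error correction first, Celera Assembler afterwards), an invocation could look like the following; the genome size, result directory, and install paths are placeholders for illustration only, not values taken from this patch::

    ezez_vx1.pl ec.spec -ec_only > log.txt 2>&1 &
    # once correction has finished, point ca_ikki_v5.pl at the directory
    # holding the fin.idfq.gz files and at your wgs-assembler and Sprai installs
    ca_ikki_v5.pl pbasm.spec 4600000 \
      -d result_20160415_000000 \
      -ca_path /opt/wgs-8.3/Linux-amd64/bin \
      -sprai_path /usr/local/sprai/bin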
diff --git a/doc/_build/html/_sources/README.txt b/doc/_build/html/_sources/README.txt
index bc64a07..05e9e18 100644
--- a/doc/_build/html/_sources/README.txt
+++ b/doc/_build/html/_sources/README.txt
@@ -179,11 +179,13 @@ Edit *ec.spec*, and give *ca_path* parameter, which is the directory (full-path)
.. You can confirm what will happen by using fs2ctg_v4.pl with '-n' option.
Then, type the following commands
+
::
ezez_vx1.pl ec.spec pbasm.spec > log 2>&1 &
This will do sequencing-error correction, and contigs will be created.
+Note that parameter files for Sprai and Celera assembler are independent; you can run Celera with multiple nodes (machines) even with Sprai single node mode.
If you only need error-corrected reads and do not want Sprai (Celera assembler) to assemble them, do as follows
@@ -195,7 +197,27 @@ If you only need error-corrected reads and do not want Sprai (Celera assembler)
ezez_vx1.pl ec.spec /dev/null -ec_only > log 2>&1 &
-Note that parameter files for Sprai and Celera assembler are independent; you can run Celera with multiple nodes (machines) even with Sprai single node mode.
+or
+
+::
+
+ ezez_vx1.pl ec.spec -ec_only > log 2>&1 &
+
+or
+
+::
+
+ ezez_vx1.pl ec.spec > log 2>&1 &
+
+
+After error correction, if you want to assemble corrected reads using Celera Assembler, do
+
+::
+
+ ca_ikki_v5.pl pbasm.spec estimated_genome_size \
+ -d directory in which fin.idfq.gzs exist \
+ -ca_path /path/to/your/wgs/Linux-amd64/bin \
+ -sprai_path the path to get_top_20x_fa.pl installed
Multi-node mode 1 (qsub mode)
------------------------------
@@ -216,10 +238,23 @@ To correct sequencing errors of PacBio Continuous Long Reads and also would like
.. ezez4makefile.pl ec.spec asm.spec > ezez4makefile.log 2>&1 && make &
If you only use error-corrected reads and do not want Sprai (Celera assembler) to assemble them, do as follows
+
::
ezez4qsub_vx1.pl ec.spec /dev/null -ec_only > log 2>&1 &
+or
+
+::
+
+ ezez4qsub_vx1.pl ec.spec -ec_only > log 2>&1 &
+
+or
+
+::
+
+ ezez4qsub_vx1.pl ec.spec > log 2>&1 &
+
.. \or
.. ::
diff --git a/doc/_build/html/_sources/index.txt b/doc/_build/html/_sources/index.txt
index 0f79d24..b729ca9 100644
--- a/doc/_build/html/_sources/index.txt
+++ b/doc/_build/html/_sources/index.txt
@@ -20,6 +20,10 @@ Contents
Changelogs
=============
+2016.4.15: v0.9.9.14: -ec_only mode can be run without a spec file of Celera Assembler. (Thanks to Afif Elghraoui for a report)
+
+2016.4.13: v0.9.9.13: myrealigner.c: dynamic memory allocation for element & col pools. (Thanks to Tomoaki Nishiyama for code modifications)
+
2016.4.12: v0.9.9.12: Sprai is released under MIT license. See LICENSE.txt .
2015.10.20: v0.9.9.11: nss2v_v3.c: variable max read length. (Thanks to Tomoaki Nishiyama for code modifications)
diff --git a/ezez4qsub_vx1.pl b/ezez4qsub_vx1.pl
index 233994d..5f5e5c9 100755
--- a/ezez4qsub_vx1.pl
+++ b/ezez4qsub_vx1.pl
@@ -43,33 +43,41 @@ if($DEBUG){
my %params;
-my @emsgs=(
+my @msgs=(
'USAGE: <this> <ec.spec> <asm.spec>',
- '[-debug: outputs intermediate files (not implemented)]',
+ "or: <this> <ec.spec> -ec_only",
+ #'[-debug: outputs intermediate files (not implemented)]',
'[-n: outputs qsub scripts and does NOT qsub]',
'[-now yyyymmdd_hhmmss: use a XXX_yyyymmdd_hhmmss directories, detect unfinished jobs and restart at the appropriate stage.]',
- '[-ec_only: not assemble]',
+ "[-ec_only: does error correction and does NOT assemble]",
);
-if(@ARGV != 2){
- my $msg = join "\n\t",@emsgs;
- die "$msg\n";
+if(@ARGV == 0 || @ARGV > 2){
+ my $msg = join "\n\t",@msgs;
+ printf STDERR ("%s\n",$msg);
+ exit(1);
+}
+if(@ARGV == 1 && !$opt_ec_only){
+ printf STDERR ("WARNING: %s\n", "-ec_only was added");
+ $opt_ec_only = 1;
}
my $pwd = `pwd`;
chomp $pwd;
-my $asm_spec = $ARGV[1];
-$asm_spec =~ s/^\s+//;
-if(!-e $asm_spec){
- die "$asm_spec does not exist.\n";
-}
-
-if($asm_spec =~ /^\//){
- # real path; do nothing
-}
-else{
- $asm_spec = "$pwd/$asm_spec";
+my $asm_spec="";
+if(@ARGV == 2){
+ $asm_spec = $ARGV[1];
+ $asm_spec =~ s/^\s+//;
+ if(!-e $asm_spec){
+ die "$asm_spec does not exist.\n";
+ }
+ if($asm_spec =~ /^\//){
+ # real path; do nothing
+ }
+ else{
+ $asm_spec = "$pwd/$asm_spec";
+ }
}
{
@@ -242,7 +250,9 @@ printf STDERR ("evalue %g\n",$evalue);
printf STDERR ("num_threads %d\n",$num_threads);
printf STDERR ("valid_voters %s\n",$valid_voters);
printf STDERR ("trim %d\n",$trim);
-printf STDERR ("ca_path %s\n",$ca_path);
+if($ca_path){
+ printf STDERR ("ca_path %s\n",$ca_path);
+}
printf STDERR ("blast_path %s\n",$blast_path);
printf STDERR ("sprai_path %s\n",$sprai_path);
if($queue_req){
@@ -315,6 +325,10 @@ if(!-e "$bindir/bfmt72s"){
if(!-e "$path2blast/blastn"){
die "$path2blast/blastn does not exist in $path2blast\n"
}
+if(!$opt_ec_only && !-e $ca_path){
+ die "ca_path $ca_path does not exist.\n";
+}
+
$scriptdir = "$pwd/${scriptdir}_$now";
if(!-d $scriptdir){
@@ -666,7 +680,7 @@ for(my $index=$from; $index<$to; ++$index){
}
#my $PG3="$bindir/nss2v_v3 -v $valid_voters -q";
my $PG3="$bindir/nss2v_v3 -v $valid_voters";
- my $PG4="$bindir/myrealigner -f -B $valid_voters -b 3 -d 0.5";
+ my $PG4="$bindir/myrealigner -f -B $valid_voters -b 3 -d 0.5 -l 131072";
#my $PG3="$bindir/nss2v_v3 -q";
#my $PG3="$bindir/nss2v_v3 -q -s";
#my $PG3="$bindir/nss2v_v3 -v $valid_voters";
@@ -1187,25 +1201,26 @@ for(my $index=$from; $index<$to; ++$index){
if(!$opt_ec_only){
push(@post_array_jobs,$script);
push(@do_qsub_postaj,$do_qsub);
- }
- open my $fh, ">", $script or die $!;
- printf $fh ("#!/bin/bash\n");
- printf $fh ("#\$ -S /bin/bash\n");
- printf $fh ("#\$ -cwd\n");
- printf $fh ("#\$ -V\n");
- my $jobname = sprintf("$preprefix%02d_fastqToCA_$now",$index);
- push @runCA_holdjids,$jobname;
- printf $fh ("#\$ -N $jobname\n");
- if($queue_req){
- printf $fh ("#\$ $queue_req\n");
- }
- printf $fh ("#\$ -o $logdir\n");
- printf $fh ("#\$ -e $logdir\n");
- printf $fh ("#\$ -hold_jid $holdlist\n");
- printf $fh ("time ($command)\n");
+ open my $fh, ">", $script or die $!;
- close $fh;
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ my $jobname = sprintf("$preprefix%02d_fastqToCA_$now",$index);
+ push @runCA_holdjids,$jobname;
+ printf $fh ("#\$ -N $jobname\n");
+ if($queue_req){
+ printf $fh ("#\$ $queue_req\n");
+ }
+ printf $fh ("#\$ -o $logdir\n");
+ printf $fh ("#\$ -e $logdir\n");
+ printf $fh ("#\$ -hold_jid $holdlist\n");
+ printf $fh ("time ($command)\n");
+
+ close $fh;
+ }
}
if($index+1 == $to)
{
@@ -1244,24 +1259,25 @@ for(my $index=$from; $index<$to; ++$index){
if(!$opt_ec_only){
push(@post_array_jobs,$script);
push(@do_qsub_postaj,$do_qsub);
- }
- open my $fh, ">", $script or die $!;
- printf $fh ("#!/bin/bash\n");
- printf $fh ("#\$ -S /bin/bash\n");
- printf $fh ("#\$ -cwd\n");
- printf $fh ("#\$ -V\n");
- my $jobname = sprintf("$preprefix%02d_runCA_$now",$index);
- printf $fh ("#\$ -N $jobname\n");
- if($queue_req){
- printf $fh ("#\$ $queue_req\n");
- }
- printf $fh ("#\$ -o $logdir\n");
- printf $fh ("#\$ -e $logdir\n");
- printf $fh ("#\$ -hold_jid $holdlist\n");
- printf $fh ("time ($command)\n");
+ open my $fh, ">", $script or die $!;
- close $fh;
+ printf $fh ("#!/bin/bash\n");
+ printf $fh ("#\$ -S /bin/bash\n");
+ printf $fh ("#\$ -cwd\n");
+ printf $fh ("#\$ -V\n");
+ my $jobname = sprintf("$preprefix%02d_runCA_$now",$index);
+ printf $fh ("#\$ -N $jobname\n");
+ if($queue_req){
+ printf $fh ("#\$ $queue_req\n");
+ }
+ printf $fh ("#\$ -o $logdir\n");
+ printf $fh ("#\$ -e $logdir\n");
+ printf $fh ("#\$ -hold_jid $holdlist\n");
+ printf $fh ("time ($command)\n");
+
+ close $fh;
+ }
}
# print STDERR "post_array_jobs printed\n";
diff --git a/ezez_vx1.pl b/ezez_vx1.pl
index afa7827..35d7c01 100755
--- a/ezez_vx1.pl
+++ b/ezez_vx1.pl
@@ -31,19 +31,28 @@ my %params;
my @msgs = (
"USAGE: <this> <ec.spec> <asm.spec>",
+ "or: <this> <ec.spec> -ec_only",
"[-n: only shows parameters in ec.spec and exit.]",
"[-ec_only: does error correction and does NOT assemble]",
#'[-debug: outputs intermediate files (not implemented)]',
'[-now yyyymmdd_hhmmss: use a result_yyyymmdd_hhmmss directory, detect unfinished jobs and restart at the appropriate stage.]',
);
-if(@ARGV != 2){
+if(@ARGV == 0 || @ARGV > 2){
my $msg = join "\n\t",@msgs;
printf STDERR ("%s\n",$msg);
exit(1);
}
+if(@ARGV == 1 && !$opt_ec_only){
+ printf STDERR ("WARNING: %s\n", "-ec_only was added");
+ $opt_ec_only = 1;
+}
+#print "@ARGV\n";
-my $spec=$ARGV[1];
+my $spec="";
+if(@ARGV == 2){
+ $spec=$ARGV[1];
+}
{
my $ec_spec = $ARGV[0];
@@ -80,7 +89,7 @@ my $max_target_seqs=100;
my $valid_voters=11;
my $trim=42;
my $estimated_genome_size=0;
-my $ca_path="/home/imai/wgs-7.0/Linux-amd64/bin/";
+my $ca_path="";
my $word_size=0;
my $min_len_for_query=1;
my $max_len_for_query=1000000000000000;
@@ -187,7 +196,9 @@ printf STDERR ("evalue %g\n",$evalue);
printf STDERR ("num_threads %d\n",$num_threads);
printf STDERR ("valid_voters %s\n",$valid_voters);
printf STDERR ("trim %d\n",$trim);
-printf STDERR ("ca_path %s\n",$ca_path);
+if($ca_path){
+ printf STDERR ("ca_path %s\n",$ca_path);
+}
if($sprai_path){
printf STDERR ("sprai_path %s\n",$sprai_path);
}
@@ -211,8 +222,8 @@ if($max_target_seqs){
}
printf STDERR ("#>- params -<#\n");
-if(!-e $ca_path){
- die "$ca_path does not exist.\n";
+if(!$opt_ec_only && !-e $ca_path){
+ die "ca_path $ca_path does not exist.\n";
}
if(!$opt_ec_only && !-e $spec){
@@ -390,7 +401,7 @@ for(my $index=$from; $index<$to; ++$index){
if($sprai_path){
$PG3 = "$sprai_path/$PG3";
}
- my $PG4="myrealigner -f -B $valid_voters -b 3 -d 0.5";
+ my $PG4="myrealigner -f -B $valid_voters -b 3 -d 0.5 -l 131072";
if($sprai_path){
$PG4 = "$sprai_path/$PG4";
}
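Taken together, the hunks above for ezez4qsub_vx1.pl and ezez_vx1.pl converge on a single argument convention: two arguments (ec.spec plus asm.spec) select correct-and-assemble mode, while a lone ec.spec now implies -ec_only and prints a warning instead of dying. A self-contained Perl sketch of that convention, simplified from the scripts (the real scripts parse their flags by hand; Getopt::Long is used here only for illustration)::

    #!/usr/bin/perl
    use strict;
    use warnings;
    use Getopt::Long;

    # Sketch of the new spec-file convention: <ec.spec> <asm.spec> corrects and
    # assembles; <ec.spec> alone falls back to error correction only.
    my $opt_ec_only = 0;
    GetOptions('ec_only' => \$opt_ec_only) or exit 1;

    if (@ARGV == 0 || @ARGV > 2) {
        print STDERR "USAGE: <this> <ec.spec> <asm.spec>\n";
        print STDERR "   or: <this> <ec.spec> -ec_only\n";
        exit 1;
    }
    if (@ARGV == 1 && !$opt_ec_only) {
        printf STDERR ("WARNING: %s\n", "-ec_only was added");
        $opt_ec_only = 1;
    }
    my $ec_spec  = $ARGV[0];
    my $asm_spec = @ARGV == 2 ? $ARGV[1] : "";   # stays empty in ec-only mode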
diff --git a/makefile b/makefile
index b1ca723..84ad6f2 100644
--- a/makefile
+++ b/makefile
@@ -1,5 +1,5 @@
APPNAME = 'sprai'
-VERSION = '0.9.9.13'
+VERSION = '0.9.9.14'
PREFIX=$(PWD)
COMPILED= \
diff --git a/waf b/waf
index 16a745b..b3b6e36 100755
Binary files a/waf and b/waf differ
diff --git a/waflib/Build.py b/waflib/Build.py
deleted file mode 100644
index 70330ef..0000000
--- a/waflib/Build.py
+++ /dev/null
@@ -1,769 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys,errno,re,shutil
-try:
- import cPickle
-except ImportError:
- import pickle as cPickle
-from waflib import Runner,TaskGen,Utils,ConfigSet,Task,Logs,Options,Context,Errors
-import waflib.Node
-CACHE_DIR='c4che'
-CACHE_SUFFIX='_cache.py'
-INSTALL=1337
-UNINSTALL=-1337
-SAVED_ATTRS='root node_deps raw_deps task_sigs'.split()
-CFG_FILES='cfg_files'
-POST_AT_ONCE=0
-POST_LAZY=1
-POST_BOTH=2
-class BuildContext(Context.Context):
- '''executes the build'''
- cmd='build'
- variant=''
- def __init__(self,**kw):
- super(BuildContext,self).__init__(**kw)
- self.is_install=0
- self.top_dir=kw.get('top_dir',Context.top_dir)
- self.run_dir=kw.get('run_dir',Context.run_dir)
- self.post_mode=POST_AT_ONCE
- self.out_dir=kw.get('out_dir',Context.out_dir)
- self.cache_dir=kw.get('cache_dir',None)
- if not self.cache_dir:
- self.cache_dir=self.out_dir+os.sep+CACHE_DIR
- self.all_envs={}
- self.task_sigs={}
- self.node_deps={}
- self.raw_deps={}
- self.cache_dir_contents={}
- self.task_gen_cache_names={}
- self.launch_dir=Context.launch_dir
- self.jobs=Options.options.jobs
- self.targets=Options.options.targets
- self.keep=Options.options.keep
- self.cache_global=Options.cache_global
- self.nocache=Options.options.nocache
- self.progress_bar=Options.options.progress_bar
- self.deps_man=Utils.defaultdict(list)
- self.current_group=0
- self.groups=[]
- self.group_names={}
- def get_variant_dir(self):
- if not self.variant:
- return self.out_dir
- return os.path.join(self.out_dir,self.variant)
- variant_dir=property(get_variant_dir,None)
- def __call__(self,*k,**kw):
- kw['bld']=self
- ret=TaskGen.task_gen(*k,**kw)
- self.task_gen_cache_names={}
- self.add_to_group(ret,group=kw.get('group',None))
- return ret
- def rule(self,*k,**kw):
- def f(rule):
- ret=self(*k,**kw)
- ret.rule=rule
- return ret
- return f
- def __copy__(self):
- raise Errors.WafError('build contexts are not supposed to be copied')
- def install_files(self,*k,**kw):
- pass
- def install_as(self,*k,**kw):
- pass
- def symlink_as(self,*k,**kw):
- pass
- def load_envs(self):
- node=self.root.find_node(self.cache_dir)
- if not node:
- raise Errors.WafError('The project was not configured: run "waf configure" first!')
- lst=node.ant_glob('**/*%s'%CACHE_SUFFIX,quiet=True)
- if not lst:
- raise Errors.WafError('The cache directory is empty: reconfigure the project')
- for x in lst:
- name=x.path_from(node).replace(CACHE_SUFFIX,'').replace('\\','/')
- env=ConfigSet.ConfigSet(x.abspath())
- self.all_envs[name]=env
- for f in env[CFG_FILES]:
- newnode=self.root.find_resource(f)
- try:
- h=Utils.h_file(newnode.abspath())
- except(IOError,AttributeError):
- Logs.error('cannot find %r'%f)
- h=Utils.SIG_NIL
- newnode.sig=h
- def init_dirs(self):
- if not(os.path.isabs(self.top_dir)and os.path.isabs(self.out_dir)):
- raise Errors.WafError('The project was not configured: run "waf configure" first!')
- self.path=self.srcnode=self.root.find_dir(self.top_dir)
- self.bldnode=self.root.make_node(self.variant_dir)
- self.bldnode.mkdir()
- def execute(self):
- self.restore()
- if not self.all_envs:
- self.load_envs()
- self.execute_build()
- def execute_build(self):
- Logs.info("Waf: Entering directory `%s'"%self.variant_dir)
- self.recurse([self.run_dir])
- self.pre_build()
- self.timer=Utils.Timer()
- if self.progress_bar:
- sys.stderr.write(Logs.colors.cursor_off)
- try:
- self.compile()
- finally:
- if self.progress_bar==1:
- c=len(self.returned_tasks)or 1
- self.to_log(self.progress_line(c,c,Logs.colors.BLUE,Logs.colors.NORMAL))
- print('')
- sys.stdout.flush()
- sys.stderr.write(Logs.colors.cursor_on)
- Logs.info("Waf: Leaving directory `%s'"%self.variant_dir)
- self.post_build()
- def restore(self):
- try:
- env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py'))
- except(IOError,OSError):
- pass
- else:
- if env['version']<Context.HEXVERSION:
- raise Errors.WafError('Version mismatch! reconfigure the project')
- for t in env['tools']:
- self.setup(**t)
- f=None
- try:
- dbfn=os.path.join(self.variant_dir,Context.DBFILE)
- try:
- f=open(dbfn,'rb')
- except(IOError,EOFError):
- Logs.debug('build: Could not load the build cache %s (missing)'%dbfn)
- else:
- try:
- waflib.Node.pickle_lock.acquire()
- waflib.Node.Nod3=self.node_class
- try:
- data=cPickle.load(f)
- except Exception ,e:
- Logs.debug('build: Could not pickle the build cache %s: %r'%(dbfn,e))
- else:
- for x in SAVED_ATTRS:
- setattr(self,x,data[x])
- finally:
- waflib.Node.pickle_lock.release()
- finally:
- if f:
- f.close()
- self.init_dirs()
- def store(self):
- data={}
- for x in SAVED_ATTRS:
- data[x]=getattr(self,x)
- db=os.path.join(self.variant_dir,Context.DBFILE)
- try:
- waflib.Node.pickle_lock.acquire()
- waflib.Node.Nod3=self.node_class
- f=None
- try:
- f=open(db+'.tmp','wb')
- cPickle.dump(data,f,-1)
- finally:
- if f:
- f.close()
- finally:
- waflib.Node.pickle_lock.release()
- try:
- st=os.stat(db)
- os.unlink(db)
- if not Utils.is_win32:
- os.chown(db+'.tmp',st.st_uid,st.st_gid)
- except(AttributeError,OSError):
- pass
- os.rename(db+'.tmp',db)
- def compile(self):
- Logs.debug('build: compile()')
- self.producer=Runner.Parallel(self,self.jobs)
- self.producer.biter=self.get_build_iterator()
- self.returned_tasks=[]
- try:
- self.producer.start()
- except KeyboardInterrupt:
- self.store()
- raise
- else:
- if self.producer.dirty:
- self.store()
- if self.producer.error:
- raise Errors.BuildError(self.producer.error)
- def setup(self,tool,tooldir=None,funs=None):
- if isinstance(tool,list):
- for i in tool:self.setup(i,tooldir)
- return
- module=Context.load_tool(tool,tooldir)
- if hasattr(module,"setup"):module.setup(self)
- def get_env(self):
- try:
- return self.all_envs[self.variant]
- except KeyError:
- return self.all_envs['']
- def set_env(self,val):
- self.all_envs[self.variant]=val
- env=property(get_env,set_env)
- def add_manual_dependency(self,path,value):
- if path is None:
- raise ValueError('Invalid input')
- if isinstance(path,waflib.Node.Node):
- node=path
- elif os.path.isabs(path):
- node=self.root.find_resource(path)
- else:
- node=self.path.find_resource(path)
- if isinstance(value,list):
- self.deps_man[id(node)].extend(value)
- else:
- self.deps_man[id(node)].append(value)
- def launch_node(self):
- try:
- return self.p_ln
- except AttributeError:
- self.p_ln=self.root.find_dir(self.launch_dir)
- return self.p_ln
- def hash_env_vars(self,env,vars_lst):
- if not env.table:
- env=env.parent
- if not env:
- return Utils.SIG_NIL
- idx=str(id(env))+str(vars_lst)
- try:
- cache=self.cache_env
- except AttributeError:
- cache=self.cache_env={}
- else:
- try:
- return self.cache_env[idx]
- except KeyError:
- pass
- lst=[env[a]for a in vars_lst]
- ret=Utils.h_list(lst)
- Logs.debug('envhash: %s %r',Utils.to_hex(ret),lst)
- cache[idx]=ret
- return ret
- def get_tgen_by_name(self,name):
- cache=self.task_gen_cache_names
- if not cache:
- for g in self.groups:
- for tg in g:
- try:
- cache[tg.name]=tg
- except AttributeError:
- pass
- try:
- return cache[name]
- except KeyError:
- raise Errors.WafError('Could not find a task generator for the name %r'%name)
- def progress_line(self,state,total,col1,col2):
- n=len(str(total))
- Utils.rot_idx+=1
- ind=Utils.rot_chr[Utils.rot_idx%4]
- pc=(100.*state)/total
- eta=str(self.timer)
- fs="[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s]["%(n,n,ind)
- left=fs%(state,total,col1,pc,col2)
- right='][%s%s%s]'%(col1,eta,col2)
- cols=Logs.get_term_cols()-len(left)-len(right)+2*len(col1)+2*len(col2)
- if cols<7:cols=7
- ratio=((cols*state)//total)-1
- bar=('='*ratio+'>').ljust(cols)
- msg=Utils.indicator%(left,bar,right)
- return msg
- def declare_chain(self,*k,**kw):
- return TaskGen.declare_chain(*k,**kw)
- def pre_build(self):
- for m in getattr(self,'pre_funs',[]):
- m(self)
- def post_build(self):
- for m in getattr(self,'post_funs',[]):
- m(self)
- def add_pre_fun(self,meth):
- try:
- self.pre_funs.append(meth)
- except AttributeError:
- self.pre_funs=[meth]
- def add_post_fun(self,meth):
- try:
- self.post_funs.append(meth)
- except AttributeError:
- self.post_funs=[meth]
- def get_group(self,x):
- if not self.groups:
- self.add_group()
- if x is None:
- return self.groups[self.current_group]
- if x in self.group_names:
- return self.group_names[x]
- return self.groups[x]
- def add_to_group(self,tgen,group=None):
- assert(isinstance(tgen,TaskGen.task_gen)or isinstance(tgen,Task.TaskBase))
- tgen.bld=self
- self.get_group(group).append(tgen)
- def get_group_name(self,g):
- if not isinstance(g,list):
- g=self.groups[g]
- for x in self.group_names:
- if id(self.group_names[x])==id(g):
- return x
- return''
- def get_group_idx(self,tg):
- se=id(tg)
- for i in range(len(self.groups)):
- for t in self.groups[i]:
- if id(t)==se:
- return i
- return None
- def add_group(self,name=None,move=True):
- if name and name in self.group_names:
- Logs.error('add_group: name %s already present'%name)
- g=[]
- self.group_names[name]=g
- self.groups.append(g)
- if move:
- self.current_group=len(self.groups)-1
- def set_group(self,idx):
- if isinstance(idx,str):
- g=self.group_names[idx]
- for i in range(len(self.groups)):
- if id(g)==id(self.groups[i]):
- self.current_group=i
- else:
- self.current_group=idx
- def total(self):
- total=0
- for group in self.groups:
- for tg in group:
- try:
- total+=len(tg.tasks)
- except AttributeError:
- total+=1
- return total
- def get_targets(self):
- to_post=[]
- min_grp=0
- for name in self.targets.split(','):
- tg=self.get_tgen_by_name(name)
- if not tg:
- raise Errors.WafError('target %r does not exist'%name)
- m=self.get_group_idx(tg)
- if m>min_grp:
- min_grp=m
- to_post=[tg]
- elif m==min_grp:
- to_post.append(tg)
- return(min_grp,to_post)
- def get_all_task_gen(self):
- lst=[]
- for g in self.groups:
- lst.extend(g)
- return lst
- def post_group(self):
- if self.targets=='*':
- for tg in self.groups[self.cur]:
- try:
- f=tg.post
- except AttributeError:
- pass
- else:
- f()
- elif self.targets:
- if self.cur<self._min_grp:
- for tg in self.groups[self.cur]:
- try:
- f=tg.post
- except AttributeError:
- pass
- else:
- f()
- else:
- for tg in self._exact_tg:
- tg.post()
- else:
- ln=self.launch_node()
- if ln.is_child_of(self.bldnode):
- Logs.warn('Building from the build directory, forcing --targets=*')
- ln=self.srcnode
- elif not ln.is_child_of(self.srcnode):
- Logs.warn('CWD %s is not under %s, forcing --targets=* (run distclean?)'%(ln.abspath(),self.srcnode.abspath()))
- ln=self.srcnode
- for tg in self.groups[self.cur]:
- try:
- f=tg.post
- except AttributeError:
- pass
- else:
- if tg.path.is_child_of(ln):
- f()
- def get_tasks_group(self,idx):
- tasks=[]
- for tg in self.groups[idx]:
- try:
- tasks.extend(tg.tasks)
- except AttributeError:
- tasks.append(tg)
- return tasks
- def get_build_iterator(self):
- self.cur=0
- if self.targets and self.targets!='*':
- (self._min_grp,self._exact_tg)=self.get_targets()
- global lazy_post
- if self.post_mode!=POST_LAZY:
- while self.cur<len(self.groups):
- self.post_group()
- self.cur+=1
- self.cur=0
- while self.cur<len(self.groups):
- if self.post_mode!=POST_AT_ONCE:
- self.post_group()
- tasks=self.get_tasks_group(self.cur)
- Task.set_file_constraints(tasks)
- Task.set_precedence_constraints(tasks)
- self.cur_tasks=tasks
- self.cur+=1
- if not tasks:
- continue
- yield tasks
- while 1:
- yield[]
-class inst(Task.Task):
- color='CYAN'
- def uid(self):
- lst=[self.dest,self.path]+self.source
- return Utils.h_list(repr(lst))
- def post(self):
- buf=[]
- for x in self.source:
- if isinstance(x,waflib.Node.Node):
- y=x
- else:
- y=self.path.find_resource(x)
- if not y:
- if Logs.verbose:
- Logs.warn('Could not find %s immediately (may cause broken builds)'%x)
- idx=self.generator.bld.get_group_idx(self)
- for tg in self.generator.bld.groups[idx]:
- if not isinstance(tg,inst)and id(tg)!=id(self):
- tg.post()
- y=self.path.find_resource(x)
- if y:
- break
- else:
- raise Errors.WafError('Could not find %r in %r'%(x,self.path))
- buf.append(y)
- self.inputs=buf
- def runnable_status(self):
- ret=super(inst,self).runnable_status()
- if ret==Task.SKIP_ME:
- return Task.RUN_ME
- return ret
- def __str__(self):
- return''
- def run(self):
- return self.generator.exec_task()
- def get_install_path(self,destdir=True):
- dest=Utils.subst_vars(self.dest,self.env)
- dest=dest.replace('/',os.sep)
- if destdir and Options.options.destdir:
- dest=os.path.join(Options.options.destdir,os.path.splitdrive(dest)[1].lstrip(os.sep))
- return dest
- def exec_install_files(self):
- destpath=self.get_install_path()
- if not destpath:
- raise Errors.WafError('unknown installation path %r'%self.generator)
- for x,y in zip(self.source,self.inputs):
- if self.relative_trick:
- destfile=os.path.join(destpath,y.path_from(self.path))
- Utils.check_dir(os.path.dirname(destfile))
- else:
- destfile=os.path.join(destpath,y.name)
- self.generator.bld.do_install(y.abspath(),destfile,self.chmod)
- def exec_install_as(self):
- destfile=self.get_install_path()
- self.generator.bld.do_install(self.inputs[0].abspath(),destfile,self.chmod)
- def exec_symlink_as(self):
- destfile=self.get_install_path()
- src=self.link
- if self.relative_trick:
- src=os.path.relpath(src,os.path.dirname(destfile))
- self.generator.bld.do_link(src,destfile)
-class InstallContext(BuildContext):
- '''installs the targets on the system'''
- cmd='install'
- def __init__(self,**kw):
- super(InstallContext,self).__init__(**kw)
- self.uninstall=[]
- self.is_install=INSTALL
- def do_install(self,src,tgt,chmod=Utils.O644):
- d,_=os.path.split(tgt)
- if not d:
- raise Errors.WafError('Invalid installation given %r->%r'%(src,tgt))
- Utils.check_dir(d)
- srclbl=src.replace(self.srcnode.abspath()+os.sep,'')
- if not Options.options.force:
- try:
- st1=os.stat(tgt)
- st2=os.stat(src)
- except OSError:
- pass
- else:
- if st1.st_mtime+2>=st2.st_mtime and st1.st_size==st2.st_size:
- if not self.progress_bar:
- Logs.info('- install %s (from %s)'%(tgt,srclbl))
- return False
- if not self.progress_bar:
- Logs.info('+ install %s (from %s)'%(tgt,srclbl))
- try:
- os.remove(tgt)
- except OSError:
- pass
- try:
- shutil.copy2(src,tgt)
- os.chmod(tgt,chmod)
- except IOError:
- try:
- os.stat(src)
- except(OSError,IOError):
- Logs.error('File %r does not exist'%src)
- raise Errors.WafError('Could not install the file %r'%tgt)
- def do_link(self,src,tgt):
- d,_=os.path.split(tgt)
- Utils.check_dir(d)
- link=False
- if not os.path.islink(tgt):
- link=True
- elif os.readlink(tgt)!=src:
- link=True
- if link:
- try:os.remove(tgt)
- except OSError:pass
- if not self.progress_bar:
- Logs.info('+ symlink %s (to %s)'%(tgt,src))
- os.symlink(src,tgt)
- else:
- if not self.progress_bar:
- Logs.info('- symlink %s (to %s)'%(tgt,src))
- def run_task_now(self,tsk,postpone):
- tsk.post()
- if not postpone:
- if tsk.runnable_status()==Task.ASK_LATER:
- raise self.WafError('cannot post the task %r'%tsk)
- tsk.run()
- def install_files(self,dest,files,env=None,chmod=Utils.O644,relative_trick=False,cwd=None,add=True,postpone=True):
- tsk=inst(env=env or self.env)
- tsk.bld=self
- tsk.path=cwd or self.path
- tsk.chmod=chmod
- if isinstance(files,waflib.Node.Node):
- tsk.source=[files]
- else:
- tsk.source=Utils.to_list(files)
- tsk.dest=dest
- tsk.exec_task=tsk.exec_install_files
- tsk.relative_trick=relative_trick
- if add:self.add_to_group(tsk)
- self.run_task_now(tsk,postpone)
- return tsk
- def install_as(self,dest,srcfile,env=None,chmod=Utils.O644,cwd=None,add=True,postpone=True):
- tsk=inst(env=env or self.env)
- tsk.bld=self
- tsk.path=cwd or self.path
- tsk.chmod=chmod
- tsk.source=[srcfile]
- tsk.dest=dest
- tsk.exec_task=tsk.exec_install_as
- if add:self.add_to_group(tsk)
- self.run_task_now(tsk,postpone)
- return tsk
- def symlink_as(self,dest,src,env=None,cwd=None,add=True,postpone=True,relative_trick=False):
- if Utils.is_win32:
- return
- tsk=inst(env=env or self.env)
- tsk.bld=self
- tsk.dest=dest
- tsk.path=cwd or self.path
- tsk.source=[]
- tsk.link=src
- tsk.relative_trick=relative_trick
- tsk.exec_task=tsk.exec_symlink_as
- if add:self.add_to_group(tsk)
- self.run_task_now(tsk,postpone)
- return tsk
-class UninstallContext(InstallContext):
- '''removes the targets installed'''
- cmd='uninstall'
- def __init__(self,**kw):
- super(UninstallContext,self).__init__(**kw)
- self.is_install=UNINSTALL
- def do_install(self,src,tgt,chmod=Utils.O644):
- if not self.progress_bar:
- Logs.info('- remove %s'%tgt)
- self.uninstall.append(tgt)
- try:
- os.remove(tgt)
- except OSError ,e:
- if e.errno!=errno.ENOENT:
- if not getattr(self,'uninstall_error',None):
- self.uninstall_error=True
- Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
- if Logs.verbose>1:
- Logs.warn('Could not remove %s (error code %r)'%(e.filename,e.errno))
- while tgt:
- tgt=os.path.dirname(tgt)
- try:
- os.rmdir(tgt)
- except OSError:
- break
- def do_link(self,src,tgt):
- try:
- if not self.progress_bar:
- Logs.info('- unlink %s'%tgt)
- os.remove(tgt)
- except OSError:
- pass
- while tgt:
- tgt=os.path.dirname(tgt)
- try:
- os.rmdir(tgt)
- except OSError:
- break
- def execute(self):
- try:
- def runnable_status(self):
- return Task.SKIP_ME
- setattr(Task.Task,'runnable_status_back',Task.Task.runnable_status)
- setattr(Task.Task,'runnable_status',runnable_status)
- super(UninstallContext,self).execute()
- finally:
- setattr(Task.Task,'runnable_status',Task.Task.runnable_status_back)
-class CleanContext(BuildContext):
- '''cleans the project'''
- cmd='clean'
- def execute(self):
- self.restore()
- if not self.all_envs:
- self.load_envs()
- self.recurse([self.run_dir])
- try:
- self.clean()
- finally:
- self.store()
- def clean(self):
- Logs.debug('build: clean called')
- if self.bldnode!=self.srcnode:
- lst=[]
- for e in self.all_envs.values():
- lst.extend(self.root.find_or_declare(f)for f in e[CFG_FILES])
- for n in self.bldnode.ant_glob('**/*',excl='.lock* *conf_check_*/** config.log c4che/*',quiet=True):
- if n in lst:
- continue
- n.delete()
- self.root.children={}
- for v in'node_deps task_sigs raw_deps'.split():
- setattr(self,v,{})
-class ListContext(BuildContext):
- '''lists the targets to execute'''
- cmd='list'
- def execute(self):
- self.restore()
- if not self.all_envs:
- self.load_envs()
- self.recurse([self.run_dir])
- self.pre_build()
- self.timer=Utils.Timer()
- for g in self.groups:
- for tg in g:
- try:
- f=tg.post
- except AttributeError:
- pass
- else:
- f()
- try:
- self.get_tgen_by_name('')
- except Exception:
- pass
- lst=list(self.task_gen_cache_names.keys())
- lst.sort()
- for k in lst:
- Logs.pprint('GREEN',k)
-class StepContext(BuildContext):
- '''executes tasks in a step-by-step fashion, for debugging'''
- cmd='step'
- def __init__(self,**kw):
- super(StepContext,self).__init__(**kw)
- self.files=Options.options.files
- def compile(self):
- if not self.files:
- Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"')
- BuildContext.compile(self)
- return
- targets=None
- if self.targets and self.targets!='*':
- targets=self.targets.split(',')
- for g in self.groups:
- for tg in g:
- if targets and tg.name not in targets:
- continue
- try:
- f=tg.post
- except AttributeError:
- pass
- else:
- f()
- for pat in self.files.split(','):
- matcher=self.get_matcher(pat)
- for tg in g:
- if isinstance(tg,Task.TaskBase):
- lst=[tg]
- else:
- lst=tg.tasks
- for tsk in lst:
- do_exec=False
- for node in getattr(tsk,'inputs',[]):
- if matcher(node,output=False):
- do_exec=True
- break
- for node in getattr(tsk,'outputs',[]):
- if matcher(node,output=True):
- do_exec=True
- break
- if do_exec:
- ret=tsk.run()
- Logs.info('%s -> exit %r'%(str(tsk),ret))
- def get_matcher(self,pat):
- inn=True
- out=True
- if pat.startswith('in:'):
- out=False
- pat=pat.replace('in:','')
- elif pat.startswith('out:'):
- inn=False
- pat=pat.replace('out:','')
- anode=self.root.find_node(pat)
- pattern=None
- if not anode:
- if not pat.startswith('^'):
- pat='^.+?%s'%pat
- if not pat.endswith('$'):
- pat='%s$'%pat
- pattern=re.compile(pat)
- def match(node,output):
- if output==True and not out:
- return False
- if output==False and not inn:
- return False
- if anode:
- return anode==node
- else:
- return pattern.match(node.abspath())
- return match
-BuildContext.store=Utils.nogc(BuildContext.store)
-BuildContext.restore=Utils.nogc(BuildContext.restore)
diff --git a/waflib/ConfigSet.py b/waflib/ConfigSet.py
deleted file mode 100644
index 42ecde3..0000000
--- a/waflib/ConfigSet.py
+++ /dev/null
@@ -1,152 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import copy,re,os
-from waflib import Logs,Utils
-re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M)
-class ConfigSet(object):
- __slots__=('table','parent')
- def __init__(self,filename=None):
- self.table={}
- if filename:
- self.load(filename)
- def __contains__(self,key):
- if key in self.table:return True
- try:return self.parent.__contains__(key)
- except AttributeError:return False
- def keys(self):
- keys=set()
- cur=self
- while cur:
- keys.update(cur.table.keys())
- cur=getattr(cur,'parent',None)
- keys=list(keys)
- keys.sort()
- return keys
- def __str__(self):
- return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in self.keys()])
- def __getitem__(self,key):
- try:
- while 1:
- x=self.table.get(key,None)
- if not x is None:
- return x
- self=self.parent
- except AttributeError:
- return[]
- def __setitem__(self,key,value):
- self.table[key]=value
- def __delitem__(self,key):
- self[key]=[]
- def __getattr__(self,name):
- if name in self.__slots__:
- return object.__getattr__(self,name)
- else:
- return self[name]
- def __setattr__(self,name,value):
- if name in self.__slots__:
- object.__setattr__(self,name,value)
- else:
- self[name]=value
- def __delattr__(self,name):
- if name in self.__slots__:
- object.__delattr__(self,name)
- else:
- del self[name]
- def derive(self):
- newenv=ConfigSet()
- newenv.parent=self
- return newenv
- def detach(self):
- tbl=self.get_merged_dict()
- try:
- delattr(self,'parent')
- except AttributeError:
- pass
- else:
- keys=tbl.keys()
- for x in keys:
- tbl[x]=copy.deepcopy(tbl[x])
- self.table=tbl
- def get_flat(self,key):
- s=self[key]
- if isinstance(s,str):return s
- return' '.join(s)
- def _get_list_value_for_modification(self,key):
- try:
- value=self.table[key]
- except KeyError:
- try:value=self.parent[key]
- except AttributeError:value=[]
- if isinstance(value,list):
- value=value[:]
- else:
- value=[value]
- else:
- if not isinstance(value,list):
- value=[value]
- self.table[key]=value
- return value
- def append_value(self,var,val):
- current_value=self._get_list_value_for_modification(var)
- if isinstance(val,str):
- val=[val]
- current_value.extend(val)
- def prepend_value(self,var,val):
- if isinstance(val,str):
- val=[val]
- self.table[var]=val+self._get_list_value_for_modification(var)
- def append_unique(self,var,val):
- if isinstance(val,str):
- val=[val]
- current_value=self._get_list_value_for_modification(var)
- for x in val:
- if x not in current_value:
- current_value.append(x)
- def get_merged_dict(self):
- table_list=[]
- env=self
- while 1:
- table_list.insert(0,env.table)
- try:env=env.parent
- except AttributeError:break
- merged_table={}
- for table in table_list:
- merged_table.update(table)
- return merged_table
- def store(self,filename):
- try:
- os.makedirs(os.path.split(filename)[0])
- except OSError:
- pass
- f=None
- try:
- f=open(filename,'w')
- merged_table=self.get_merged_dict()
- keys=list(merged_table.keys())
- keys.sort()
- for k in keys:
- if k!='undo_stack':
- f.write('%s = %r\n'%(k,merged_table[k]))
- finally:
- if f:
- f.close()
- def load(self,filename):
- tbl=self.table
- code=Utils.readf(filename,m='rU')
- for m in re_imp.finditer(code):
- g=m.group
- tbl[g(2)]=eval(g(3))
- Logs.debug('env: %s'%str(self.table))
- def update(self,d):
- for k,v in d.items():
- self[k]=v
- def stash(self):
- orig=self.table
- tbl=self.table=self.table.copy()
- for x in tbl.keys():
- tbl[x]=copy.deepcopy(tbl[x])
- self.undo_stack=self.undo_stack+[orig]
- def revert(self):
- self.table=self.undo_stack.pop(-1)
diff --git a/waflib/Configure.py b/waflib/Configure.py
deleted file mode 100644
index e8148d5..0000000
--- a/waflib/Configure.py
+++ /dev/null
@@ -1,317 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,shlex,sys,time
-from waflib import ConfigSet,Utils,Options,Logs,Context,Build,Errors
-try:
- from urllib import request
-except ImportError:
- from urllib import urlopen
-else:
- urlopen=request.urlopen
-BREAK='break'
-CONTINUE='continue'
-WAF_CONFIG_LOG='config.log'
-autoconfig=False
-conf_template='''# project %(app)s configured on %(now)s by
-# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
-# using %(args)s
-#'''
-def download_check(node):
- pass
-def download_tool(tool,force=False,ctx=None):
- for x in Utils.to_list(Context.remote_repo):
- for sub in Utils.to_list(Context.remote_locs):
- url='/'.join((x,sub,tool+'.py'))
- try:
- web=urlopen(url)
- try:
- if web.getcode()!=200:
- continue
- except AttributeError:
- pass
- except Exception:
- continue
- else:
- tmp=ctx.root.make_node(os.sep.join((Context.waf_dir,'waflib','extras',tool+'.py')))
- tmp.write(web.read(),'wb')
- Logs.warn('Downloaded %s from %s'%(tool,url))
- download_check(tmp)
- try:
- module=Context.load_tool(tool)
- except Exception:
- Logs.warn('The tool %s from %s is unusable'%(tool,url))
- try:
- tmp.delete()
- except Exception:
- pass
- continue
- return module
- raise Errors.WafError('Could not load the Waf tool')
-class ConfigurationContext(Context.Context):
- '''configures the project'''
- cmd='configure'
- error_handlers=[]
- def __init__(self,**kw):
- super(ConfigurationContext,self).__init__(**kw)
- self.environ=dict(os.environ)
- self.all_envs={}
- self.top_dir=None
- self.out_dir=None
- self.tools=[]
- self.hash=0
- self.files=[]
- self.tool_cache=[]
- self.setenv('')
- def setenv(self,name,env=None):
- if name not in self.all_envs or env:
- if not env:
- env=ConfigSet.ConfigSet()
- self.prepare_env(env)
- else:
- env=env.derive()
- self.all_envs[name]=env
- self.variant=name
- def get_env(self):
- return self.all_envs[self.variant]
- def set_env(self,val):
- self.all_envs[self.variant]=val
- env=property(get_env,set_env)
- def init_dirs(self):
- top=self.top_dir
- if not top:
- top=Options.options.top
- if not top:
- top=getattr(Context.g_module,Context.TOP,None)
- if not top:
- top=self.path.abspath()
- top=os.path.abspath(top)
- self.srcnode=(os.path.isabs(top)and self.root or self.path).find_dir(top)
- assert(self.srcnode)
- out=self.out_dir
- if not out:
- out=Options.options.out
- if not out:
- out=getattr(Context.g_module,Context.OUT,None)
- if not out:
- out=Options.lockfile.replace('.lock-waf_%s_'%sys.platform,'').replace('.lock-waf','')
- self.bldnode=(os.path.isabs(out)and self.root or self.path).make_node(out)
- self.bldnode.mkdir()
- if not os.path.isdir(self.bldnode.abspath()):
- conf.fatal('Could not create the build directory %s'%self.bldnode.abspath())
- def execute(self):
- self.init_dirs()
- self.cachedir=self.bldnode.make_node(Build.CACHE_DIR)
- self.cachedir.mkdir()
- path=os.path.join(self.bldnode.abspath(),WAF_CONFIG_LOG)
- self.logger=Logs.make_logger(path,'cfg')
- app=getattr(Context.g_module,'APPNAME','')
- if app:
- ver=getattr(Context.g_module,'VERSION','')
- if ver:
- app="%s (%s)"%(app,ver)
- now=time.ctime()
- pyver=sys.hexversion
- systype=sys.platform
- args=" ".join(sys.argv)
- wafver=Context.WAFVERSION
- abi=Context.ABI
- self.to_log(conf_template%vars())
- self.msg('Setting top to',self.srcnode.abspath())
- self.msg('Setting out to',self.bldnode.abspath())
- if id(self.srcnode)==id(self.bldnode):
- Logs.warn('Setting top == out (remember to use "update_outputs")')
- elif id(self.path)!=id(self.srcnode):
- if self.srcnode.is_child_of(self.path):
- Logs.warn('Are you certain that you do not want to set top="." ?')
- super(ConfigurationContext,self).execute()
- self.store()
- Context.top_dir=self.srcnode.abspath()
- Context.out_dir=self.bldnode.abspath()
- env=ConfigSet.ConfigSet()
- env['argv']=sys.argv
- env['options']=Options.options.__dict__
- env.run_dir=Context.run_dir
- env.top_dir=Context.top_dir
- env.out_dir=Context.out_dir
- env['hash']=self.hash
- env['files']=self.files
- env['environ']=dict(self.environ)
- if not self.env.NO_LOCK_IN_RUN:
- env.store(Context.run_dir+os.sep+Options.lockfile)
- if not self.env.NO_LOCK_IN_TOP:
- env.store(Context.top_dir+os.sep+Options.lockfile)
- if not self.env.NO_LOCK_IN_OUT:
- env.store(Context.out_dir+os.sep+Options.lockfile)
- def prepare_env(self,env):
- if not env.PREFIX:
- if Options.options.prefix or Utils.is_win32:
- env.PREFIX=os.path.abspath(os.path.expanduser(Options.options.prefix))
- else:
- env.PREFIX=''
- if not env.BINDIR:
- env.BINDIR=Utils.subst_vars('${PREFIX}/bin',env)
- if not env.LIBDIR:
- env.LIBDIR=Utils.subst_vars('${PREFIX}/lib',env)
- def store(self):
- n=self.cachedir.make_node('build.config.py')
- n.write('version = 0x%x\ntools = %r\n'%(Context.HEXVERSION,self.tools))
- if not self.all_envs:
- self.fatal('nothing to store in the configuration context!')
- for key in self.all_envs:
- tmpenv=self.all_envs[key]
- tmpenv.store(os.path.join(self.cachedir.abspath(),key+Build.CACHE_SUFFIX))
- def load(self,input,tooldir=None,funs=None,download=True):
- tools=Utils.to_list(input)
- if tooldir:tooldir=Utils.to_list(tooldir)
- for tool in tools:
- mag=(tool,id(self.env),funs)
- if mag in self.tool_cache:
- self.to_log('(tool %s is already loaded, skipping)'%tool)
- continue
- self.tool_cache.append(mag)
- module=None
- try:
- module=Context.load_tool(tool,tooldir)
- except ImportError ,e:
- if Options.options.download:
- module=download_tool(tool,ctx=self)
- if not module:
- self.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s'%(tool,sys.path,e))
- else:
- self.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s'%(tool,sys.path,e))
- except Exception ,e:
- self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs))
- self.to_log(Utils.ex_stack())
- raise
- if funs is not None:
- self.eval_rules(funs)
- else:
- func=getattr(module,'configure',None)
- if func:
- if type(func)is type(Utils.readf):func(self)
- else:self.eval_rules(func)
- self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs})
- def post_recurse(self,node):
- super(ConfigurationContext,self).post_recurse(node)
- self.hash=hash((self.hash,node.read('rb')))
- self.files.append(node.abspath())
- def eval_rules(self,rules):
- self.rules=Utils.to_list(rules)
- for x in self.rules:
- f=getattr(self,x)
- if not f:self.fatal("No such method '%s'."%x)
- try:
- f()
- except Exception ,e:
- ret=self.err_handler(x,e)
- if ret==BREAK:
- break
- elif ret==CONTINUE:
- continue
- else:
- raise
- def err_handler(self,fun,error):
- pass
-def conf(f):
- def fun(*k,**kw):
- mandatory=True
- if'mandatory'in kw:
- mandatory=kw['mandatory']
- del kw['mandatory']
- try:
- return f(*k,**kw)
- except Errors.ConfigurationError:
- if mandatory:
- raise
- setattr(ConfigurationContext,f.__name__,fun)
- setattr(Build.BuildContext,f.__name__,fun)
- return f
-@conf
-def add_os_flags(self,var,dest=None):
- try:self.env.append_value(dest or var,shlex.split(self.environ[var]))
- except KeyError:pass
-@conf
-def cmd_to_list(self,cmd):
- if isinstance(cmd,str)and cmd.find(' '):
- try:
- os.stat(cmd)
- except OSError:
- return shlex.split(cmd)
- else:
- return[cmd]
- return cmd
-@conf
-def check_waf_version(self,mini='1.6.99',maxi='1.8.0'):
- self.start_msg('Checking for waf version in %s-%s'%(str(mini),str(maxi)))
- ver=Context.HEXVERSION
- if Utils.num2ver(mini)>ver:
- self.fatal('waf version should be at least %r (%r found)'%(Utils.num2ver(mini),ver))
- if Utils.num2ver(maxi)<ver:
- self.fatal('waf version should be at most %r (%r found)'%(Utils.num2ver(maxi),ver))
- self.end_msg('ok')
-@conf
-def find_file(self,filename,path_list=[]):
- for n in Utils.to_list(filename):
- for d in Utils.to_list(path_list):
- p=os.path.join(d,n)
- if os.path.exists(p):
- return p
- self.fatal('Could not find %r'%filename)
-@conf
-def find_program(self,filename,**kw):
- exts=kw.get('exts',Utils.is_win32 and'.exe,.com,.bat,.cmd'or',.sh,.pl,.py')
- environ=kw.get('environ',os.environ)
- ret=''
- filename=Utils.to_list(filename)
- var=kw.get('var','')
- if not var:
- var=filename[0].upper()
- if self.env[var]:
- ret=self.env[var]
- elif var in environ:
- ret=environ[var]
- path_list=kw.get('path_list','')
- if not ret:
- if path_list:
- path_list=Utils.to_list(path_list)
- else:
- path_list=environ.get('PATH','').split(os.pathsep)
- if not isinstance(filename,list):
- filename=[filename]
- for a in exts.split(','):
- if ret:
- break
- for b in filename:
- if ret:
- break
- for c in path_list:
- if ret:
- break
- x=os.path.expanduser(os.path.join(c,b+a))
- if os.path.isfile(x):
- ret=x
- if not ret and Utils.winreg:
- ret=Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER,filename)
- if not ret and Utils.winreg:
- ret=Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE,filename)
- self.msg('Checking for program '+','.join(filename),ret or False)
- self.to_log('find program=%r paths=%r var=%r -> %r'%(filename,path_list,var,ret))
- if not ret:
- self.fatal(kw.get('errmsg','')or'Could not find the program %s'%','.join(filename))
- if var:
- self.env[var]=ret
- return ret
-@conf
-def find_perl_program(self,filename,path_list=[],var=None,environ=None,exts=''):
- try:
- app=self.find_program(filename,path_list=path_list,var=var,environ=environ,exts=exts)
- except Exception:
- self.find_program('perl',var='PERL')
- app=self.find_file(filename,os.environ['PATH'].split(os.pathsep))
- if not app:
- raise
- if var:
- self.env[var]=Utils.to_list(self.env['PERL'])+[app]
- self.msg('Checking for %r'%filename,app)
diff --git a/waflib/Context.py b/waflib/Context.py
deleted file mode 100644
index 494ece2..0000000
--- a/waflib/Context.py
+++ /dev/null
@@ -1,319 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,imp,sys
-from waflib import Utils,Errors,Logs
-import waflib.Node
-HEXVERSION=0x1070900
-WAFVERSION="1.7.9"
-WAFREVISION="9e92489dbc008e4abae9c147b1d63b48296797c2"
-ABI=98
-DBFILE='.wafpickle-%s-%d-%d'%(sys.platform,sys.hexversion,ABI)
-APPNAME='APPNAME'
-VERSION='VERSION'
-TOP='top'
-OUT='out'
-WSCRIPT_FILE='wscript'
-launch_dir=''
-run_dir=''
-top_dir=''
-out_dir=''
-waf_dir=''
-local_repo=''
-remote_repo='http://waf.googlecode.com/git/'
-remote_locs=['waflib/extras','waflib/Tools']
-g_module=None
-STDOUT=1
-STDERR=-1
-BOTH=0
-classes=[]
-def create_context(cmd_name,*k,**kw):
- global classes
- for x in classes:
- if x.cmd==cmd_name:
- return x(*k,**kw)
- ctx=Context(*k,**kw)
- ctx.fun=cmd_name
- return ctx
-class store_context(type):
- def __init__(cls,name,bases,dict):
- super(store_context,cls).__init__(name,bases,dict)
- name=cls.__name__
- if name=='ctx'or name=='Context':
- return
- try:
- cls.cmd
- except AttributeError:
- raise Errors.WafError('Missing command for the context class %r (cmd)'%name)
- if not getattr(cls,'fun',None):
- cls.fun=cls.cmd
- global classes
- classes.insert(0,cls)
-ctx=store_context('ctx',(object,),{})
-class Context(ctx):
- errors=Errors
- tools={}
- def __init__(self,**kw):
- try:
- rd=kw['run_dir']
- except KeyError:
- global run_dir
- rd=run_dir
- class node_class(waflib.Node.Node):
- pass
- self.node_class=node_class
- self.node_class.__module__="waflib.Node"
- self.node_class.__name__="Nod3"
- self.node_class.ctx=self
- self.root=self.node_class('',None)
- self.cur_script=None
- self.path=self.root.find_dir(rd)
- self.stack_path=[]
- self.exec_dict={'ctx':self,'conf':self,'bld':self,'opt':self}
- self.logger=None
- def __hash__(self):
- return id(self)
- def load(self,tool_list,*k,**kw):
- tools=Utils.to_list(tool_list)
- path=Utils.to_list(kw.get('tooldir',''))
- for t in tools:
- module=load_tool(t,path)
- fun=getattr(module,kw.get('name',self.fun),None)
- if fun:
- fun(self)
- def execute(self):
- global g_module
- self.recurse([os.path.dirname(g_module.root_path)])
- def pre_recurse(self,node):
- self.stack_path.append(self.cur_script)
- self.cur_script=node
- self.path=node.parent
- def post_recurse(self,node):
- self.cur_script=self.stack_path.pop()
- if self.cur_script:
- self.path=self.cur_script.parent
- def recurse(self,dirs,name=None,mandatory=True,once=True):
- try:
- cache=self.recurse_cache
- except AttributeError:
- cache=self.recurse_cache={}
- for d in Utils.to_list(dirs):
- if not os.path.isabs(d):
- d=os.path.join(self.path.abspath(),d)
- WSCRIPT=os.path.join(d,WSCRIPT_FILE)
- WSCRIPT_FUN=WSCRIPT+'_'+(name or self.fun)
- node=self.root.find_node(WSCRIPT_FUN)
- if node and(not once or node not in cache):
- cache[node]=True
- self.pre_recurse(node)
- try:
- function_code=node.read('rU')
- exec(compile(function_code,node.abspath(),'exec'),self.exec_dict)
- finally:
- self.post_recurse(node)
- elif not node:
- node=self.root.find_node(WSCRIPT)
- tup=(node,name or self.fun)
- if node and(not once or tup not in cache):
- cache[tup]=True
- self.pre_recurse(node)
- try:
- wscript_module=load_module(node.abspath())
- user_function=getattr(wscript_module,(name or self.fun),None)
- if not user_function:
- if not mandatory:
- continue
- raise Errors.WafError('No function %s defined in %s'%(name or self.fun,node.abspath()))
- user_function(self)
- finally:
- self.post_recurse(node)
- elif not node:
- if not mandatory:
- continue
- raise Errors.WafError('No wscript file in directory %s'%d)
- def exec_command(self,cmd,**kw):
- subprocess=Utils.subprocess
- kw['shell']=isinstance(cmd,str)
- Logs.debug('runner: %r'%cmd)
- Logs.debug('runner_env: kw=%s'%kw)
- if self.logger:
- self.logger.info(cmd)
- if'stdout'not in kw:
- kw['stdout']=subprocess.PIPE
- if'stderr'not in kw:
- kw['stderr']=subprocess.PIPE
- try:
- if kw['stdout']or kw['stderr']:
- p=subprocess.Popen(cmd,**kw)
- (out,err)=p.communicate()
- ret=p.returncode
- else:
- out,err=(None,None)
- ret=subprocess.Popen(cmd,**kw).wait()
- except Exception ,e:
- raise Errors.WafError('Execution failure: %s'%str(e),ex=e)
- if out:
- if not isinstance(out,str):
- out=out.decode(sys.stdout.encoding or'iso8859-1')
- if self.logger:
- self.logger.debug('out: %s'%out)
- else:
- sys.stdout.write(out)
- if err:
- if not isinstance(err,str):
- err=err.decode(sys.stdout.encoding or'iso8859-1')
- if self.logger:
- self.logger.error('err: %s'%err)
- else:
- sys.stderr.write(err)
- return ret
- def cmd_and_log(self,cmd,**kw):
- subprocess=Utils.subprocess
- kw['shell']=isinstance(cmd,str)
- Logs.debug('runner: %r'%cmd)
- if'quiet'in kw:
- quiet=kw['quiet']
- del kw['quiet']
- else:
- quiet=None
- if'output'in kw:
- to_ret=kw['output']
- del kw['output']
- else:
- to_ret=STDOUT
- kw['stdout']=kw['stderr']=subprocess.PIPE
- if quiet is None:
- self.to_log(cmd)
- try:
- p=subprocess.Popen(cmd,**kw)
- (out,err)=p.communicate()
- except Exception ,e:
- raise Errors.WafError('Execution failure: %s'%str(e),ex=e)
- if not isinstance(out,str):
- out=out.decode(sys.stdout.encoding or'iso8859-1')
- if not isinstance(err,str):
- err=err.decode(sys.stdout.encoding or'iso8859-1')
- if out and quiet!=STDOUT and quiet!=BOTH:
- self.to_log('out: %s'%out)
- if err and quiet!=STDERR and quiet!=BOTH:
- self.to_log('err: %s'%err)
- if p.returncode:
- e=Errors.WafError('Command %r returned %r'%(cmd,p.returncode))
- e.returncode=p.returncode
- e.stderr=err
- e.stdout=out
- raise e
- if to_ret==BOTH:
- return(out,err)
- elif to_ret==STDERR:
- return err
- return out
- def fatal(self,msg,ex=None):
- if self.logger:
- self.logger.info('from %s: %s'%(self.path.abspath(),msg))
- try:
- msg='%s\n(complete log in %s)'%(msg,self.logger.handlers[0].baseFilename)
- except Exception:
- pass
- raise self.errors.ConfigurationError(msg,ex=ex)
- def to_log(self,msg):
- if not msg:
- return
- if self.logger:
- self.logger.info(msg)
- else:
- sys.stderr.write(str(msg))
- sys.stderr.flush()
- def msg(self,msg,result,color=None):
- self.start_msg(msg)
- if not isinstance(color,str):
- color=result and'GREEN'or'YELLOW'
- self.end_msg(result,color)
- def start_msg(self,msg):
- try:
- if self.in_msg:
- self.in_msg+=1
- return
- except AttributeError:
- self.in_msg=0
- self.in_msg+=1
- try:
- self.line_just=max(self.line_just,len(msg))
- except AttributeError:
- self.line_just=max(40,len(msg))
- for x in(self.line_just*'-',msg):
- self.to_log(x)
- Logs.pprint('NORMAL',"%s :"%msg.ljust(self.line_just),sep='')
- def end_msg(self,result,color=None):
- self.in_msg-=1
- if self.in_msg:
- return
- defcolor='GREEN'
- if result==True:
- msg='ok'
- elif result==False:
- msg='not found'
- defcolor='YELLOW'
- else:
- msg=str(result)
- self.to_log(msg)
- Logs.pprint(color or defcolor,msg)
- def load_special_tools(self,var,ban=[]):
- global waf_dir
- lst=self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
- for x in lst:
- if not x.name in ban:
- load_tool(x.name.replace('.py',''))
-cache_modules={}
-def load_module(path):
- try:
- return cache_modules[path]
- except KeyError:
- pass
- module=imp.new_module(WSCRIPT_FILE)
- try:
- code=Utils.readf(path,m='rU')
- except(IOError,OSError):
- raise Errors.WafError('Could not read the file %r'%path)
- module_dir=os.path.dirname(path)
- sys.path.insert(0,module_dir)
- exec(compile(code,path,'exec'),module.__dict__)
- sys.path.remove(module_dir)
- cache_modules[path]=module
- return module
-def load_tool(tool,tooldir=None):
- if tool=='java':
- tool='javaw'
- elif tool=='compiler_cc':
- tool='compiler_c'
- else:
- tool=tool.replace('++','xx')
- if tooldir:
- assert isinstance(tooldir,list)
- sys.path=tooldir+sys.path
- try:
- __import__(tool)
- ret=sys.modules[tool]
- Context.tools[tool]=ret
- return ret
- finally:
- for d in tooldir:
- sys.path.remove(d)
- else:
- global waf_dir
- try:
- os.stat(os.path.join(waf_dir,'waflib','extras',tool+'.py'))
- except OSError:
- try:
- os.stat(os.path.join(waf_dir,'waflib','Tools',tool+'.py'))
- except OSError:
- d=tool
- else:
- d='waflib.Tools.%s'%tool
- else:
- d='waflib.extras.%s'%tool
- __import__(d)
- ret=sys.modules[d]
- Context.tools[tool]=ret
- return ret
diff --git a/waflib/Errors.py b/waflib/Errors.py
deleted file mode 100644
index aacc1a9..0000000
--- a/waflib/Errors.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import traceback,sys
-class WafError(Exception):
- def __init__(self,msg='',ex=None):
- self.msg=msg
- assert not isinstance(msg,Exception)
- self.stack=[]
- if ex:
- if not msg:
- self.msg=str(ex)
- if isinstance(ex,WafError):
- self.stack=ex.stack
- else:
- self.stack=traceback.extract_tb(sys.exc_info()[2])
- self.stack+=traceback.extract_stack()[:-1]
- self.verbose_msg=''.join(traceback.format_list(self.stack))
- def __str__(self):
- return str(self.msg)
-class BuildError(WafError):
- def __init__(self,error_tasks=[]):
- self.tasks=error_tasks
- WafError.__init__(self,self.format_error())
- def format_error(self):
- lst=['Build failed']
- for tsk in self.tasks:
- txt=tsk.format_error()
- if txt:lst.append(txt)
- return'\n'.join(lst)
-class ConfigurationError(WafError):
- pass
-class TaskRescan(WafError):
- pass
-class TaskNotReady(WafError):
- pass
diff --git a/waflib/Logs.py b/waflib/Logs.py
deleted file mode 100644
index d6d4ddd..0000000
--- a/waflib/Logs.py
+++ /dev/null
@@ -1,176 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,re,traceback,sys
-try:
- import threading
-except ImportError:
- pass
-else:
- wlock=threading.Lock()
- class sync_stream(object):
- def __init__(self,stream):
- self.stream=stream
- self.encoding=self.stream.encoding
- def write(self,txt):
- try:
- wlock.acquire()
- self.stream.write(txt)
- self.stream.flush()
- finally:
- wlock.release()
- def fileno(self):
- return self.stream.fileno()
- def flush(self):
- self.stream.flush()
- def isatty(self):
- return self.stream.isatty()
- _nocolor=os.environ.get('NOCOLOR','no')not in('no','0','false')
- try:
- if not _nocolor:
- import waflib.ansiterm
- except ImportError:
- pass
- if not os.environ.get('NOSYNC',False):
- if id(sys.stdout)==id(sys.__stdout__):
- sys.stdout=sync_stream(sys.stdout)
- sys.stderr=sync_stream(sys.stderr)
-import logging
-LOG_FORMAT="%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
-HOUR_FORMAT="%H:%M:%S"
-zones=''
-verbose=0
-colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',}
-got_tty=not os.environ.get('TERM','dumb')in['dumb','emacs']
-if got_tty:
- try:
- got_tty=sys.stderr.isatty()and sys.stdout.isatty()
- except AttributeError:
- got_tty=False
-if(not got_tty and os.environ.get('TERM','dumb')!='msys')or _nocolor:
- colors_lst['USE']=False
-def get_term_cols():
- return 80
-try:
- import struct,fcntl,termios
-except ImportError:
- pass
-else:
- if got_tty:
- def get_term_cols_real():
- dummy_lines,cols=struct.unpack("HHHH",fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[:2]
- return cols
- try:
- get_term_cols_real()
- except Exception:
- pass
- else:
- get_term_cols=get_term_cols_real
-get_term_cols.__doc__="""
- Get the console width in characters.
-
- :return: the number of characters per line
- :rtype: int
- """
-def get_color(cl):
- if not colors_lst['USE']:return''
- return colors_lst.get(cl,'')
-class color_dict(object):
- def __getattr__(self,a):
- return get_color(a)
- def __call__(self,a):
- return get_color(a)
-colors=color_dict()
-re_log=re.compile(r'(\w+): (.*)',re.M)
-class log_filter(logging.Filter):
- def __init__(self,name=None):
- pass
- def filter(self,rec):
- rec.c1=colors.PINK
- rec.c2=colors.NORMAL
- rec.zone=rec.module
- if rec.levelno>=logging.INFO:
- if rec.levelno>=logging.ERROR:
- rec.c1=colors.RED
- elif rec.levelno>=logging.WARNING:
- rec.c1=colors.YELLOW
- else:
- rec.c1=colors.GREEN
- return True
- m=re_log.match(rec.msg)
- if m:
- rec.zone=m.group(1)
- rec.msg=m.group(2)
- if zones:
- return getattr(rec,'zone','')in zones or'*'in zones
- elif not verbose>2:
- return False
- return True
-class formatter(logging.Formatter):
- def __init__(self):
- logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT)
- def format(self,rec):
- if rec.levelno>=logging.WARNING or rec.levelno==logging.INFO:
- try:
- msg=rec.msg.decode('utf-8')
- except Exception:
- msg=rec.msg
- return'%s%s%s'%(rec.c1,msg,rec.c2)
- return logging.Formatter.format(self,rec)
-log=None
-def debug(*k,**kw):
- if verbose:
- k=list(k)
- k[0]=k[0].replace('\n',' ')
- global log
- log.debug(*k,**kw)
-def error(*k,**kw):
- global log
- log.error(*k,**kw)
- if verbose>2:
- st=traceback.extract_stack()
- if st:
- st=st[:-1]
- buf=[]
- for filename,lineno,name,line in st:
- buf.append(' File "%s", line %d, in %s'%(filename,lineno,name))
- if line:
- buf.append(' %s'%line.strip())
- if buf:log.error("\n".join(buf))
-def warn(*k,**kw):
- global log
- log.warn(*k,**kw)
-def info(*k,**kw):
- global log
- log.info(*k,**kw)
-def init_log():
- global log
- log=logging.getLogger('waflib')
- log.handlers=[]
- log.filters=[]
- hdlr=logging.StreamHandler()
- hdlr.setFormatter(formatter())
- log.addHandler(hdlr)
- log.addFilter(log_filter())
- log.setLevel(logging.DEBUG)
-def make_logger(path,name):
- logger=logging.getLogger(name)
- hdlr=logging.FileHandler(path,'w')
- formatter=logging.Formatter('%(message)s')
- hdlr.setFormatter(formatter)
- logger.addHandler(hdlr)
- logger.setLevel(logging.DEBUG)
- return logger
-def make_mem_logger(name,to_log,size=10000):
- from logging.handlers import MemoryHandler
- logger=logging.getLogger(name)
- hdlr=MemoryHandler(size,target=to_log)
- formatter=logging.Formatter('%(message)s')
- hdlr.setFormatter(formatter)
- logger.addHandler(hdlr)
- logger.memhandler=hdlr
- logger.setLevel(logging.DEBUG)
- return logger
-def pprint(col,str,label='',sep='\n'):
- sys.stderr.write("%s%s%s %s%s"%(colors(col),str,colors.NORMAL,label,sep))
diff --git a/waflib/Node.py b/waflib/Node.py
deleted file mode 100644
index 79d6bbf..0000000
--- a/waflib/Node.py
+++ /dev/null
@@ -1,466 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,re,sys,shutil
-from waflib import Utils,Errors
-exclude_regs='''
-**/*~
-**/#*#
-**/.#*
-**/%*%
-**/._*
-**/CVS
-**/CVS/**
-**/.cvsignore
-**/SCCS
-**/SCCS/**
-**/vssver.scc
-**/.svn
-**/.svn/**
-**/BitKeeper
-**/.git
-**/.git/**
-**/.gitignore
-**/.bzr
-**/.bzrignore
-**/.bzr/**
-**/.hg
-**/.hg/**
-**/_MTN
-**/_MTN/**
-**/.arch-ids
-**/{arch}
-**/_darcs
-**/_darcs/**
-**/.DS_Store'''
-def split_path(path):
- return path.split('/')
-def split_path_cygwin(path):
- if path.startswith('//'):
- ret=path.split('/')[2:]
- ret[0]='/'+ret[0]
- return ret
- return path.split('/')
-re_sp=re.compile('[/\\\\]')
-def split_path_win32(path):
- if path.startswith('\\\\'):
- ret=re.split(re_sp,path)[2:]
- ret[0]='\\'+ret[0]
- return ret
- return re.split(re_sp,path)
-if sys.platform=='cygwin':
- split_path=split_path_cygwin
-elif Utils.is_win32:
- split_path=split_path_win32
-class Node(object):
- __slots__=('name','sig','children','parent','cache_abspath','cache_isdir','cache_sig')
- def __init__(self,name,parent):
- self.name=name
- self.parent=parent
- if parent:
- if name in parent.children:
- raise Errors.WafError('node %s exists in the parent files %r already'%(name,parent))
- parent.children[name]=self
- def __setstate__(self,data):
- self.name=data[0]
- self.parent=data[1]
- if data[2]is not None:
- self.children=data[2]
- if data[3]is not None:
- self.sig=data[3]
- def __getstate__(self):
- return(self.name,self.parent,getattr(self,'children',None),getattr(self,'sig',None))
- def __str__(self):
- return self.name
- def __repr__(self):
- return self.abspath()
- def __hash__(self):
- return id(self)
- def __eq__(self,node):
- return id(self)==id(node)
- def __copy__(self):
- raise Errors.WafError('nodes are not supposed to be copied')
- def read(self,flags='r',encoding='ISO8859-1'):
- return Utils.readf(self.abspath(),flags,encoding)
- def write(self,data,flags='w',encoding='ISO8859-1'):
- Utils.writef(self.abspath(),data,flags,encoding)
- def chmod(self,val):
- os.chmod(self.abspath(),val)
- def delete(self):
- try:
- if getattr(self,'children',None):
- shutil.rmtree(self.abspath())
- else:
- os.unlink(self.abspath())
- except OSError:
- pass
- self.evict()
- def evict(self):
- del self.parent.children[self.name]
- def suffix(self):
- k=max(0,self.name.rfind('.'))
- return self.name[k:]
- def height(self):
- d=self
- val=-1
- while d:
- d=d.parent
- val+=1
- return val
- def listdir(self):
- lst=Utils.listdir(self.abspath())
- lst.sort()
- return lst
- def mkdir(self):
- if getattr(self,'cache_isdir',None):
- return
- try:
- self.parent.mkdir()
- except OSError:
- pass
- if self.name:
- try:
- os.makedirs(self.abspath())
- except OSError:
- pass
- if not os.path.isdir(self.abspath()):
- raise Errors.WafError('Could not create the directory %s'%self.abspath())
- try:
- self.children
- except AttributeError:
- self.children={}
- self.cache_isdir=True
- def find_node(self,lst):
- if isinstance(lst,str):
- lst=[x for x in split_path(lst)if x and x!='.']
- cur=self
- for x in lst:
- if x=='..':
- cur=cur.parent or cur
- continue
- try:
- ch=cur.children
- except AttributeError:
- cur.children={}
- else:
- try:
- cur=cur.children[x]
- continue
- except KeyError:
- pass
- cur=self.__class__(x,cur)
- try:
- os.stat(cur.abspath())
- except OSError:
- cur.evict()
- return None
- ret=cur
- try:
- os.stat(ret.abspath())
- except OSError:
- ret.evict()
- return None
- try:
- while not getattr(cur.parent,'cache_isdir',None):
- cur=cur.parent
- cur.cache_isdir=True
- except AttributeError:
- pass
- return ret
- def make_node(self,lst):
- if isinstance(lst,str):
- lst=[x for x in split_path(lst)if x and x!='.']
- cur=self
- for x in lst:
- if x=='..':
- cur=cur.parent or cur
- continue
- if getattr(cur,'children',{}):
- if x in cur.children:
- cur=cur.children[x]
- continue
- else:
- cur.children={}
- cur=self.__class__(x,cur)
- return cur
- def search_node(self,lst):
- if isinstance(lst,str):
- lst=[x for x in split_path(lst)if x and x!='.']
- cur=self
- for x in lst:
- if x=='..':
- cur=cur.parent or cur
- else:
- try:
- cur=cur.children[x]
- except(AttributeError,KeyError):
- return None
- return cur
- def path_from(self,node):
- c1=self
- c2=node
- c1h=c1.height()
- c2h=c2.height()
- lst=[]
- up=0
- while c1h>c2h:
- lst.append(c1.name)
- c1=c1.parent
- c1h-=1
- while c2h>c1h:
- up+=1
- c2=c2.parent
- c2h-=1
- while id(c1)!=id(c2):
- lst.append(c1.name)
- up+=1
- c1=c1.parent
- c2=c2.parent
- for i in range(up):
- lst.append('..')
- lst.reverse()
- return os.sep.join(lst)or'.'
- def abspath(self):
- try:
- return self.cache_abspath
- except AttributeError:
- pass
- if os.sep=='/':
- if not self.parent:
- val=os.sep
- elif not self.parent.name:
- val=os.sep+self.name
- else:
- val=self.parent.abspath()+os.sep+self.name
- else:
- if not self.parent:
- val=''
- elif not self.parent.name:
- val=self.name+os.sep
- else:
- val=self.parent.abspath().rstrip(os.sep)+os.sep+self.name
- self.cache_abspath=val
- return val
- def is_child_of(self,node):
- p=self
- diff=self.height()-node.height()
- while diff>0:
- diff-=1
- p=p.parent
- return id(p)==id(node)
- def ant_iter(self,accept=None,maxdepth=25,pats=[],dir=False,src=True,remove=True):
- dircont=self.listdir()
- dircont.sort()
- try:
- lst=set(self.children.keys())
- except AttributeError:
- self.children={}
- else:
- if remove:
- for x in lst-set(dircont):
- self.children[x].evict()
- for name in dircont:
- npats=accept(name,pats)
- if npats and npats[0]:
- accepted=[]in npats[0]
- node=self.make_node([name])
- isdir=os.path.isdir(node.abspath())
- if accepted:
- if isdir:
- if dir:
- yield node
- else:
- if src:
- yield node
- if getattr(node,'cache_isdir',None)or isdir:
- node.cache_isdir=True
- if maxdepth:
- for k in node.ant_iter(accept=accept,maxdepth=maxdepth-1,pats=npats,dir=dir,src=src,remove=remove):
- yield k
- raise StopIteration
- def ant_glob(self,*k,**kw):
- src=kw.get('src',True)
- dir=kw.get('dir',False)
- excl=kw.get('excl',exclude_regs)
- incl=k and k[0]or kw.get('incl','**')
- reflags=kw.get('ignorecase',0)and re.I
- def to_pat(s):
- lst=Utils.to_list(s)
- ret=[]
- for x in lst:
- x=x.replace('\\','/').replace('//','/')
- if x.endswith('/'):
- x+='**'
- lst2=x.split('/')
- accu=[]
- for k in lst2:
- if k=='**':
- accu.append(k)
- else:
- k=k.replace('.','[.]').replace('*','.*').replace('?','.').replace('+','\\+')
- k='^%s$'%k
- try:
- accu.append(re.compile(k,flags=reflags))
- except Exception ,e:
- raise Errors.WafError("Invalid pattern: %s"%k,e)
- ret.append(accu)
- return ret
- def filtre(name,nn):
- ret=[]
- for lst in nn:
- if not lst:
- pass
- elif lst[0]=='**':
- ret.append(lst)
- if len(lst)>1:
- if lst[1].match(name):
- ret.append(lst[2:])
- else:
- ret.append([])
- elif lst[0].match(name):
- ret.append(lst[1:])
- return ret
- def accept(name,pats):
- nacc=filtre(name,pats[0])
- nrej=filtre(name,pats[1])
- if[]in nrej:
- nacc=[]
- return[nacc,nrej]
- ret=[x for x in self.ant_iter(accept=accept,pats=[to_pat(incl),to_pat(excl)],maxdepth=25,dir=dir,src=src,remove=kw.get('remove',True))]
- if kw.get('flat',False):
- return' '.join([x.path_from(self)for x in ret])
- return ret
- def is_src(self):
- cur=self
- x=id(self.ctx.srcnode)
- y=id(self.ctx.bldnode)
- while cur.parent:
- if id(cur)==y:
- return False
- if id(cur)==x:
- return True
- cur=cur.parent
- return False
- def is_bld(self):
- cur=self
- y=id(self.ctx.bldnode)
- while cur.parent:
- if id(cur)==y:
- return True
- cur=cur.parent
- return False
- def get_src(self):
- cur=self
- x=id(self.ctx.srcnode)
- y=id(self.ctx.bldnode)
- lst=[]
- while cur.parent:
- if id(cur)==y:
- lst.reverse()
- return self.ctx.srcnode.make_node(lst)
- if id(cur)==x:
- return self
- lst.append(cur.name)
- cur=cur.parent
- return self
- def get_bld(self):
- cur=self
- x=id(self.ctx.srcnode)
- y=id(self.ctx.bldnode)
- lst=[]
- while cur.parent:
- if id(cur)==y:
- return self
- if id(cur)==x:
- lst.reverse()
- return self.ctx.bldnode.make_node(lst)
- lst.append(cur.name)
- cur=cur.parent
- lst.reverse()
- if lst and Utils.is_win32 and len(lst[0])==2 and lst[0].endswith(':'):
- lst[0]=lst[0][0]
- return self.ctx.bldnode.make_node(['__root__']+lst)
- def find_resource(self,lst):
- if isinstance(lst,str):
- lst=[x for x in split_path(lst)if x and x!='.']
- node=self.get_bld().search_node(lst)
- if not node:
- self=self.get_src()
- node=self.find_node(lst)
- if node:
- if os.path.isdir(node.abspath()):
- return None
- return node
- def find_or_declare(self,lst):
- if isinstance(lst,str):
- lst=[x for x in split_path(lst)if x and x!='.']
- node=self.get_bld().search_node(lst)
- if node:
- if not os.path.isfile(node.abspath()):
- node.sig=None
- node.parent.mkdir()
- return node
- self=self.get_src()
- node=self.find_node(lst)
- if node:
- if not os.path.isfile(node.abspath()):
- node.sig=None
- node.parent.mkdir()
- return node
- node=self.get_bld().make_node(lst)
- node.parent.mkdir()
- return node
- def find_dir(self,lst):
- if isinstance(lst,str):
- lst=[x for x in split_path(lst)if x and x!='.']
- node=self.find_node(lst)
- try:
- if not os.path.isdir(node.abspath()):
- return None
- except(OSError,AttributeError):
- return None
- return node
- def change_ext(self,ext,ext_in=None):
- name=self.name
- if ext_in is None:
- k=name.rfind('.')
- if k>=0:
- name=name[:k]+ext
- else:
- name=name+ext
- else:
- name=name[:-len(ext_in)]+ext
- return self.parent.find_or_declare([name])
- def nice_path(self,env=None):
- return self.path_from(self.ctx.launch_node())
- def bldpath(self):
- return self.path_from(self.ctx.bldnode)
- def srcpath(self):
- return self.path_from(self.ctx.srcnode)
- def relpath(self):
- cur=self
- x=id(self.ctx.bldnode)
- while cur.parent:
- if id(cur)==x:
- return self.bldpath()
- cur=cur.parent
- return self.srcpath()
- def bld_dir(self):
- return self.parent.bldpath()
- def bld_base(self):
- s=os.path.splitext(self.name)[0]
- return self.bld_dir()+os.sep+s
- def get_bld_sig(self):
- try:
- return self.cache_sig
- except AttributeError:
- pass
- if not self.is_bld()or self.ctx.bldnode is self.ctx.srcnode:
- self.sig=Utils.h_file(self.abspath())
- self.cache_sig=ret=self.sig
- return ret
- search=search_node
-pickle_lock=Utils.threading.Lock()
-class Nod3(Node):
- pass
diff --git a/waflib/Options.py b/waflib/Options.py
deleted file mode 100644
index 21f4254..0000000
--- a/waflib/Options.py
+++ /dev/null
@@ -1,135 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,tempfile,optparse,sys,re
-from waflib import Logs,Utils,Context
-cmds='distclean configure build install clean uninstall check dist distcheck'.split()
-options={}
-commands=[]
-lockfile=os.environ.get('WAFLOCK','.lock-waf_%s_build'%sys.platform)
-try:cache_global=os.path.abspath(os.environ['WAFCACHE'])
-except KeyError:cache_global=''
-platform=Utils.unversioned_sys_platform()
-class opt_parser(optparse.OptionParser):
- def __init__(self,ctx):
- optparse.OptionParser.__init__(self,conflict_handler="resolve",version='waf %s (%s)'%(Context.WAFVERSION,Context.WAFREVISION))
- self.formatter.width=Logs.get_term_cols()
- p=self.add_option
- self.ctx=ctx
- jobs=ctx.jobs()
- p('-j','--jobs',dest='jobs',default=jobs,type='int',help='amount of parallel jobs (%r)'%jobs)
- p('-k','--keep',dest='keep',default=0,action='count',help='keep running happily even if errors are found')
- p('-v','--verbose',dest='verbose',default=0,action='count',help='verbosity level -v -vv or -vvv [default: 0]')
- p('--nocache',dest='nocache',default=False,action='store_true',help='ignore the WAFCACHE (if set)')
- p('--zones',dest='zones',default='',action='store',help='debugging zones (task_gen, deps, tasks, etc)')
- gr=optparse.OptionGroup(self,'configure options')
- self.add_option_group(gr)
- gr.add_option('-o','--out',action='store',default='',help='build dir for the project',dest='out')
- gr.add_option('-t','--top',action='store',default='',help='src dir for the project',dest='top')
- default_prefix=os.environ.get('PREFIX')
- if not default_prefix:
- if platform=='win32':
- d=tempfile.gettempdir()
- default_prefix=d[0].upper()+d[1:]
- else:
- default_prefix='/usr/local/'
- gr.add_option('--prefix',dest='prefix',default=default_prefix,help='installation prefix [default: %r]'%default_prefix)
- gr.add_option('--download',dest='download',default=False,action='store_true',help='try to download the tools if missing')
- gr=optparse.OptionGroup(self,'build and install options')
- self.add_option_group(gr)
- gr.add_option('-p','--progress',dest='progress_bar',default=0,action='count',help='-p: progress bar; -pp: ide output')
- gr.add_option('--targets',dest='targets',default='',action='store',help='task generators, e.g. "target1,target2"')
- gr=optparse.OptionGroup(self,'step options')
- self.add_option_group(gr)
- gr.add_option('--files',dest='files',default='',action='store',help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
- default_destdir=os.environ.get('DESTDIR','')
- gr=optparse.OptionGroup(self,'install/uninstall options')
- self.add_option_group(gr)
- gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir')
- gr.add_option('-f','--force',dest='force',default=False,action='store_true',help='force file installation')
- gr.add_option('--distcheck-args',help='arguments to pass to distcheck',default=None,action='store')
- def get_usage(self):
- cmds_str={}
- for cls in Context.classes:
- if not cls.cmd or cls.cmd=='options':
- continue
- s=cls.__doc__ or''
- cmds_str[cls.cmd]=s
- if Context.g_module:
- for(k,v)in Context.g_module.__dict__.items():
- if k in['options','init','shutdown']:
- continue
- if type(v)is type(Context.create_context):
- if v.__doc__ and not k.startswith('_'):
- cmds_str[k]=v.__doc__
- just=0
- for k in cmds_str:
- just=max(just,len(k))
- lst=[' %s: %s'%(k.ljust(just),v)for(k,v)in cmds_str.items()]
- lst.sort()
- ret='\n'.join(lst)
- return'''waf [commands] [options]
-
-Main commands (example: ./waf build -j4)
-%s
-'''%ret
-class OptionsContext(Context.Context):
- cmd='options'
- fun='options'
- def __init__(self,**kw):
- super(OptionsContext,self).__init__(**kw)
- self.parser=opt_parser(self)
- self.option_groups={}
- def jobs(self):
- count=int(os.environ.get('JOBS',0))
- if count<1:
- if'NUMBER_OF_PROCESSORS'in os.environ:
- count=int(os.environ.get('NUMBER_OF_PROCESSORS',1))
- else:
- if hasattr(os,'sysconf_names'):
- if'SC_NPROCESSORS_ONLN'in os.sysconf_names:
- count=int(os.sysconf('SC_NPROCESSORS_ONLN'))
- elif'SC_NPROCESSORS_CONF'in os.sysconf_names:
- count=int(os.sysconf('SC_NPROCESSORS_CONF'))
- if not count and os.name not in('nt','java'):
- try:
- tmp=self.cmd_and_log(['sysctl','-n','hw.ncpu'],quiet=0)
- except Exception:
- pass
- else:
- if re.match('^[0-9]+$',tmp):
- count=int(tmp)
- if count<1:
- count=1
- elif count>1024:
- count=1024
- return count
- def add_option(self,*k,**kw):
- return self.parser.add_option(*k,**kw)
- def add_option_group(self,*k,**kw):
- try:
- gr=self.option_groups[k[0]]
- except KeyError:
- gr=self.parser.add_option_group(*k,**kw)
- self.option_groups[k[0]]=gr
- return gr
- def get_option_group(self,opt_str):
- try:
- return self.option_groups[opt_str]
- except KeyError:
- for group in self.parser.option_groups:
- if group.title==opt_str:
- return group
- return None
- def parse_args(self,_args=None):
- global options,commands
- (options,leftover_args)=self.parser.parse_args(args=_args)
- commands=leftover_args
- if options.destdir:
- options.destdir=os.path.abspath(os.path.expanduser(options.destdir))
- if options.verbose>=1:
- self.load('errcheck')
- def execute(self):
- super(OptionsContext,self).execute()
- self.parse_args()
diff --git a/waflib/Runner.py b/waflib/Runner.py
deleted file mode 100644
index 15b6a27..0000000
--- a/waflib/Runner.py
+++ /dev/null
@@ -1,197 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import random,atexit
-try:
- from queue import Queue
-except ImportError:
- from Queue import Queue
-from waflib import Utils,Task,Errors,Logs
-GAP=10
-class TaskConsumer(Utils.threading.Thread):
- def __init__(self):
- Utils.threading.Thread.__init__(self)
- self.ready=Queue()
- self.setDaemon(1)
- self.start()
- def run(self):
- try:
- self.loop()
- except Exception:
- pass
- def loop(self):
- while 1:
- tsk=self.ready.get()
- if not isinstance(tsk,Task.TaskBase):
- tsk(self)
- else:
- tsk.process()
-pool=Queue()
-def get_pool():
- try:
- return pool.get(False)
- except Exception:
- return TaskConsumer()
-def put_pool(x):
- pool.put(x)
-def _free_resources():
- global pool
- lst=[]
- while pool.qsize():
- lst.append(pool.get())
- for x in lst:
- x.ready.put(None)
- for x in lst:
- x.join()
- pool=None
-atexit.register(_free_resources)
-class Parallel(object):
- def __init__(self,bld,j=2):
- self.numjobs=j
- self.bld=bld
- self.outstanding=[]
- self.frozen=[]
- self.out=Queue(0)
- self.count=0
- self.processed=1
- self.stop=False
- self.error=[]
- self.biter=None
- self.dirty=False
- def get_next_task(self):
- if not self.outstanding:
- return None
- return self.outstanding.pop(0)
- def postpone(self,tsk):
- if random.randint(0,1):
- self.frozen.insert(0,tsk)
- else:
- self.frozen.append(tsk)
- def refill_task_list(self):
- while self.count>self.numjobs*GAP:
- self.get_out()
- while not self.outstanding:
- if self.count:
- self.get_out()
- elif self.frozen:
- try:
- cond=self.deadlock==self.processed
- except AttributeError:
- pass
- else:
- if cond:
- msg='check the build order for the tasks'
- for tsk in self.frozen:
- if not tsk.run_after:
- msg='check the methods runnable_status'
- break
- lst=[]
- for tsk in self.frozen:
- lst.append('%s\t-> %r'%(repr(tsk),[id(x)for x in tsk.run_after]))
- raise Errors.WafError('Deadlock detected: %s%s'%(msg,''.join(lst)))
- self.deadlock=self.processed
- if self.frozen:
- self.outstanding+=self.frozen
- self.frozen=[]
- elif not self.count:
- self.outstanding.extend(self.biter.next())
- self.total=self.bld.total()
- break
- def add_more_tasks(self,tsk):
- if getattr(tsk,'more_tasks',None):
- self.outstanding+=tsk.more_tasks
- self.total+=len(tsk.more_tasks)
- def get_out(self):
- tsk=self.out.get()
- if not self.stop:
- self.add_more_tasks(tsk)
- self.count-=1
- self.dirty=True
- return tsk
- def error_handler(self,tsk):
- if not self.bld.keep:
- self.stop=True
- self.error.append(tsk)
- def add_task(self,tsk):
- try:
- self.pool
- except AttributeError:
- self.init_task_pool()
- self.ready.put(tsk)
- def init_task_pool(self):
- pool=self.pool=[get_pool()for i in range(self.numjobs)]
- self.ready=Queue(0)
- def setq(consumer):
- consumer.ready=self.ready
- for x in pool:
- x.ready.put(setq)
- return pool
- def free_task_pool(self):
- def setq(consumer):
- consumer.ready=Queue(0)
- self.out.put(self)
- try:
- pool=self.pool
- except AttributeError:
- pass
- else:
- for x in pool:
- self.ready.put(setq)
- for x in pool:
- self.get_out()
- for x in pool:
- put_pool(x)
- self.pool=[]
- def start(self):
- self.total=self.bld.total()
- while not self.stop:
- self.refill_task_list()
- tsk=self.get_next_task()
- if not tsk:
- if self.count:
- continue
- else:
- break
- if tsk.hasrun:
- self.processed+=1
- continue
- if self.stop:
- break
- try:
- st=tsk.runnable_status()
- except Exception:
- self.processed+=1
- tsk.err_msg=Utils.ex_stack()
- if not self.stop and self.bld.keep:
- tsk.hasrun=Task.SKIPPED
- if self.bld.keep==1:
- if Logs.verbose>1 or not self.error:
- self.error.append(tsk)
- self.stop=True
- else:
- if Logs.verbose>1:
- self.error.append(tsk)
- continue
- tsk.hasrun=Task.EXCEPTION
- self.error_handler(tsk)
- continue
- if st==Task.ASK_LATER:
- self.postpone(tsk)
- elif st==Task.SKIP_ME:
- self.processed+=1
- tsk.hasrun=Task.SKIPPED
- self.add_more_tasks(tsk)
- else:
- tsk.position=(self.processed,self.total)
- self.count+=1
- tsk.master=self
- self.processed+=1
- if self.numjobs==1:
- tsk.process()
- else:
- self.add_task(tsk)
- while self.error and self.count:
- self.get_out()
- assert(self.count==0 or self.stop)
- self.free_task_pool()
diff --git a/waflib/Scripting.py b/waflib/Scripting.py
deleted file mode 100644
index c33ab32..0000000
--- a/waflib/Scripting.py
+++ /dev/null
@@ -1,373 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,shlex,shutil,traceback,errno,sys,stat
-from waflib import Utils,Configure,Logs,Options,ConfigSet,Context,Errors,Build,Node
-build_dir_override=None
-no_climb_commands=['configure']
-default_cmd="build"
-def waf_entry_point(current_directory,version,wafdir):
- Logs.init_log()
- if Context.WAFVERSION!=version:
- Logs.error('Waf script %r and library %r do not match (directory %r)'%(version,Context.WAFVERSION,wafdir))
- sys.exit(1)
- if'--version'in sys.argv:
- Context.run_dir=current_directory
- ctx=Context.create_context('options')
- ctx.curdir=current_directory
- ctx.parse_args()
- sys.exit(0)
- Context.waf_dir=wafdir
- Context.launch_dir=current_directory
- no_climb=os.environ.get('NOCLIMB',None)
- if not no_climb:
- for k in no_climb_commands:
- if k in sys.argv:
- no_climb=True
- break
- cur=current_directory
- while cur:
- lst=os.listdir(cur)
- if Options.lockfile in lst:
- env=ConfigSet.ConfigSet()
- try:
- env.load(os.path.join(cur,Options.lockfile))
- ino=os.stat(cur)[stat.ST_INO]
- except Exception:
- pass
- else:
- for x in[env.run_dir,env.top_dir,env.out_dir]:
- if Utils.is_win32:
- if cur==x:
- load=True
- break
- else:
- try:
- ino2=os.stat(x)[stat.ST_INO]
- except OSError:
- pass
- else:
- if ino==ino2:
- load=True
- break
- else:
- Logs.warn('invalid lock file in %s'%cur)
- load=False
- if load:
- Context.run_dir=env.run_dir
- Context.top_dir=env.top_dir
- Context.out_dir=env.out_dir
- break
- if not Context.run_dir:
- if Context.WSCRIPT_FILE in lst:
- Context.run_dir=cur
- next=os.path.dirname(cur)
- if next==cur:
- break
- cur=next
- if no_climb:
- break
- if not Context.run_dir:
- if'-h'in sys.argv or'--help'in sys.argv:
- Logs.warn('No wscript file found: the help message may be incomplete')
- Context.run_dir=current_directory
- ctx=Context.create_context('options')
- ctx.curdir=current_directory
- ctx.parse_args()
- sys.exit(0)
- Logs.error('Waf: Run from a directory containing a file named %r'%Context.WSCRIPT_FILE)
- sys.exit(1)
- try:
- os.chdir(Context.run_dir)
- except OSError:
- Logs.error('Waf: The folder %r is unreadable'%Context.run_dir)
- sys.exit(1)
- try:
- set_main_module(Context.run_dir+os.sep+Context.WSCRIPT_FILE)
- except Errors.WafError ,e:
- Logs.pprint('RED',e.verbose_msg)
- Logs.error(str(e))
- sys.exit(1)
- except Exception ,e:
- Logs.error('Waf: The wscript in %r is unreadable'%Context.run_dir,e)
- traceback.print_exc(file=sys.stdout)
- sys.exit(2)
- try:
- run_commands()
- except Errors.WafError ,e:
- if Logs.verbose>1:
- Logs.pprint('RED',e.verbose_msg)
- Logs.error(e.msg)
- sys.exit(1)
- except SystemExit:
- raise
- except Exception ,e:
- traceback.print_exc(file=sys.stdout)
- sys.exit(2)
- except KeyboardInterrupt:
- Logs.pprint('RED','Interrupted')
- sys.exit(68)
-def set_main_module(file_path):
- Context.g_module=Context.load_module(file_path)
- Context.g_module.root_path=file_path
- def set_def(obj):
- name=obj.__name__
- if not name in Context.g_module.__dict__:
- setattr(Context.g_module,name,obj)
- for k in[update,dist,distclean,distcheck,update]:
- set_def(k)
- if not'init'in Context.g_module.__dict__:
- Context.g_module.init=Utils.nada
- if not'shutdown'in Context.g_module.__dict__:
- Context.g_module.shutdown=Utils.nada
- if not'options'in Context.g_module.__dict__:
- Context.g_module.options=Utils.nada
-def parse_options():
- Context.create_context('options').execute()
- if not Options.commands:
- Options.commands=[default_cmd]
- Options.commands=[x for x in Options.commands if x!='options']
- Logs.verbose=Options.options.verbose
- Logs.init_log()
- if Options.options.zones:
- Logs.zones=Options.options.zones.split(',')
- if not Logs.verbose:
- Logs.verbose=1
- elif Logs.verbose>0:
- Logs.zones=['runner']
- if Logs.verbose>2:
- Logs.zones=['*']
-def run_command(cmd_name):
- ctx=Context.create_context(cmd_name)
- ctx.log_timer=Utils.Timer()
- ctx.options=Options.options
- ctx.cmd=cmd_name
- ctx.execute()
- return ctx
-def run_commands():
- parse_options()
- run_command('init')
- while Options.commands:
- cmd_name=Options.commands.pop(0)
- ctx=run_command(cmd_name)
- Logs.info('%r finished successfully (%s)'%(cmd_name,str(ctx.log_timer)))
- run_command('shutdown')
-def _can_distclean(name):
- for k in'.o .moc .exe'.split():
- if name.endswith(k):
- return True
- return False
-def distclean_dir(dirname):
- for(root,dirs,files)in os.walk(dirname):
- for f in files:
- if _can_distclean(f):
- fname=root+os.sep+f
- try:
- os.unlink(fname)
- except OSError:
- Logs.warn('Could not remove %r'%fname)
- for x in[Context.DBFILE,'config.log']:
- try:
- os.unlink(x)
- except OSError:
- pass
- try:
- shutil.rmtree('c4che')
- except OSError:
- pass
-def distclean(ctx):
- '''removes the build directory'''
- lst=os.listdir('.')
- for f in lst:
- if f==Options.lockfile:
- try:
- proj=ConfigSet.ConfigSet(f)
- except IOError:
- Logs.warn('Could not read %r'%f)
- continue
- if proj['out_dir']!=proj['top_dir']:
- try:
- shutil.rmtree(proj['out_dir'])
- except IOError:
- pass
- except OSError ,e:
- if e.errno!=errno.ENOENT:
- Logs.warn('project %r cannot be removed'%proj[Context.OUT])
- else:
- distclean_dir(proj['out_dir'])
- for k in(proj['out_dir'],proj['top_dir'],proj['run_dir']):
- try:
- os.remove(os.path.join(k,Options.lockfile))
- except OSError ,e:
- if e.errno!=errno.ENOENT:
- Logs.warn('file %r cannot be removed'%f)
- if f.startswith('.waf')and not Options.commands:
- shutil.rmtree(f,ignore_errors=True)
-class Dist(Context.Context):
- '''creates an archive containing the project source code'''
- cmd='dist'
- fun='dist'
- algo='tar.bz2'
- ext_algo={}
- def execute(self):
- self.recurse([os.path.dirname(Context.g_module.root_path)])
- self.archive()
- def archive(self):
- import tarfile
- arch_name=self.get_arch_name()
- try:
- self.base_path
- except AttributeError:
- self.base_path=self.path
- node=self.base_path.make_node(arch_name)
- try:
- node.delete()
- except Exception:
- pass
- files=self.get_files()
- if self.algo.startswith('tar.'):
- tar=tarfile.open(arch_name,'w:'+self.algo.replace('tar.',''))
- for x in files:
- self.add_tar_file(x,tar)
- tar.close()
- elif self.algo=='zip':
- import zipfile
- zip=zipfile.ZipFile(arch_name,'w',compression=zipfile.ZIP_DEFLATED)
- for x in files:
- archive_name=self.get_base_name()+'/'+x.path_from(self.base_path)
- zip.write(x.abspath(),archive_name,zipfile.ZIP_DEFLATED)
- zip.close()
- else:
- self.fatal('Valid algo types are tar.bz2, tar.gz or zip')
- try:
- from hashlib import sha1 as sha
- except ImportError:
- from sha import sha
- try:
- digest=" (sha=%r)"%sha(node.read()).hexdigest()
- except Exception:
- digest=''
- Logs.info('New archive created: %s%s'%(self.arch_name,digest))
- def get_tar_path(self,node):
- return node.abspath()
- def add_tar_file(self,x,tar):
- p=self.get_tar_path(x)
- tinfo=tar.gettarinfo(name=p,arcname=self.get_tar_prefix()+'/'+x.path_from(self.base_path))
- tinfo.uid=0
- tinfo.gid=0
- tinfo.uname='root'
- tinfo.gname='root'
- fu=None
- try:
- fu=open(p,'rb')
- tar.addfile(tinfo,fileobj=fu)
- finally:
- if fu:
- fu.close()
- def get_tar_prefix(self):
- try:
- return self.tar_prefix
- except AttributeError:
- return self.get_base_name()
- def get_arch_name(self):
- try:
- self.arch_name
- except AttributeError:
- self.arch_name=self.get_base_name()+'.'+self.ext_algo.get(self.algo,self.algo)
- return self.arch_name
- def get_base_name(self):
- try:
- self.base_name
- except AttributeError:
- appname=getattr(Context.g_module,Context.APPNAME,'noname')
- version=getattr(Context.g_module,Context.VERSION,'1.0')
- self.base_name=appname+'-'+version
- return self.base_name
- def get_excl(self):
- try:
- return self.excl
- except AttributeError:
- self.excl=Node.exclude_regs+' **/waf-1.7.* **/.waf-1.7* **/waf3-1.7.* **/.waf3-1.7* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
- nd=self.root.find_node(Context.out_dir)
- if nd:
- self.excl+=' '+nd.path_from(self.base_path)
- return self.excl
- def get_files(self):
- try:
- files=self.files
- except AttributeError:
- files=self.base_path.ant_glob('**/*',excl=self.get_excl())
- return files
-def dist(ctx):
- '''makes a tarball for redistributing the sources'''
- pass
-class DistCheck(Dist):
- fun='distcheck'
- cmd='distcheck'
- def execute(self):
- self.recurse([os.path.dirname(Context.g_module.root_path)])
- self.archive()
- self.check()
- def check(self):
- import tempfile,tarfile
- t=None
- try:
- t=tarfile.open(self.get_arch_name())
- for x in t:
- t.extract(x)
- finally:
- if t:
- t.close()
- cfg=[]
- if Options.options.distcheck_args:
- cfg=shlex.split(Options.options.distcheck_args)
- else:
- cfg=[x for x in sys.argv if x.startswith('-')]
- instdir=tempfile.mkdtemp('.inst',self.get_base_name())
- ret=Utils.subprocess.Popen([sys.argv[0],'configure','install','uninstall','--destdir='+instdir]+cfg,cwd=self.get_base_name()).wait()
- if ret:
- raise Errors.WafError('distcheck failed with code %i'%ret)
- if os.path.exists(instdir):
- raise Errors.WafError('distcheck succeeded, but files were left in %s'%instdir)
- shutil.rmtree(self.get_base_name())
-def distcheck(ctx):
- '''checks if the project compiles (tarball from 'dist')'''
- pass
-def update(ctx):
- '''updates the plugins from the *waflib/extras* directory'''
- lst=Options.options.files.split(',')
- if not lst:
- lst=[x for x in Utils.listdir(Context.waf_dir+'/waflib/extras')if x.endswith('.py')]
- for x in lst:
- tool=x.replace('.py','')
- try:
- Configure.download_tool(tool,force=True,ctx=ctx)
- except Errors.WafError:
- Logs.error('Could not find the tool %s in the remote repository'%x)
-def autoconfigure(execute_method):
- def execute(self):
- if not Configure.autoconfig:
- return execute_method(self)
- env=ConfigSet.ConfigSet()
- do_config=False
- try:
- env.load(os.path.join(Context.top_dir,Options.lockfile))
- except Exception:
- Logs.warn('Configuring the project')
- do_config=True
- else:
- if env.run_dir!=Context.run_dir:
- do_config=True
- else:
- h=0
- for f in env['files']:
- h=hash((h,Utils.readf(f,'rb')))
- do_config=h!=env.hash
- if do_config:
- Options.commands.insert(0,self.cmd)
- Options.commands.insert(0,'configure')
- return
- return execute_method(self)
- return execute
-Build.BuildContext.execute=autoconfigure(Build.BuildContext.execute)
diff --git a/waflib/Task.py b/waflib/Task.py
deleted file mode 100644
index 1b54a54..0000000
--- a/waflib/Task.py
+++ /dev/null
@@ -1,677 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,shutil,re,tempfile
-from waflib import Utils,Logs,Errors
-NOT_RUN=0
-MISSING=1
-CRASHED=2
-EXCEPTION=3
-SKIPPED=8
-SUCCESS=9
-ASK_LATER=-1
-SKIP_ME=-2
-RUN_ME=-3
-COMPILE_TEMPLATE_SHELL='''
-def f(tsk):
- env = tsk.env
- gen = tsk.generator
- bld = gen.bld
- wd = getattr(tsk, 'cwd', None)
- p = env.get_flat
- tsk.last_cmd = cmd = \'\'\' %s \'\'\' % s
- return tsk.exec_command(cmd, cwd=wd, env=env.env or None)
-'''
-COMPILE_TEMPLATE_NOSHELL='''
-def f(tsk):
- env = tsk.env
- gen = tsk.generator
- bld = gen.bld
- wd = getattr(tsk, 'cwd', None)
- def to_list(xx):
- if isinstance(xx, str): return [xx]
- return xx
- tsk.last_cmd = lst = []
- %s
- lst = [x for x in lst if x]
- return tsk.exec_command(lst, cwd=wd, env=env.env or None)
-'''
-def cache_outputs(cls):
- m1=cls.run
- def run(self):
- bld=self.generator.bld
- if bld.cache_global and not bld.nocache:
- if self.can_retrieve_cache():
- return 0
- return m1(self)
- cls.run=run
- m2=cls.post_run
- def post_run(self):
- bld=self.generator.bld
- ret=m2(self)
- if bld.cache_global and not bld.nocache:
- self.put_files_cache()
- return ret
- cls.post_run=post_run
- return cls
-classes={}
-class store_task_type(type):
- def __init__(cls,name,bases,dict):
- super(store_task_type,cls).__init__(name,bases,dict)
- name=cls.__name__
- if name.endswith('_task'):
- name=name.replace('_task','')
- if name!='evil'and name!='TaskBase':
- global classes
- if getattr(cls,'run_str',None):
- (f,dvars)=compile_fun(cls.run_str,cls.shell)
- cls.hcode=cls.run_str
- cls.run_str=None
- cls.run=f
- cls.vars=list(set(cls.vars+dvars))
- cls.vars.sort()
- elif getattr(cls,'run',None)and not'hcode'in cls.__dict__:
- cls.hcode=Utils.h_fun(cls.run)
- if not getattr(cls,'nocache',None):
- cls=cache_outputs(cls)
- getattr(cls,'register',classes)[name]=cls
-evil=store_task_type('evil',(object,),{})
-class TaskBase(evil):
- color='GREEN'
- ext_in=[]
- ext_out=[]
- before=[]
- after=[]
- hcode=''
- def __init__(self,*k,**kw):
- self.hasrun=NOT_RUN
- try:
- self.generator=kw['generator']
- except KeyError:
- self.generator=self
- def __repr__(self):
- return'\n\t{task %r: %s %s}'%(self.__class__.__name__,id(self),str(getattr(self,'fun','')))
- def __str__(self):
- if hasattr(self,'fun'):
- return'executing: %s\n'%self.fun.__name__
- return self.__class__.__name__+'\n'
- def __hash__(self):
- return id(self)
- def exec_command(self,cmd,**kw):
- bld=self.generator.bld
- try:
- if not kw.get('cwd',None):
- kw['cwd']=bld.cwd
- except AttributeError:
- bld.cwd=kw['cwd']=bld.variant_dir
- return bld.exec_command(cmd,**kw)
- def runnable_status(self):
- return RUN_ME
- def process(self):
- m=self.master
- if m.stop:
- m.out.put(self)
- return
- try:
- del self.generator.bld.task_sigs[self.uid()]
- except KeyError:
- pass
- try:
- self.generator.bld.returned_tasks.append(self)
- self.log_display(self.generator.bld)
- ret=self.run()
- except Exception:
- self.err_msg=Utils.ex_stack()
- self.hasrun=EXCEPTION
- m.error_handler(self)
- m.out.put(self)
- return
- if ret:
- self.err_code=ret
- self.hasrun=CRASHED
- else:
- try:
- self.post_run()
- except Errors.WafError:
- pass
- except Exception:
- self.err_msg=Utils.ex_stack()
- self.hasrun=EXCEPTION
- else:
- self.hasrun=SUCCESS
- if self.hasrun!=SUCCESS:
- m.error_handler(self)
- m.out.put(self)
- def run(self):
- if hasattr(self,'fun'):
- return self.fun(self)
- return 0
- def post_run(self):
- pass
- def log_display(self,bld):
- bld.to_log(self.display())
- def display(self):
- col1=Logs.colors(self.color)
- col2=Logs.colors.NORMAL
- master=self.master
- def cur():
- tmp=-1
- if hasattr(master,'ready'):
- tmp-=master.ready.qsize()
- return master.processed+tmp
- if self.generator.bld.progress_bar==1:
- return self.generator.bld.progress_line(cur(),master.total,col1,col2)
- if self.generator.bld.progress_bar==2:
- ela=str(self.generator.bld.timer)
- try:
- ins=','.join([n.name for n in self.inputs])
- except AttributeError:
- ins=''
- try:
- outs=','.join([n.name for n in self.outputs])
- except AttributeError:
- outs=''
- return'|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n'%(master.total,cur(),ins,outs,ela)
- s=str(self)
- if not s:
- return None
- total=master.total
- n=len(str(total))
- fs='[%%%dd/%%%dd] %%s%%s%%s'%(n,n)
- return fs%(cur(),total,col1,s,col2)
- def attr(self,att,default=None):
- ret=getattr(self,att,self)
- if ret is self:return getattr(self.__class__,att,default)
- return ret
- def hash_constraints(self):
- cls=self.__class__
- tup=(str(cls.before),str(cls.after),str(cls.ext_in),str(cls.ext_out),cls.__name__,cls.hcode)
- h=hash(tup)
- return h
- def format_error(self):
- msg=getattr(self,'last_cmd','')
- name=getattr(self.generator,'name','')
- if getattr(self,"err_msg",None):
- return self.err_msg
- elif not self.hasrun:
- return'task in %r was not executed for some reason: %r'%(name,self)
- elif self.hasrun==CRASHED:
- try:
- return' -> task in %r failed (exit status %r): %r\n%r'%(name,self.err_code,self,msg)
- except AttributeError:
- return' -> task in %r failed: %r\n%r'%(name,self,msg)
- elif self.hasrun==MISSING:
- return' -> missing files in %r: %r\n%r'%(name,self,msg)
- else:
- return'invalid status for task in %r: %r'%(name,self.hasrun)
- def colon(self,var1,var2):
- tmp=self.env[var1]
- if isinstance(var2,str):
- it=self.env[var2]
- else:
- it=var2
- if isinstance(tmp,str):
- return[tmp%x for x in it]
- else:
- if Logs.verbose and not tmp and it:
- Logs.warn('Missing env variable %r for task %r (generator %r)'%(var1,self,self.generator))
- lst=[]
- for y in it:
- lst.extend(tmp)
- lst.append(y)
- return lst
-class Task(TaskBase):
- vars=[]
- shell=False
- def __init__(self,*k,**kw):
- TaskBase.__init__(self,*k,**kw)
- self.env=kw['env']
- self.inputs=[]
- self.outputs=[]
- self.dep_nodes=[]
- self.run_after=set([])
- def __str__(self):
- env=self.env
- src_str=' '.join([a.nice_path()for a in self.inputs])
- tgt_str=' '.join([a.nice_path()for a in self.outputs])
- if self.outputs:sep=' -> '
- else:sep=''
- return'%s: %s%s%s\n'%(self.__class__.__name__.replace('_task',''),src_str,sep,tgt_str)
- def __repr__(self):
- try:
- ins=",".join([x.name for x in self.inputs])
- outs=",".join([x.name for x in self.outputs])
- except AttributeError:
- ins=",".join([str(x)for x in self.inputs])
- outs=",".join([str(x)for x in self.outputs])
- return"".join(['\n\t{task %r: '%id(self),self.__class__.__name__," ",ins," -> ",outs,'}'])
- def uid(self):
- try:
- return self.uid_
- except AttributeError:
- m=Utils.md5()
- up=m.update
- up(self.__class__.__name__)
- for x in self.inputs+self.outputs:
- up(x.abspath())
- self.uid_=m.digest()
- return self.uid_
- def set_inputs(self,inp):
- if isinstance(inp,list):self.inputs+=inp
- else:self.inputs.append(inp)
- def set_outputs(self,out):
- if isinstance(out,list):self.outputs+=out
- else:self.outputs.append(out)
- def set_run_after(self,task):
- assert isinstance(task,TaskBase)
- self.run_after.add(task)
- def signature(self):
- try:return self.cache_sig
- except AttributeError:pass
- self.m=Utils.md5()
- self.m.update(self.hcode)
- self.sig_explicit_deps()
- self.sig_vars()
- if self.scan:
- try:
- self.sig_implicit_deps()
- except Errors.TaskRescan:
- return self.signature()
- ret=self.cache_sig=self.m.digest()
- return ret
- def runnable_status(self):
- for t in self.run_after:
- if not t.hasrun:
- return ASK_LATER
- bld=self.generator.bld
- try:
- new_sig=self.signature()
- except Errors.TaskNotReady:
- return ASK_LATER
- key=self.uid()
- try:
- prev_sig=bld.task_sigs[key]
- except KeyError:
- Logs.debug("task: task %r must run as it was never run before or the task code changed"%self)
- return RUN_ME
- for node in self.outputs:
- try:
- if node.sig!=new_sig:
- return RUN_ME
- except AttributeError:
- Logs.debug("task: task %r must run as the output nodes do not exist"%self)
- return RUN_ME
- if new_sig!=prev_sig:
- return RUN_ME
- return SKIP_ME
- def post_run(self):
- bld=self.generator.bld
- sig=self.signature()
- for node in self.outputs:
- try:
- os.stat(node.abspath())
- except OSError:
- self.hasrun=MISSING
- self.err_msg='-> missing file: %r'%node.abspath()
- raise Errors.WafError(self.err_msg)
- node.sig=sig
- bld.task_sigs[self.uid()]=self.cache_sig
- def sig_explicit_deps(self):
- bld=self.generator.bld
- upd=self.m.update
- for x in self.inputs+self.dep_nodes:
- try:
- upd(x.get_bld_sig())
- except(AttributeError,TypeError):
- raise Errors.WafError('Missing node signature for %r (required by %r)'%(x,self))
- if bld.deps_man:
- additional_deps=bld.deps_man
- for x in self.inputs+self.outputs:
- try:
- d=additional_deps[id(x)]
- except KeyError:
- continue
- for v in d:
- if isinstance(v,bld.root.__class__):
- try:
- v=v.get_bld_sig()
- except AttributeError:
- raise Errors.WafError('Missing node signature for %r (required by %r)'%(v,self))
- elif hasattr(v,'__call__'):
- v=v()
- upd(v)
- return self.m.digest()
- def sig_vars(self):
- bld=self.generator.bld
- env=self.env
- upd=self.m.update
- act_sig=bld.hash_env_vars(env,self.__class__.vars)
- upd(act_sig)
- dep_vars=getattr(self,'dep_vars',None)
- if dep_vars:
- upd(bld.hash_env_vars(env,dep_vars))
- return self.m.digest()
- scan=None
- def sig_implicit_deps(self):
- bld=self.generator.bld
- key=self.uid()
- prev=bld.task_sigs.get((key,'imp'),[])
- if prev:
- try:
- if prev==self.compute_sig_implicit_deps():
- return prev
- except Exception:
- for x in bld.node_deps.get(self.uid(),[]):
- if x.is_child_of(bld.srcnode):
- try:
- os.stat(x.abspath())
- except OSError:
- try:
- del x.parent.children[x.name]
- except KeyError:
- pass
- del bld.task_sigs[(key,'imp')]
- raise Errors.TaskRescan('rescan')
- (nodes,names)=self.scan()
- if Logs.verbose:
- Logs.debug('deps: scanner for %s returned %s %s'%(str(self),str(nodes),str(names)))
- bld.node_deps[key]=nodes
- bld.raw_deps[key]=names
- self.are_implicit_nodes_ready()
- try:
- bld.task_sigs[(key,'imp')]=sig=self.compute_sig_implicit_deps()
- except Exception:
- if Logs.verbose:
- for k in bld.node_deps.get(self.uid(),[]):
- try:
- k.get_bld_sig()
- except Exception:
- Logs.warn('Missing signature for node %r (may cause rebuilds)'%k)
- else:
- return sig
- def compute_sig_implicit_deps(self):
- upd=self.m.update
- bld=self.generator.bld
- self.are_implicit_nodes_ready()
- for k in bld.node_deps.get(self.uid(),[]):
- upd(k.get_bld_sig())
- return self.m.digest()
- def are_implicit_nodes_ready(self):
- bld=self.generator.bld
- try:
- cache=bld.dct_implicit_nodes
- except AttributeError:
- bld.dct_implicit_nodes=cache={}
- try:
- dct=cache[bld.cur]
- except KeyError:
- dct=cache[bld.cur]={}
- for tsk in bld.cur_tasks:
- for x in tsk.outputs:
- dct[x]=tsk
- modified=False
- for x in bld.node_deps.get(self.uid(),[]):
- if x in dct:
- self.run_after.add(dct[x])
- modified=True
- if modified:
- for tsk in self.run_after:
- if not tsk.hasrun:
- raise Errors.TaskNotReady('not ready')
- def can_retrieve_cache(self):
- if not getattr(self,'outputs',None):
- return None
- sig=self.signature()
- ssig=Utils.to_hex(self.uid())+Utils.to_hex(sig)
- dname=os.path.join(self.generator.bld.cache_global,ssig)
- try:
- t1=os.stat(dname).st_mtime
- except OSError:
- return None
- for node in self.outputs:
- orig=os.path.join(dname,node.name)
- try:
- shutil.copy2(orig,node.abspath())
- os.utime(orig,None)
- except(OSError,IOError):
- Logs.debug('task: failed retrieving file')
- return None
- try:
- t2=os.stat(dname).st_mtime
- except OSError:
- return None
- if t1!=t2:
- return None
- for node in self.outputs:
- node.sig=sig
- if self.generator.bld.progress_bar<1:
- self.generator.bld.to_log('restoring from cache %r\n'%node.abspath())
- self.cached=True
- return True
- def put_files_cache(self):
- if getattr(self,'cached',None):
- return None
- if not getattr(self,'outputs',None):
- return None
- sig=self.signature()
- ssig=Utils.to_hex(self.uid())+Utils.to_hex(sig)
- dname=os.path.join(self.generator.bld.cache_global,ssig)
- tmpdir=tempfile.mkdtemp(prefix=self.generator.bld.cache_global+os.sep+'waf')
- try:
- shutil.rmtree(dname)
- except Exception:
- pass
- try:
- for node in self.outputs:
- dest=os.path.join(tmpdir,node.name)
- shutil.copy2(node.abspath(),dest)
- except(OSError,IOError):
- try:
- shutil.rmtree(tmpdir)
- except Exception:
- pass
- else:
- try:
- os.rename(tmpdir,dname)
- except OSError:
- try:
- shutil.rmtree(tmpdir)
- except Exception:
- pass
- else:
- try:
- os.chmod(dname,Utils.O755)
- except Exception:
- pass
-def is_before(t1,t2):
- to_list=Utils.to_list
- for k in to_list(t2.ext_in):
- if k in to_list(t1.ext_out):
- return 1
- if t1.__class__.__name__ in to_list(t2.after):
- return 1
- if t2.__class__.__name__ in to_list(t1.before):
- return 1
- return 0
-def set_file_constraints(tasks):
- ins=Utils.defaultdict(set)
- outs=Utils.defaultdict(set)
- for x in tasks:
- for a in getattr(x,'inputs',[])+getattr(x,'dep_nodes',[]):
- ins[id(a)].add(x)
- for a in getattr(x,'outputs',[]):
- outs[id(a)].add(x)
- links=set(ins.keys()).intersection(outs.keys())
- for k in links:
- for a in ins[k]:
- a.run_after.update(outs[k])
-def set_precedence_constraints(tasks):
- cstr_groups=Utils.defaultdict(list)
- for x in tasks:
- h=x.hash_constraints()
- cstr_groups[h].append(x)
- keys=list(cstr_groups.keys())
- maxi=len(keys)
- for i in range(maxi):
- t1=cstr_groups[keys[i]][0]
- for j in range(i+1,maxi):
- t2=cstr_groups[keys[j]][0]
- if is_before(t1,t2):
- a=i
- b=j
- elif is_before(t2,t1):
- a=j
- b=i
- else:
- continue
- aval=set(cstr_groups[keys[a]])
- for x in cstr_groups[keys[b]]:
- x.run_after.update(aval)
-def funex(c):
- dc={}
- exec(c,dc)
- return dc['f']
-reg_act=re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})",re.M)
-def compile_fun_shell(line):
- extr=[]
- def repl(match):
- g=match.group
- if g('dollar'):return"$"
- elif g('backslash'):return'\\\\'
- elif g('subst'):extr.append((g('var'),g('code')));return"%s"
- return None
- line=reg_act.sub(repl,line)or line
- parm=[]
- dvars=[]
- app=parm.append
- for(var,meth)in extr:
- if var=='SRC':
- if meth:app('tsk.inputs%s'%meth)
- else:app('" ".join([a.path_from(bld.bldnode) for a in tsk.inputs])')
- elif var=='TGT':
- if meth:app('tsk.outputs%s'%meth)
- else:app('" ".join([a.path_from(bld.bldnode) for a in tsk.outputs])')
- elif meth:
- if meth.startswith(':'):
- m=meth[1:]
- if m=='SRC':
- m='[a.path_from(bld.bldnode) for a in tsk.inputs]'
- elif m=='TGT':
- m='[a.path_from(bld.bldnode) for a in tsk.outputs]'
- elif m[:3]not in('tsk','gen','bld'):
- dvars.extend([var,meth[1:]])
- m='%r'%m
- app('" ".join(tsk.colon(%r, %s))'%(var,m))
- else:
- app('%s%s'%(var,meth))
- else:
- if not var in dvars:dvars.append(var)
- app("p('%s')"%var)
- if parm:parm="%% (%s) "%(',\n\t\t'.join(parm))
- else:parm=''
- c=COMPILE_TEMPLATE_SHELL%(line,parm)
- Logs.debug('action: %s'%c.strip().splitlines())
- return(funex(c),dvars)
-def compile_fun_noshell(line):
- extr=[]
- def repl(match):
- g=match.group
- if g('dollar'):return"$"
- elif g('subst'):extr.append((g('var'),g('code')));return"<<|@|>>"
- return None
- line2=reg_act.sub(repl,line)
- params=line2.split('<<|@|>>')
- assert(extr)
- buf=[]
- dvars=[]
- app=buf.append
- for x in range(len(extr)):
- params[x]=params[x].strip()
- if params[x]:
- app("lst.extend(%r)"%params[x].split())
- (var,meth)=extr[x]
- if var=='SRC':
- if meth:app('lst.append(tsk.inputs%s)'%meth)
- else:app("lst.extend([a.path_from(bld.bldnode) for a in tsk.inputs])")
- elif var=='TGT':
- if meth:app('lst.append(tsk.outputs%s)'%meth)
- else:app("lst.extend([a.path_from(bld.bldnode) for a in tsk.outputs])")
- elif meth:
- if meth.startswith(':'):
- m=meth[1:]
- if m=='SRC':
- m='[a.path_from(bld.bldnode) for a in tsk.inputs]'
- elif m=='TGT':
- m='[a.path_from(bld.bldnode) for a in tsk.outputs]'
- elif m[:3]not in('tsk','gen','bld'):
- dvars.extend([var,m])
- m='%r'%m
- app('lst.extend(tsk.colon(%r, %s))'%(var,m))
- else:
- app('lst.extend(gen.to_list(%s%s))'%(var,meth))
- else:
- app('lst.extend(to_list(env[%r]))'%var)
- if not var in dvars:dvars.append(var)
- if extr:
- if params[-1]:
- app("lst.extend(%r)"%params[-1].split())
- fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf)
- Logs.debug('action: %s'%fun.strip().splitlines())
- return(funex(fun),dvars)
-def compile_fun(line,shell=False):
- if line.find('<')>0 or line.find('>')>0 or line.find('&&')>0:
- shell=True
- if shell:
- return compile_fun_shell(line)
- else:
- return compile_fun_noshell(line)
-def task_factory(name,func=None,vars=None,color='GREEN',ext_in=[],ext_out=[],before=[],after=[],shell=False,scan=None):
- params={'vars':vars or[],'color':color,'name':name,'ext_in':Utils.to_list(ext_in),'ext_out':Utils.to_list(ext_out),'before':Utils.to_list(before),'after':Utils.to_list(after),'shell':shell,'scan':scan,}
- if isinstance(func,str):
- params['run_str']=func
- else:
- params['run']=func
- cls=type(Task)(name,(Task,),params)
- global classes
- classes[name]=cls
- return cls
-def always_run(cls):
- old=cls.runnable_status
- def always(self):
- ret=old(self)
- if ret==SKIP_ME:
- ret=RUN_ME
- return ret
- cls.runnable_status=always
- return cls
-def update_outputs(cls):
- old_post_run=cls.post_run
- def post_run(self):
- old_post_run(self)
- for node in self.outputs:
- node.sig=Utils.h_file(node.abspath())
- self.generator.bld.task_sigs[node.abspath()]=self.uid()
- cls.post_run=post_run
- old_runnable_status=cls.runnable_status
- def runnable_status(self):
- status=old_runnable_status(self)
- if status!=RUN_ME:
- return status
- try:
- bld=self.generator.bld
- prev_sig=bld.task_sigs[self.uid()]
- if prev_sig==self.signature():
- for x in self.outputs:
- if not x.sig or bld.task_sigs[x.abspath()]!=self.uid():
- return RUN_ME
- return SKIP_ME
- except KeyError:
- pass
- except IndexError:
- pass
- except AttributeError:
- pass
- return RUN_ME
- cls.runnable_status=runnable_status
- return cls
diff --git a/waflib/TaskGen.py b/waflib/TaskGen.py
deleted file mode 100644
index acd5166..0000000
--- a/waflib/TaskGen.py
+++ /dev/null
@@ -1,400 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import copy,re,os
-from waflib import Task,Utils,Logs,Errors,ConfigSet,Node
-feats=Utils.defaultdict(set)
-class task_gen(object):
- mappings={}
- prec=Utils.defaultdict(list)
- def __init__(self,*k,**kw):
- self.source=''
- self.target=''
- self.meths=[]
- self.prec=Utils.defaultdict(list)
- self.mappings={}
- self.features=[]
- self.tasks=[]
- if not'bld'in kw:
- self.env=ConfigSet.ConfigSet()
- self.idx=0
- self.path=None
- else:
- self.bld=kw['bld']
- self.env=self.bld.env.derive()
- self.path=self.bld.path
- try:
- self.idx=self.bld.idx[id(self.path)]=self.bld.idx.get(id(self.path),0)+1
- except AttributeError:
- self.bld.idx={}
- self.idx=self.bld.idx[id(self.path)]=1
- for key,val in kw.items():
- setattr(self,key,val)
- def __str__(self):
- return"<task_gen %r declared in %s>"%(self.name,self.path.abspath())
- def __repr__(self):
- lst=[]
- for x in self.__dict__.keys():
- if x not in['env','bld','compiled_tasks','tasks']:
- lst.append("%s=%s"%(x,repr(getattr(self,x))))
- return"bld(%s) in %s"%(", ".join(lst),self.path.abspath())
- def get_name(self):
- try:
- return self._name
- except AttributeError:
- if isinstance(self.target,list):
- lst=[str(x)for x in self.target]
- name=self._name=','.join(lst)
- else:
- name=self._name=str(self.target)
- return name
- def set_name(self,name):
- self._name=name
- name=property(get_name,set_name)
- def to_list(self,val):
- if isinstance(val,str):return val.split()
- else:return val
- def post(self):
- if getattr(self,'posted',None):
- return False
- self.posted=True
- keys=set(self.meths)
- self.features=Utils.to_list(self.features)
- for x in self.features+['*']:
- st=feats[x]
- if not st:
- if not x in Task.classes:
- Logs.warn('feature %r does not exist - bind at least one method to it'%x)
- keys.update(list(st))
- prec={}
- prec_tbl=self.prec or task_gen.prec
- for x in prec_tbl:
- if x in keys:
- prec[x]=prec_tbl[x]
- tmp=[]
- for a in keys:
- for x in prec.values():
- if a in x:break
- else:
- tmp.append(a)
- tmp.sort()
- out=[]
- while tmp:
- e=tmp.pop()
- if e in keys:out.append(e)
- try:
- nlst=prec[e]
- except KeyError:
- pass
- else:
- del prec[e]
- for x in nlst:
- for y in prec:
- if x in prec[y]:
- break
- else:
- tmp.append(x)
- if prec:
- raise Errors.WafError('Cycle detected in the method execution %r'%prec)
- out.reverse()
- self.meths=out
- Logs.debug('task_gen: posting %s %d'%(self,id(self)))
- for x in out:
- try:
- v=getattr(self,x)
- except AttributeError:
- raise Errors.WafError('%r is not a valid task generator method'%x)
- Logs.debug('task_gen: -> %s (%d)'%(x,id(self)))
- v()
- Logs.debug('task_gen: posted %s'%self.name)
- return True
- def get_hook(self,node):
- name=node.name
- for k in self.mappings:
- if name.endswith(k):
- return self.mappings[k]
- for k in task_gen.mappings:
- if name.endswith(k):
- return task_gen.mappings[k]
- raise Errors.WafError("File %r has no mapping in %r (did you forget to load a waf tool?)"%(node,task_gen.mappings.keys()))
- def create_task(self,name,src=None,tgt=None):
- task=Task.classes[name](env=self.env.derive(),generator=self)
- if src:
- task.set_inputs(src)
- if tgt:
- task.set_outputs(tgt)
- self.tasks.append(task)
- return task
- def clone(self,env):
- newobj=self.bld()
- for x in self.__dict__:
- if x in['env','bld']:
- continue
- elif x in['path','features']:
- setattr(newobj,x,getattr(self,x))
- else:
- setattr(newobj,x,copy.copy(getattr(self,x)))
- newobj.posted=False
- if isinstance(env,str):
- newobj.env=self.bld.all_envs[env].derive()
- else:
- newobj.env=env.derive()
- return newobj
-def declare_chain(name='',rule=None,reentrant=None,color='BLUE',ext_in=[],ext_out=[],before=[],after=[],decider=None,scan=None,install_path=None,shell=False):
- ext_in=Utils.to_list(ext_in)
- ext_out=Utils.to_list(ext_out)
- if not name:
- name=rule
- cls=Task.task_factory(name,rule,color=color,ext_in=ext_in,ext_out=ext_out,before=before,after=after,scan=scan,shell=shell)
- def x_file(self,node):
- ext=decider and decider(self,node)or cls.ext_out
- if ext_in:
- _ext_in=ext_in[0]
- tsk=self.create_task(name,node)
- cnt=0
- keys=list(self.mappings.keys())+list(self.__class__.mappings.keys())
- for x in ext:
- k=node.change_ext(x,ext_in=_ext_in)
- tsk.outputs.append(k)
- if reentrant!=None:
- if cnt<int(reentrant):
- self.source.append(k)
- else:
- for y in keys:
- if k.name.endswith(y):
- self.source.append(k)
- break
- cnt+=1
- if install_path:
- self.bld.install_files(install_path,tsk.outputs)
- return tsk
- for x in cls.ext_in:
- task_gen.mappings[x]=x_file
- return x_file
-def taskgen_method(func):
- setattr(task_gen,func.__name__,func)
- return func
-def feature(*k):
- def deco(func):
- setattr(task_gen,func.__name__,func)
- for name in k:
- feats[name].update([func.__name__])
- return func
- return deco
-def before_method(*k):
- def deco(func):
- setattr(task_gen,func.__name__,func)
- for fun_name in k:
- if not func.__name__ in task_gen.prec[fun_name]:
- task_gen.prec[fun_name].append(func.__name__)
- return func
- return deco
-before=before_method
-def after_method(*k):
- def deco(func):
- setattr(task_gen,func.__name__,func)
- for fun_name in k:
- if not fun_name in task_gen.prec[func.__name__]:
- task_gen.prec[func.__name__].append(fun_name)
- return func
- return deco
-after=after_method
-def extension(*k):
- def deco(func):
- setattr(task_gen,func.__name__,func)
- for x in k:
- task_gen.mappings[x]=func
- return func
- return deco
-@taskgen_method
-def to_nodes(self,lst,path=None):
- tmp=[]
- path=path or self.path
- find=path.find_resource
- if isinstance(lst,self.path.__class__):
- lst=[lst]
- for x in Utils.to_list(lst):
- if isinstance(x,str):
- node=find(x)
- else:
- node=x
- if not node:
- raise Errors.WafError("source not found: %r in %r"%(x,self))
- tmp.append(node)
- return tmp
-@feature('*')
-def process_source(self):
- self.source=self.to_nodes(getattr(self,'source',[]))
- for node in self.source:
- self.get_hook(node)(self,node)
-@feature('*')
-@before_method('process_source')
-def process_rule(self):
- if not getattr(self,'rule',None):
- return
- name=str(getattr(self,'name',None)or self.target or getattr(self.rule,'__name__',self.rule))
- try:
- cache=self.bld.cache_rule_attr
- except AttributeError:
- cache=self.bld.cache_rule_attr={}
- cls=None
- if getattr(self,'cache_rule','True'):
- try:
- cls=cache[(name,self.rule)]
- except KeyError:
- pass
- if not cls:
- cls=Task.task_factory(name,self.rule,getattr(self,'vars',[]),shell=getattr(self,'shell',True),color=getattr(self,'color','BLUE'),scan=getattr(self,'scan',None))
- if getattr(self,'scan',None):
- cls.scan=self.scan
- elif getattr(self,'deps',None):
- def scan(self):
- nodes=[]
- for x in self.generator.to_list(getattr(self.generator,'deps',None)):
- node=self.generator.path.find_resource(x)
- if not node:
- self.generator.bld.fatal('Could not find %r (was it declared?)'%x)
- nodes.append(node)
- return[nodes,[]]
- cls.scan=scan
- if getattr(self,'update_outputs',None):
- Task.update_outputs(cls)
- if getattr(self,'always',None):
- Task.always_run(cls)
- for x in['after','before','ext_in','ext_out']:
- setattr(cls,x,getattr(self,x,[]))
- if getattr(self,'cache_rule','True'):
- cache[(name,self.rule)]=cls
- tsk=self.create_task(name)
- if getattr(self,'target',None):
- if isinstance(self.target,str):
- self.target=self.target.split()
- if not isinstance(self.target,list):
- self.target=[self.target]
- for x in self.target:
- if isinstance(x,str):
- tsk.outputs.append(self.path.find_or_declare(x))
- else:
- x.parent.mkdir()
- tsk.outputs.append(x)
- if getattr(self,'install_path',None):
- self.bld.install_files(self.install_path,tsk.outputs)
- if getattr(self,'source',None):
- tsk.inputs=self.to_nodes(self.source)
- self.source=[]
- if getattr(self,'cwd',None):
- tsk.cwd=self.cwd
-@feature('seq')
-def sequence_order(self):
- if self.meths and self.meths[-1]!='sequence_order':
- self.meths.append('sequence_order')
- return
- if getattr(self,'seq_start',None):
- return
- if getattr(self.bld,'prev',None):
- self.bld.prev.post()
- for x in self.bld.prev.tasks:
- for y in self.tasks:
- y.set_run_after(x)
- self.bld.prev=self
-re_m4=re.compile('@(\w+)@',re.M)
-class subst_pc(Task.Task):
- def run(self):
- if getattr(self.generator,'is_copy',None):
- self.outputs[0].write(self.inputs[0].read('rb'),'wb')
- if getattr(self.generator,'chmod',None):
- os.chmod(self.outputs[0].abspath(),self.generator.chmod)
- return
- code=self.inputs[0].read(encoding=getattr(self.generator,'encoding','ISO8859-1'))
- if getattr(self.generator,'subst_fun',None):
- code=self.generator.subst_fun(self,code)
- if code:
- self.outputs[0].write(code,encoding=getattr(self.generator,'encoding','ISO8859-1'))
- return
- code=code.replace('%','%%')
- lst=[]
- def repl(match):
- g=match.group
- if g(1):
- lst.append(g(1))
- return"%%(%s)s"%g(1)
- return''
- code=re_m4.sub(repl,code)
- try:
- d=self.generator.dct
- except AttributeError:
- d={}
- for x in lst:
- tmp=getattr(self.generator,x,'')or self.env.get_flat(x)or self.env.get_flat(x.upper())
- d[x]=str(tmp)
- code=code%d
- self.outputs[0].write(code,encoding=getattr(self.generator,'encoding','ISO8859-1'))
- self.generator.bld.raw_deps[self.uid()]=self.dep_vars=lst
- try:delattr(self,'cache_sig')
- except AttributeError:pass
- if getattr(self.generator,'chmod',None):
- os.chmod(self.outputs[0].abspath(),self.generator.chmod)
- def sig_vars(self):
- bld=self.generator.bld
- env=self.env
- upd=self.m.update
- if getattr(self.generator,'subst_fun',None):
- upd(Utils.h_fun(self.generator.subst_fun))
- vars=self.generator.bld.raw_deps.get(self.uid(),[])
- act_sig=bld.hash_env_vars(env,vars)
- upd(act_sig)
- lst=[getattr(self.generator,x,'')for x in vars]
- upd(Utils.h_list(lst))
- return self.m.digest()
-@extension('.pc.in')
-def add_pcfile(self,node):
- tsk=self.create_task('subst_pc',node,node.change_ext('.pc','.pc.in'))
- self.bld.install_files(getattr(self,'install_path','${LIBDIR}/pkgconfig/'),tsk.outputs)
-class subst(subst_pc):
- pass
-@feature('subst')
-@before_method('process_source','process_rule')
-def process_subst(self):
- src=Utils.to_list(getattr(self,'source',[]))
- if isinstance(src,Node.Node):
- src=[src]
- tgt=Utils.to_list(getattr(self,'target',[]))
- if isinstance(tgt,Node.Node):
- tgt=[tgt]
- if len(src)!=len(tgt):
- raise Errors.WafError('invalid number of source/target for %r'%self)
- for x,y in zip(src,tgt):
- if not x or not y:
- raise Errors.WafError('null source or target for %r'%self)
- a,b=None,None
- if isinstance(x,str)and isinstance(y,str)and x==y:
- a=self.path.find_node(x)
- b=self.path.get_bld().make_node(y)
- if not os.path.isfile(b.abspath()):
- b.sig=None
- b.parent.mkdir()
- else:
- if isinstance(x,str):
- a=self.path.find_resource(x)
- elif isinstance(x,Node.Node):
- a=x
- if isinstance(y,str):
- b=self.path.find_or_declare(y)
- elif isinstance(y,Node.Node):
- b=y
- if not a:
- raise Errors.WafError('cound not find %r for %r'%(x,self))
- has_constraints=False
- tsk=self.create_task('subst',a,b)
- for k in('after','before','ext_in','ext_out'):
- val=getattr(self,k,None)
- if val:
- has_constraints=True
- setattr(tsk,k,val)
- if not has_constraints and b.name.endswith('.h'):
- tsk.before=[k for k in('c','cxx')if k in Task.classes]
- inst_to=getattr(self,'install_path',None)
- if inst_to:
- self.bld.install_files(inst_to,b,chmod=getattr(self,'chmod',Utils.O644))
- self.source=[]
diff --git a/waflib/Tools/__init__.py b/waflib/Tools/__init__.py
deleted file mode 100644
index efeed79..0000000
--- a/waflib/Tools/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
diff --git a/waflib/Tools/ar.py b/waflib/Tools/ar.py
deleted file mode 100644
index 7a16dfe..0000000
--- a/waflib/Tools/ar.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-from waflib.Configure import conf
-@conf
-def find_ar(conf):
- conf.load('ar')
-def configure(conf):
- conf.find_program('ar',var='AR')
- conf.env.ARFLAGS='rcs'
diff --git a/waflib/Tools/asm.py b/waflib/Tools/asm.py
deleted file mode 100644
index b9ed5f4..0000000
--- a/waflib/Tools/asm.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys
-from waflib import Task,Utils
-import waflib.Task
-from waflib.Tools.ccroot import link_task,stlink_task
-from waflib.TaskGen import extension,feature
-class asm(Task.Task):
- color='BLUE'
- run_str='${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
-@extension('.s','.S','.asm','.ASM','.spp','.SPP')
-def asm_hook(self,node):
- return self.create_compiled_task('asm',node)
-class asmprogram(link_task):
- run_str='${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}'
- ext_out=['.bin']
- inst_to='${BINDIR}'
-class asmshlib(asmprogram):
- inst_to='${LIBDIR}'
-class asmstlib(stlink_task):
- pass
-def configure(conf):
- conf.env['ASMPATH_ST']='-I%s'
diff --git a/waflib/Tools/bison.py b/waflib/Tools/bison.py
deleted file mode 100644
index 6ae7898..0000000
--- a/waflib/Tools/bison.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-from waflib import Task
-from waflib.TaskGen import extension
-class bison(Task.Task):
- color='BLUE'
- run_str='${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
- ext_out=['.h']
-@extension('.y','.yc','.yy')
-def big_bison(self,node):
- has_h='-d'in self.env['BISONFLAGS']
- outs=[]
- if node.name.endswith('.yc'):
- outs.append(node.change_ext('.tab.cc'))
- if has_h:
- outs.append(node.change_ext('.tab.hh'))
- else:
- outs.append(node.change_ext('.tab.c'))
- if has_h:
- outs.append(node.change_ext('.tab.h'))
- tsk=self.create_task('bison',node,outs)
- tsk.cwd=node.parent.get_bld().abspath()
- self.source.append(outs[0])
-def configure(conf):
- conf.find_program('bison',var='BISON')
- conf.env.BISONFLAGS=['-d']
diff --git a/waflib/Tools/c.py b/waflib/Tools/c.py
deleted file mode 100644
index 4d8cbd5..0000000
--- a/waflib/Tools/c.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-from waflib import TaskGen,Task,Utils
-from waflib.Tools import c_preproc
-from waflib.Tools.ccroot import link_task,stlink_task
-@TaskGen.extension('.c')
-def c_hook(self,node):
- return self.create_compiled_task('c',node)
-class c(Task.Task):
- run_str='${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
- vars=['CCDEPS']
- ext_in=['.h']
- scan=c_preproc.scan
-class cprogram(link_task):
- run_str='${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}'
- ext_out=['.bin']
- vars=['LINKDEPS']
- inst_to='${BINDIR}'
-class cshlib(cprogram):
- inst_to='${LIBDIR}'
-class cstlib(stlink_task):
- pass
diff --git a/waflib/Tools/c_aliases.py b/waflib/Tools/c_aliases.py
deleted file mode 100644
index a3a2bb9..0000000
--- a/waflib/Tools/c_aliases.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys,re
-from waflib import Utils,Build
-from waflib.Configure import conf
-def get_extensions(lst):
- ret=[]
- for x in Utils.to_list(lst):
- try:
- if not isinstance(x,str):
- x=x.name
- ret.append(x[x.rfind('.')+1:])
- except Exception:
- pass
- return ret
-def sniff_features(**kw):
- exts=get_extensions(kw['source'])
- type=kw['_type']
- feats=[]
- if'cxx'in exts or'cpp'in exts or'c++'in exts or'cc'in exts or'C'in exts:
- feats.append('cxx')
- if'c'in exts or'vala'in exts:
- feats.append('c')
- if'd'in exts:
- feats.append('d')
- if'java'in exts:
- feats.append('java')
- if'java'in exts:
- return'java'
- if type in['program','shlib','stlib']:
- for x in feats:
- if x in['cxx','d','c']:
- feats.append(x+type)
- return feats
-def set_features(kw,_type):
- kw['_type']=_type
- kw['features']=Utils.to_list(kw.get('features',[]))+Utils.to_list(sniff_features(**kw))
-@conf
-def program(bld,*k,**kw):
- set_features(kw,'program')
- return bld(*k,**kw)
-@conf
-def shlib(bld,*k,**kw):
- set_features(kw,'shlib')
- return bld(*k,**kw)
-@conf
-def stlib(bld,*k,**kw):
- set_features(kw,'stlib')
- return bld(*k,**kw)
-@conf
-def objects(bld,*k,**kw):
- set_features(kw,'objects')
- return bld(*k,**kw)
diff --git a/waflib/Tools/c_config.py b/waflib/Tools/c_config.py
deleted file mode 100755
index e6d3b5d..0000000
--- a/waflib/Tools/c_config.py
+++ /dev/null
@@ -1,728 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,re,shlex,sys
-from waflib import Build,Utils,Task,Options,Logs,Errors,ConfigSet,Runner
-from waflib.TaskGen import after_method,feature
-from waflib.Configure import conf
-WAF_CONFIG_H='config.h'
-DEFKEYS='define_key'
-INCKEYS='include_key'
-cfg_ver={'atleast-version':'>=','exact-version':'==','max-version':'<=',}
-SNIP_FUNCTION='''
-int main(int argc, char **argv) {
- void *p;
- (void)argc; (void)argv;
- p=(void*)(%s);
- return 0;
-}
-'''
-SNIP_TYPE='''
-int main(int argc, char **argv) {
- (void)argc; (void)argv;
- if ((%(type_name)s *) 0) return 0;
- if (sizeof (%(type_name)s)) return 0;
- return 1;
-}
-'''
-SNIP_EMPTY_PROGRAM='''
-int main(int argc, char **argv) {
- (void)argc; (void)argv;
- return 0;
-}
-'''
-SNIP_FIELD='''
-int main(int argc, char **argv) {
- char *off;
- (void)argc; (void)argv;
- off = (char*) &((%(type_name)s*)0)->%(field_name)s;
- return (size_t) off < sizeof(%(type_name)s);
-}
-'''
-MACRO_TO_DESTOS={'__linux__':'linux','__GNU__':'gnu','__FreeBSD__':'freebsd','__NetBSD__':'netbsd','__OpenBSD__':'openbsd','__sun':'sunos','__hpux':'hpux','__sgi':'irix','_AIX':'aix','__CYGWIN__':'cygwin','__MSYS__':'msys','_UWIN':'uwin','_WIN64':'win32','_WIN32':'win32','__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__':'darwin','__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__':'darwin','__QNX__':'qnx','__native_client__':'nacl'}
-MACRO_TO_DEST_CPU={'__x86_64__':'x86_64','__amd64__':'x86_64','__i386__':'x86','__ia64__':'ia','__mips__':'mips','__sparc__':'sparc','__alpha__':'alpha','__aarch64__':'aarch64','__thumb__':'thumb','__arm__':'arm','__hppa__':'hppa','__powerpc__':'powerpc','__ppc__':'powerpc','__convex__':'convex','__m68k__':'m68k','__s390x__':'s390x','__s390__':'s390','__sh__':'sh',}
-@conf
-def parse_flags(self,line,uselib_store,env=None,force_static=False):
- assert(isinstance(line,str))
- env=env or self.env
- app=env.append_value
- appu=env.append_unique
- lex=shlex.shlex(line,posix=False)
- lex.whitespace_split=True
- lex.commenters=''
- lst=list(lex)
- uselib=uselib_store
- while lst:
- x=lst.pop(0)
- st=x[:2]
- ot=x[2:]
- if st=='-I'or st=='/I':
- if not ot:ot=lst.pop(0)
- appu('INCLUDES_'+uselib,[ot])
- elif st=='-include':
- tmp=[x,lst.pop(0)]
- app('CFLAGS',tmp)
- app('CXXFLAGS',tmp)
- elif st=='-D'or(env.CXX_NAME=='msvc'and st=='/D'):
- if not ot:ot=lst.pop(0)
- app('DEFINES_'+uselib,[ot])
- elif st=='-l':
- if not ot:ot=lst.pop(0)
- prefix=force_static and'STLIB_'or'LIB_'
- appu(prefix+uselib,[ot])
- elif st=='-L':
- if not ot:ot=lst.pop(0)
- appu('LIBPATH_'+uselib,[ot])
- elif x.startswith('/LIBPATH:'):
- appu('LIBPATH_'+uselib,[x.replace('/LIBPATH:','')])
- elif x=='-pthread'or x.startswith('+')or x.startswith('-std'):
- app('CFLAGS_'+uselib,[x])
- app('CXXFLAGS_'+uselib,[x])
- app('LINKFLAGS_'+uselib,[x])
- elif x=='-framework':
- appu('FRAMEWORK_'+uselib,[lst.pop(0)])
- elif x.startswith('-F'):
- appu('FRAMEWORKPATH_'+uselib,[x[2:]])
- elif x.startswith('-Wl'):
- app('LINKFLAGS_'+uselib,[x])
- elif x.startswith('-m')or x.startswith('-f')or x.startswith('-dynamic'):
- app('CFLAGS_'+uselib,[x])
- app('CXXFLAGS_'+uselib,[x])
- elif x.startswith('-bundle'):
- app('LINKFLAGS_'+uselib,[x])
- elif x.startswith('-undefined'):
- arg=lst.pop(0)
- app('LINKFLAGS_'+uselib,[x,arg])
- elif x.startswith('-arch')or x.startswith('-isysroot'):
- tmp=[x,lst.pop(0)]
- app('CFLAGS_'+uselib,tmp)
- app('CXXFLAGS_'+uselib,tmp)
- app('LINKFLAGS_'+uselib,tmp)
- elif x.endswith('.a')or x.endswith('.so')or x.endswith('.dylib')or x.endswith('.lib'):
- appu('LINKFLAGS_'+uselib,[x])
-@conf
-def ret_msg(self,f,kw):
- if isinstance(f,str):
- return f
- return f(kw)
-@conf
-def validate_cfg(self,kw):
- if not'path'in kw:
- if not self.env.PKGCONFIG:
- self.find_program('pkg-config',var='PKGCONFIG')
- kw['path']=self.env.PKGCONFIG
- if'atleast_pkgconfig_version'in kw:
- if not'msg'in kw:
- kw['msg']='Checking for pkg-config version >= %r'%kw['atleast_pkgconfig_version']
- return
- if not'okmsg'in kw:
- kw['okmsg']='yes'
- if not'errmsg'in kw:
- kw['errmsg']='not found'
- if'modversion'in kw:
- if not'msg'in kw:
- kw['msg']='Checking for %r version'%kw['modversion']
- return
- for x in cfg_ver.keys():
- y=x.replace('-','_')
- if y in kw:
- if not'package'in kw:
- raise ValueError('%s requires a package'%x)
- if not'msg'in kw:
- kw['msg']='Checking for %r %s %s'%(kw['package'],cfg_ver[x],kw[y])
- return
- if not'msg'in kw:
- kw['msg']='Checking for %r'%(kw['package']or kw['path'])
-@conf
-def exec_cfg(self,kw):
- def define_it():
- self.define(self.have_define(kw.get('uselib_store',kw['package'])),1,0)
- if'atleast_pkgconfig_version'in kw:
- cmd=[kw['path'],'--atleast-pkgconfig-version=%s'%kw['atleast_pkgconfig_version']]
- self.cmd_and_log(cmd)
- if not'okmsg'in kw:
- kw['okmsg']='yes'
- return
- for x in cfg_ver:
- y=x.replace('-','_')
- if y in kw:
- self.cmd_and_log([kw['path'],'--%s=%s'%(x,kw[y]),kw['package']])
- if not'okmsg'in kw:
- kw['okmsg']='yes'
- define_it()
- break
- if'modversion'in kw:
- version=self.cmd_and_log([kw['path'],'--modversion',kw['modversion']]).strip()
- self.define('%s_VERSION'%Utils.quote_define_name(kw.get('uselib_store',kw['modversion'])),version)
- return version
- lst=[kw['path']]
- defi=kw.get('define_variable',None)
- if not defi:
- defi=self.env.PKG_CONFIG_DEFINES or{}
- for key,val in defi.items():
- lst.append('--define-variable=%s=%s'%(key,val))
- if'variables'in kw:
- env=kw.get('env',self.env)
- uselib=kw.get('uselib_store',kw['package'].upper())
- vars=Utils.to_list(kw['variables'])
- for v in vars:
- val=self.cmd_and_log(lst+['--variable='+v]).strip()
- var='%s_%s'%(uselib,v)
- env[var]=val
- if not'okmsg'in kw:
- kw['okmsg']='yes'
- return
- static=False
- if'args'in kw:
- args=Utils.to_list(kw['args'])
- if'--static'in args or'--static-libs'in args:
- static=True
- lst+=args
- lst.extend(Utils.to_list(kw['package']))
- ret=self.cmd_and_log(lst)
- if not'okmsg'in kw:
- kw['okmsg']='yes'
- define_it()
- self.parse_flags(ret,kw.get('uselib_store',kw['package'].upper()),kw.get('env',self.env),force_static=static)
- return ret
-@conf
-def check_cfg(self,*k,**kw):
- if k:
- lst=k[0].split()
- kw['package']=lst[0]
- kw['args']=' '.join(lst[1:])
- self.validate_cfg(kw)
- if'msg'in kw:
- self.start_msg(kw['msg'])
- ret=None
- try:
- ret=self.exec_cfg(kw)
- except self.errors.WafError:
- if'errmsg'in kw:
- self.end_msg(kw['errmsg'],'YELLOW')
- if Logs.verbose>1:
- raise
- else:
- self.fatal('The configuration failed')
- else:
- kw['success']=ret
- if'okmsg'in kw:
- self.end_msg(self.ret_msg(kw['okmsg'],kw))
- return ret
-@conf
-def validate_c(self,kw):
- if not'env'in kw:
- kw['env']=self.env.derive()
- env=kw['env']
- if not'compiler'in kw and not'features'in kw:
- kw['compiler']='c'
- if env['CXX_NAME']and Task.classes.get('cxx',None):
- kw['compiler']='cxx'
- if not self.env['CXX']:
- self.fatal('a c++ compiler is required')
- else:
- if not self.env['CC']:
- self.fatal('a c compiler is required')
- if not'compile_mode'in kw:
- kw['compile_mode']='c'
- if'cxx'in Utils.to_list(kw.get('features',[]))or kw.get('compiler','')=='cxx':
- kw['compile_mode']='cxx'
- if not'type'in kw:
- kw['type']='cprogram'
- if not'features'in kw:
- kw['features']=[kw['compile_mode'],kw['type']]
- else:
- kw['features']=Utils.to_list(kw['features'])
- if not'compile_filename'in kw:
- kw['compile_filename']='test.c'+((kw['compile_mode']=='cxx')and'pp'or'')
- def to_header(dct):
- if'header_name'in dct:
- dct=Utils.to_list(dct['header_name'])
- return''.join(['#include <%s>\n'%x for x in dct])
- return''
- if'framework_name'in kw:
- fwkname=kw['framework_name']
- if not'uselib_store'in kw:
- kw['uselib_store']=fwkname.upper()
- if not kw.get('no_header',False):
- if not'header_name'in kw:
- kw['header_name']=[]
- fwk='%s/%s.h'%(fwkname,fwkname)
- if kw.get('remove_dot_h',None):
- fwk=fwk[:-2]
- kw['header_name']=Utils.to_list(kw['header_name'])+[fwk]
- kw['msg']='Checking for framework %s'%fwkname
- kw['framework']=fwkname
- if'function_name'in kw:
- fu=kw['function_name']
- if not'msg'in kw:
- kw['msg']='Checking for function %s'%fu
- kw['code']=to_header(kw)+SNIP_FUNCTION%fu
- if not'uselib_store'in kw:
- kw['uselib_store']=fu.upper()
- if not'define_name'in kw:
- kw['define_name']=self.have_define(fu)
- elif'type_name'in kw:
- tu=kw['type_name']
- if not'header_name'in kw:
- kw['header_name']='stdint.h'
- if'field_name'in kw:
- field=kw['field_name']
- kw['code']=to_header(kw)+SNIP_FIELD%{'type_name':tu,'field_name':field}
- if not'msg'in kw:
- kw['msg']='Checking for field %s in %s'%(field,tu)
- if not'define_name'in kw:
- kw['define_name']=self.have_define((tu+'_'+field).upper())
- else:
- kw['code']=to_header(kw)+SNIP_TYPE%{'type_name':tu}
- if not'msg'in kw:
- kw['msg']='Checking for type %s'%tu
- if not'define_name'in kw:
- kw['define_name']=self.have_define(tu.upper())
- elif'header_name'in kw:
- if not'msg'in kw:
- kw['msg']='Checking for header %s'%kw['header_name']
- l=Utils.to_list(kw['header_name'])
- assert len(l)>0,'list of headers in header_name is empty'
- kw['code']=to_header(kw)+SNIP_EMPTY_PROGRAM
- if not'uselib_store'in kw:
- kw['uselib_store']=l[0].upper()
- if not'define_name'in kw:
- kw['define_name']=self.have_define(l[0])
- if'lib'in kw:
- if not'msg'in kw:
- kw['msg']='Checking for library %s'%kw['lib']
- if not'uselib_store'in kw:
- kw['uselib_store']=kw['lib'].upper()
- if'stlib'in kw:
- if not'msg'in kw:
- kw['msg']='Checking for static library %s'%kw['stlib']
- if not'uselib_store'in kw:
- kw['uselib_store']=kw['stlib'].upper()
- if'fragment'in kw:
- kw['code']=kw['fragment']
- if not'msg'in kw:
- kw['msg']='Checking for code snippet'
- if not'errmsg'in kw:
- kw['errmsg']='no'
- for(flagsname,flagstype)in[('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')]:
- if flagsname in kw:
- if not'msg'in kw:
- kw['msg']='Checking for %s flags %s'%(flagstype,kw[flagsname])
- if not'errmsg'in kw:
- kw['errmsg']='no'
- if not'execute'in kw:
- kw['execute']=False
- if kw['execute']:
- kw['features'].append('test_exec')
- if not'errmsg'in kw:
- kw['errmsg']='not found'
- if not'okmsg'in kw:
- kw['okmsg']='yes'
- if not'code'in kw:
- kw['code']=SNIP_EMPTY_PROGRAM
- if self.env[INCKEYS]:
- kw['code']='\n'.join(['#include <%s>'%x for x in self.env[INCKEYS]])+'\n'+kw['code']
- if not kw.get('success'):kw['success']=None
- if'define_name'in kw:
- self.undefine(kw['define_name'])
- assert'msg'in kw,'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c'
-@conf
-def post_check(self,*k,**kw):
- is_success=0
- if kw['execute']:
- if kw['success']is not None:
- if kw.get('define_ret',False):
- is_success=kw['success']
- else:
- is_success=(kw['success']==0)
- else:
- is_success=(kw['success']==0)
- if'define_name'in kw:
- if'header_name'in kw or'function_name'in kw or'type_name'in kw or'fragment'in kw:
- if kw['execute']and kw.get('define_ret',None)and isinstance(is_success,str):
- self.define(kw['define_name'],is_success,quote=kw.get('quote',1))
- else:
- self.define_cond(kw['define_name'],is_success)
- else:
- self.define_cond(kw['define_name'],is_success)
- if'header_name'in kw:
- if kw.get('auto_add_header_name',False):
- self.env.append_value(INCKEYS,Utils.to_list(kw['header_name']))
- if is_success and'uselib_store'in kw:
- from waflib.Tools import ccroot
- _vars=set([])
- for x in kw['features']:
- if x in ccroot.USELIB_VARS:
- _vars|=ccroot.USELIB_VARS[x]
- for k in _vars:
- lk=k.lower()
- if k=='INCLUDES':lk='includes'
- if k=='DEFINES':lk='defines'
- if lk in kw:
- val=kw[lk]
- if isinstance(val,str):
- val=val.rstrip(os.path.sep)
- self.env.append_unique(k+'_'+kw['uselib_store'],val)
- return is_success
-@conf
-def check(self,*k,**kw):
- self.validate_c(kw)
- self.start_msg(kw['msg'])
- ret=None
- try:
- ret=self.run_c_code(*k,**kw)
- except self.errors.ConfigurationError:
- self.end_msg(kw['errmsg'],'YELLOW')
- if Logs.verbose>1:
- raise
- else:
- self.fatal('The configuration failed')
- else:
- kw['success']=ret
- ret=self.post_check(*k,**kw)
- if not ret:
- self.end_msg(kw['errmsg'],'YELLOW')
- self.fatal('The configuration failed %r'%ret)
- else:
- self.end_msg(self.ret_msg(kw['okmsg'],kw))
- return ret
-class test_exec(Task.Task):
- color='PINK'
- def run(self):
- if getattr(self.generator,'rpath',None):
- if getattr(self.generator,'define_ret',False):
- self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()])
- else:
- self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()])
- else:
- env=self.env.env or{}
- env.update(dict(os.environ))
- for var in('LD_LIBRARY_PATH','DYLD_LIBRARY_PATH','PATH'):
- env[var]=self.inputs[0].parent.abspath()+os.path.pathsep+env.get(var,'')
- if getattr(self.generator,'define_ret',False):
- self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()],env=env)
- else:
- self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()],env=env)
-@feature('test_exec')
-@after_method('apply_link')
-def test_exec_fun(self):
- self.create_task('test_exec',self.link_task.outputs[0])
-CACHE_RESULTS=1
-COMPILE_ERRORS=2
-@conf
-def run_c_code(self,*k,**kw):
- lst=[str(v)for(p,v)in kw.items()if p!='env']
- h=Utils.h_list(lst)
- dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h)
- try:
- os.makedirs(dir)
- except OSError:
- pass
- try:
- os.stat(dir)
- except OSError:
- self.fatal('cannot use the configuration test folder %r'%dir)
- cachemode=getattr(Options.options,'confcache',None)
- if cachemode==CACHE_RESULTS:
- try:
- proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_c_code'))
- except OSError:
- pass
- else:
- ret=proj['cache_run_c_code']
- if isinstance(ret,str)and ret.startswith('Test does not build'):
- self.fatal(ret)
- return ret
- bdir=os.path.join(dir,'testbuild')
- if not os.path.exists(bdir):
- os.makedirs(bdir)
- self.test_bld=bld=Build.BuildContext(top_dir=dir,out_dir=bdir)
- bld.init_dirs()
- bld.progress_bar=0
- bld.targets='*'
- if kw['compile_filename']:
- node=bld.srcnode.make_node(kw['compile_filename'])
- node.write(kw['code'])
- bld.logger=self.logger
- bld.all_envs.update(self.all_envs)
- bld.env=kw['env']
- o=bld(features=kw['features'],source=kw['compile_filename'],target='testprog')
- for k,v in kw.items():
- setattr(o,k,v)
- self.to_log("==>\n%s\n<=="%kw['code'])
- bld.targets='*'
- ret=-1
- try:
- try:
- bld.compile()
- except Errors.WafError:
- ret='Test does not build: %s'%Utils.ex_stack()
- self.fatal(ret)
- else:
- ret=getattr(bld,'retval',0)
- finally:
- proj=ConfigSet.ConfigSet()
- proj['cache_run_c_code']=ret
- proj.store(os.path.join(dir,'cache_run_c_code'))
- return ret
-@conf
-def check_cxx(self,*k,**kw):
- kw['compiler']='cxx'
- return self.check(*k,**kw)
-@conf
-def check_cc(self,*k,**kw):
- kw['compiler']='c'
- return self.check(*k,**kw)
-@conf
-def define(self,key,val,quote=True):
- assert key and isinstance(key,str)
- if val is True:
- val=1
- elif val in(False,None):
- val=0
- if isinstance(val,int)or isinstance(val,float):
- s='%s=%s'
- else:
- s=quote and'%s="%s"'or'%s=%s'
- app=s%(key,str(val))
- ban=key+'='
- lst=self.env['DEFINES']
- for x in lst:
- if x.startswith(ban):
- lst[lst.index(x)]=app
- break
- else:
- self.env.append_value('DEFINES',app)
- self.env.append_unique(DEFKEYS,key)
-@conf
-def undefine(self,key):
- assert key and isinstance(key,str)
- ban=key+'='
- lst=[x for x in self.env['DEFINES']if not x.startswith(ban)]
- self.env['DEFINES']=lst
- self.env.append_unique(DEFKEYS,key)
-@conf
-def define_cond(self,key,val):
- assert key and isinstance(key,str)
- if val:
- self.define(key,1)
- else:
- self.undefine(key)
-@conf
-def is_defined(self,key):
- assert key and isinstance(key,str)
- ban=key+'='
- for x in self.env['DEFINES']:
- if x.startswith(ban):
- return True
- return False
-@conf
-def get_define(self,key):
- assert key and isinstance(key,str)
- ban=key+'='
- for x in self.env['DEFINES']:
- if x.startswith(ban):
- return x[len(ban):]
- return None
-@conf
-def have_define(self,key):
- return(self.env.HAVE_PAT or'HAVE_%s')%Utils.quote_define_name(key)
-@conf
-def write_config_header(self,configfile='',guard='',top=False,env=None,defines=True,headers=False,remove=True,define_prefix=''):
- if env:
- Logs.warn('Cannot pass env to write_config_header')
- if not configfile:configfile=WAF_CONFIG_H
- waf_guard=guard or'W_%s_WAF'%Utils.quote_define_name(configfile)
- node=top and self.bldnode or self.path.get_bld()
- node=node.make_node(configfile)
- node.parent.mkdir()
- lst=['/* WARNING! All changes made to this file will be lost! */\n']
- lst.append('#ifndef %s\n#define %s\n'%(waf_guard,waf_guard))
- lst.append(self.get_config_header(defines,headers,define_prefix=define_prefix))
- lst.append('\n#endif /* %s */\n'%waf_guard)
- node.write('\n'.join(lst))
- self.env.append_unique(Build.CFG_FILES,[node.abspath()])
- if remove:
- for key in self.env[DEFKEYS]:
- self.undefine(key)
- self.env[DEFKEYS]=[]
-@conf
-def get_config_header(self,defines=True,headers=False,define_prefix=''):
- lst=[]
- if headers:
- for x in self.env[INCKEYS]:
- lst.append('#include <%s>'%x)
- if defines:
- for x in self.env[DEFKEYS]:
- if self.is_defined(x):
- val=self.get_define(x)
- lst.append('#define %s %s'%(define_prefix+x,val))
- else:
- lst.append('/* #undef %s */'%(define_prefix+x))
- return"\n".join(lst)
-@conf
-def cc_add_flags(conf):
- conf.add_os_flags('CPPFLAGS','CFLAGS')
- conf.add_os_flags('CFLAGS')
-@conf
-def cxx_add_flags(conf):
- conf.add_os_flags('CPPFLAGS','CXXFLAGS')
- conf.add_os_flags('CXXFLAGS')
-@conf
-def link_add_flags(conf):
- conf.add_os_flags('LINKFLAGS')
- conf.add_os_flags('LDFLAGS','LINKFLAGS')
-@conf
-def cc_load_tools(conf):
- if not conf.env.DEST_OS:
- conf.env.DEST_OS=Utils.unversioned_sys_platform()
- conf.load('c')
-@conf
-def cxx_load_tools(conf):
- if not conf.env.DEST_OS:
- conf.env.DEST_OS=Utils.unversioned_sys_platform()
- conf.load('cxx')
-@conf
-def get_cc_version(conf,cc,gcc=False,icc=False):
- cmd=cc+['-dM','-E','-']
- env=conf.env.env or None
- try:
- p=Utils.subprocess.Popen(cmd,stdin=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE,stderr=Utils.subprocess.PIPE,env=env)
- p.stdin.write('\n')
- out=p.communicate()[0]
- except Exception:
- conf.fatal('Could not determine the compiler version %r'%cmd)
- if not isinstance(out,str):
- out=out.decode(sys.stdout.encoding or'iso8859-1')
- if gcc:
- if out.find('__INTEL_COMPILER')>=0:
- conf.fatal('The intel compiler pretends to be gcc')
- if out.find('__GNUC__')<0:
- conf.fatal('Could not determine the compiler type')
- if icc and out.find('__INTEL_COMPILER')<0:
- conf.fatal('Not icc/icpc')
- k={}
- if icc or gcc:
- out=out.splitlines()
- for line in out:
- lst=shlex.split(line)
- if len(lst)>2:
- key=lst[1]
- val=lst[2]
- k[key]=val
- def isD(var):
- return var in k
- def isT(var):
- return var in k and k[var]!='0'
- if not conf.env.DEST_OS:
- conf.env.DEST_OS=''
- for i in MACRO_TO_DESTOS:
- if isD(i):
- conf.env.DEST_OS=MACRO_TO_DESTOS[i]
- break
- else:
- if isD('__APPLE__')and isD('__MACH__'):
- conf.env.DEST_OS='darwin'
- elif isD('__unix__'):
- conf.env.DEST_OS='generic'
- if isD('__ELF__'):
- conf.env.DEST_BINFMT='elf'
- elif isD('__WINNT__')or isD('__CYGWIN__'):
- conf.env.DEST_BINFMT='pe'
- conf.env.LIBDIR=conf.env['PREFIX']+'/bin'
- elif isD('__APPLE__'):
- conf.env.DEST_BINFMT='mac-o'
- if not conf.env.DEST_BINFMT:
- conf.env.DEST_BINFMT=Utils.destos_to_binfmt(conf.env.DEST_OS)
- for i in MACRO_TO_DEST_CPU:
- if isD(i):
- conf.env.DEST_CPU=MACRO_TO_DEST_CPU[i]
- break
- Logs.debug('ccroot: dest platform: '+' '.join([conf.env[x]or'?'for x in('DEST_OS','DEST_BINFMT','DEST_CPU')]))
- if icc:
- ver=k['__INTEL_COMPILER']
- conf.env['CC_VERSION']=(ver[:-2],ver[-2],ver[-1])
- else:
- if isD('__clang__'):
- conf.env['CC_VERSION']=(k['__clang_major__'],k['__clang_minor__'],k['__clang_patchlevel__'])
- else:
- conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__'])
- return k
-@conf
-def get_xlc_version(conf,cc):
- cmd=cc+['-qversion']
- try:
- out,err=conf.cmd_and_log(cmd,output=0)
- except Errors.WafError:
- conf.fatal('Could not find xlc %r'%cmd)
- for v in(r"IBM XL C/C\+\+.* V(?P<major>\d*)\.(?P<minor>\d*)",):
- version_re=re.compile(v,re.I).search
- match=version_re(out or err)
- if match:
- k=match.groupdict()
- conf.env['CC_VERSION']=(k['major'],k['minor'])
- break
- else:
- conf.fatal('Could not determine the XLC version.')
-@conf
-def add_as_needed(self):
- if self.env.DEST_BINFMT=='elf'and'gcc'in(self.env.CXX_NAME,self.env.CC_NAME):
- self.env.append_unique('LINKFLAGS','--as-needed')
-class cfgtask(Task.TaskBase):
- def display(self):
- return''
- def runnable_status(self):
- return Task.RUN_ME
- def uid(self):
- return Utils.SIG_NIL
- def run(self):
- conf=self.conf
- bld=Build.BuildContext(top_dir=conf.srcnode.abspath(),out_dir=conf.bldnode.abspath())
- bld.env=conf.env
- bld.init_dirs()
- bld.in_msg=1
- bld.logger=self.logger
- try:
- bld.check(**self.args)
- except Exception:
- return 1
-@conf
-def multicheck(self,*k,**kw):
- self.start_msg(kw.get('msg','Executing %d configuration tests'%len(k)))
- class par(object):
- def __init__(self):
- self.keep=False
- self.cache_global=Options.cache_global
- self.nocache=Options.options.nocache
- self.returned_tasks=[]
- self.task_sigs={}
- def total(self):
- return len(tasks)
- def to_log(self,*k,**kw):
- return
- bld=par()
- tasks=[]
- for dct in k:
- x=cfgtask(bld=bld)
- tasks.append(x)
- x.args=dct
- x.bld=bld
- x.conf=self
- x.args=dct
- x.logger=Logs.make_mem_logger(str(id(x)),self.logger)
- def it():
- yield tasks
- while 1:
- yield[]
- p=Runner.Parallel(bld,Options.options.jobs)
- p.biter=it()
- p.start()
- for x in tasks:
- x.logger.memhandler.flush()
- for x in tasks:
- if x.hasrun!=Task.SUCCESS:
- self.end_msg(kw.get('errmsg','no'),color='YELLOW')
- self.fatal(kw.get('fatalmsg',None)or'One of the tests has failed, see the config.log for more information')
- self.end_msg('ok')
diff --git a/waflib/Tools/c_osx.py b/waflib/Tools/c_osx.py
deleted file mode 100644
index 579b2a7..0000000
--- a/waflib/Tools/c_osx.py
+++ /dev/null
@@ -1,120 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,shutil,sys,platform
-from waflib import TaskGen,Task,Build,Options,Utils,Errors
-from waflib.TaskGen import taskgen_method,feature,after_method,before_method
-app_info='''
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
-<plist version="0.9">
-<dict>
- <key>CFBundlePackageType</key>
- <string>APPL</string>
- <key>CFBundleGetInfoString</key>
- <string>Created by Waf</string>
- <key>CFBundleSignature</key>
- <string>????</string>
- <key>NOTE</key>
- <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
- <key>CFBundleExecutable</key>
- <string>%s</string>
-</dict>
-</plist>
-'''
-@feature('c','cxx')
-def set_macosx_deployment_target(self):
- if self.env['MACOSX_DEPLOYMENT_TARGET']:
- os.environ['MACOSX_DEPLOYMENT_TARGET']=self.env['MACOSX_DEPLOYMENT_TARGET']
- elif'MACOSX_DEPLOYMENT_TARGET'not in os.environ:
- if Utils.unversioned_sys_platform()=='darwin':
- os.environ['MACOSX_DEPLOYMENT_TARGET']='.'.join(platform.mac_ver()[0].split('.')[:2])
-@taskgen_method
-def create_bundle_dirs(self,name,out):
- bld=self.bld
- dir=out.parent.find_or_declare(name)
- dir.mkdir()
- macos=dir.find_or_declare(['Contents','MacOS'])
- macos.mkdir()
- return dir
-def bundle_name_for_output(out):
- name=out.name
- k=name.rfind('.')
- if k>=0:
- name=name[:k]+'.app'
- else:
- name=name+'.app'
- return name
-@feature('cprogram','cxxprogram')
-@after_method('apply_link')
-def create_task_macapp(self):
- if self.env['MACAPP']or getattr(self,'mac_app',False):
- out=self.link_task.outputs[0]
- name=bundle_name_for_output(out)
- dir=self.create_bundle_dirs(name,out)
- n1=dir.find_or_declare(['Contents','MacOS',out.name])
- self.apptask=self.create_task('macapp',self.link_task.outputs,n1)
- inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/MacOS/'%name
- self.bld.install_files(inst_to,n1,chmod=Utils.O755)
- if getattr(self,'mac_resources',None):
- res_dir=n1.parent.parent.make_node('Resources')
- inst_to=getattr(self,'install_path','/Applications')+'/%s/Resources'%name
- for x in self.to_list(self.mac_resources):
- node=self.path.find_node(x)
- if not node:
- raise Errors.WafError('Missing mac_resource %r in %r'%(x,self))
- parent=node.parent
- if os.path.isdir(node.abspath()):
- nodes=node.ant_glob('**')
- else:
- nodes=[node]
- for node in nodes:
- rel=node.path_from(parent)
- tsk=self.create_task('macapp',node,res_dir.make_node(rel))
- self.bld.install_as(inst_to+'/%s'%rel,node)
- if getattr(self.bld,'is_install',None):
- self.install_task.hasrun=Task.SKIP_ME
-@feature('cprogram','cxxprogram')
-@after_method('apply_link')
-def create_task_macplist(self):
- if self.env['MACAPP']or getattr(self,'mac_app',False):
- out=self.link_task.outputs[0]
- name=bundle_name_for_output(out)
- dir=self.create_bundle_dirs(name,out)
- n1=dir.find_or_declare(['Contents','Info.plist'])
- self.plisttask=plisttask=self.create_task('macplist',[],n1)
- if getattr(self,'mac_plist',False):
- node=self.path.find_resource(self.mac_plist)
- if node:
- plisttask.inputs.append(node)
- else:
- plisttask.code=self.mac_plist
- else:
- plisttask.code=app_info%self.link_task.outputs[0].name
- inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/'%name
- self.bld.install_files(inst_to,n1)
-@feature('cshlib','cxxshlib')
-@before_method('apply_link','propagate_uselib_vars')
-def apply_bundle(self):
- if self.env['MACBUNDLE']or getattr(self,'mac_bundle',False):
- self.env['LINKFLAGS_cshlib']=self.env['LINKFLAGS_cxxshlib']=[]
- self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['macbundle_PATTERN']
- use=self.use=self.to_list(getattr(self,'use',[]))
- if not'MACBUNDLE'in use:
- use.append('MACBUNDLE')
-app_dirs=['Contents','Contents/MacOS','Contents/Resources']
-class macapp(Task.Task):
- color='PINK'
- def run(self):
- self.outputs[0].parent.mkdir()
- shutil.copy2(self.inputs[0].srcpath(),self.outputs[0].abspath())
-class macplist(Task.Task):
- color='PINK'
- ext_in=['.bin']
- def run(self):
- if getattr(self,'code',None):
- txt=self.code
- else:
- txt=self.inputs[0].read()
- self.outputs[0].write(txt)
diff --git a/waflib/Tools/c_preproc.py b/waflib/Tools/c_preproc.py
deleted file mode 100644
index 9dfd8ca..0000000
--- a/waflib/Tools/c_preproc.py
+++ /dev/null
@@ -1,604 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import re,string,traceback
-from waflib import Logs,Utils,Errors
-from waflib.Logs import debug,error
-class PreprocError(Errors.WafError):
- pass
-POPFILE='-'
-recursion_limit=150
-go_absolute=False
-standard_includes=['/usr/include']
-if Utils.is_win32:
- standard_includes=[]
-use_trigraphs=0
-strict_quotes=0
-g_optrans={'not':'!','and':'&&','bitand':'&','and_eq':'&=','or':'||','bitor':'|','or_eq':'|=','xor':'^','xor_eq':'^=','compl':'~',}
-re_lines=re.compile('^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
-re_mac=re.compile("^[a-zA-Z_]\w*")
-re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
-re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE)
-re_nl=re.compile('\\\\\r*\n',re.MULTILINE)
-re_cpp=re.compile(r"""(/\*[^*]*\*+([^/*][^*]*\*+)*/)|//[^\n]*|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^/"'\\]*)""",re.MULTILINE)
-trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')]
-chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39}
-NUM='i'
-OP='O'
-IDENT='T'
-STR='s'
-CHAR='c'
-tok_types=[NUM,STR,IDENT,OP]
-exp_types=[r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',]
-re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M)
-accepted='a'
-ignored='i'
-undefined='u'
-skipped='s'
-def repl(m):
- s=m.group(1)
- if s:
- return' '
- return m.group(3)or''
-def filter_comments(filename):
- code=Utils.readf(filename)
- if use_trigraphs:
- for(a,b)in trig_def:code=code.split(a).join(b)
- code=re_nl.sub('',code)
- code=re_cpp.sub(repl,code)
- return[(m.group(2),m.group(3))for m in re.finditer(re_lines,code)]
-prec={}
-ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',',']
-for x in range(len(ops)):
- syms=ops[x]
- for u in syms.split():
- prec[u]=x
-def trimquotes(s):
- if not s:return''
- s=s.rstrip()
- if s[0]=="'"and s[-1]=="'":return s[1:-1]
- return s
-def reduce_nums(val_1,val_2,val_op):
- try:a=0+val_1
- except TypeError:a=int(val_1)
- try:b=0+val_2
- except TypeError:b=int(val_2)
- d=val_op
- if d=='%':c=a%b
- elif d=='+':c=a+b
- elif d=='-':c=a-b
- elif d=='*':c=a*b
- elif d=='/':c=a/b
- elif d=='^':c=a^b
- elif d=='|':c=a|b
- elif d=='||':c=int(a or b)
- elif d=='&':c=a&b
- elif d=='&&':c=int(a and b)
- elif d=='==':c=int(a==b)
- elif d=='!=':c=int(a!=b)
- elif d=='<=':c=int(a<=b)
- elif d=='<':c=int(a<b)
- elif d=='>':c=int(a>b)
- elif d=='>=':c=int(a>=b)
- elif d=='^':c=int(a^b)
- elif d=='<<':c=a<<b
- elif d=='>>':c=a>>b
- else:c=0
- return c
-def get_num(lst):
- if not lst:raise PreprocError("empty list for get_num")
- (p,v)=lst[0]
- if p==OP:
- if v=='(':
- count_par=1
- i=1
- while i<len(lst):
- (p,v)=lst[i]
- if p==OP:
- if v==')':
- count_par-=1
- if count_par==0:
- break
- elif v=='(':
- count_par+=1
- i+=1
- else:
- raise PreprocError("rparen expected %r"%lst)
- (num,_)=get_term(lst[1:i])
- return(num,lst[i+1:])
- elif v=='+':
- return get_num(lst[1:])
- elif v=='-':
- num,lst=get_num(lst[1:])
- return(reduce_nums('-1',num,'*'),lst)
- elif v=='!':
- num,lst=get_num(lst[1:])
- return(int(not int(num)),lst)
- elif v=='~':
- num,lst=get_num(lst[1:])
- return(~int(num),lst)
- else:
- raise PreprocError("Invalid op token %r for get_num"%lst)
- elif p==NUM:
- return v,lst[1:]
- elif p==IDENT:
- return 0,lst[1:]
- else:
- raise PreprocError("Invalid token %r for get_num"%lst)
-def get_term(lst):
- if not lst:raise PreprocError("empty list for get_term")
- num,lst=get_num(lst)
- if not lst:
- return(num,[])
- (p,v)=lst[0]
- if p==OP:
- if v==',':
- return get_term(lst[1:])
- elif v=='?':
- count_par=0
- i=1
- while i<len(lst):
- (p,v)=lst[i]
- if p==OP:
- if v==')':
- count_par-=1
- elif v=='(':
- count_par+=1
- elif v==':':
- if count_par==0:
- break
- i+=1
- else:
- raise PreprocError("rparen expected %r"%lst)
- if int(num):
- return get_term(lst[1:i])
- else:
- return get_term(lst[i+1:])
- else:
- num2,lst=get_num(lst[1:])
- if not lst:
- num2=reduce_nums(num,num2,v)
- return get_term([(NUM,num2)]+lst)
- p2,v2=lst[0]
- if p2!=OP:
- raise PreprocError("op expected %r"%lst)
- if prec[v2]>=prec[v]:
- num2=reduce_nums(num,num2,v)
- return get_term([(NUM,num2)]+lst)
- else:
- num3,lst=get_num(lst[1:])
- num3=reduce_nums(num2,num3,v2)
- return get_term([(NUM,num),(p,v),(NUM,num3)]+lst)
- raise PreprocError("cannot reduce %r"%lst)
-def reduce_eval(lst):
- num,lst=get_term(lst)
- return(NUM,num)
-def stringize(lst):
- lst=[str(v2)for(p2,v2)in lst]
- return"".join(lst)
-def paste_tokens(t1,t2):
- p1=None
- if t1[0]==OP and t2[0]==OP:
- p1=OP
- elif t1[0]==IDENT and(t2[0]==IDENT or t2[0]==NUM):
- p1=IDENT
- elif t1[0]==NUM and t2[0]==NUM:
- p1=NUM
- if not p1:
- raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2))
- return(p1,t1[1]+t2[1])
-def reduce_tokens(lst,defs,ban=[]):
- i=0
- while i<len(lst):
- (p,v)=lst[i]
- if p==IDENT and v=="defined":
- del lst[i]
- if i<len(lst):
- (p2,v2)=lst[i]
- if p2==IDENT:
- if v2 in defs:
- lst[i]=(NUM,1)
- else:
- lst[i]=(NUM,0)
- elif p2==OP and v2=='(':
- del lst[i]
- (p2,v2)=lst[i]
- del lst[i]
- if v2 in defs:
- lst[i]=(NUM,1)
- else:
- lst[i]=(NUM,0)
- else:
- raise PreprocError("Invalid define expression %r"%lst)
- elif p==IDENT and v in defs:
- if isinstance(defs[v],str):
- a,b=extract_macro(defs[v])
- defs[v]=b
- macro_def=defs[v]
- to_add=macro_def[1]
- if isinstance(macro_def[0],list):
- del lst[i]
- accu=to_add[:]
- reduce_tokens(accu,defs,ban+[v])
- for x in range(len(accu)):
- lst.insert(i,accu[x])
- i+=1
- else:
- args=[]
- del lst[i]
- if i>=len(lst):
- raise PreprocError("expected '(' after %r (got nothing)"%v)
- (p2,v2)=lst[i]
- if p2!=OP or v2!='(':
- raise PreprocError("expected '(' after %r"%v)
- del lst[i]
- one_param=[]
- count_paren=0
- while i<len(lst):
- p2,v2=lst[i]
- del lst[i]
- if p2==OP and count_paren==0:
- if v2=='(':
- one_param.append((p2,v2))
- count_paren+=1
- elif v2==')':
- if one_param:args.append(one_param)
- break
- elif v2==',':
- if not one_param:raise PreprocError("empty param in funcall %s"%p)
- args.append(one_param)
- one_param=[]
- else:
- one_param.append((p2,v2))
- else:
- one_param.append((p2,v2))
- if v2=='(':count_paren+=1
- elif v2==')':count_paren-=1
- else:
- raise PreprocError('malformed macro')
- accu=[]
- arg_table=macro_def[0]
- j=0
- while j<len(to_add):
- (p2,v2)=to_add[j]
- if p2==OP and v2=='#':
- if j+1<len(to_add)and to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
- toks=args[arg_table[to_add[j+1][1]]]
- accu.append((STR,stringize(toks)))
- j+=1
- else:
- accu.append((p2,v2))
- elif p2==OP and v2=='##':
- if accu and j+1<len(to_add):
- t1=accu[-1]
- if to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
- toks=args[arg_table[to_add[j+1][1]]]
- if toks:
- accu[-1]=paste_tokens(t1,toks[0])
- accu.extend(toks[1:])
- else:
- accu.append((p2,v2))
- accu.extend(toks)
- elif to_add[j+1][0]==IDENT and to_add[j+1][1]=='__VA_ARGS__':
- va_toks=[]
- st=len(macro_def[0])
- pt=len(args)
- for x in args[pt-st+1:]:
- va_toks.extend(x)
- va_toks.append((OP,','))
- if va_toks:va_toks.pop()
- if len(accu)>1:
- (p3,v3)=accu[-1]
- (p4,v4)=accu[-2]
- if v3=='##':
- accu.pop()
- if v4==','and pt<st:
- accu.pop()
- accu+=va_toks
- else:
- accu[-1]=paste_tokens(t1,to_add[j+1])
- j+=1
- else:
- accu.append((p2,v2))
- elif p2==IDENT and v2 in arg_table:
- toks=args[arg_table[v2]]
- reduce_tokens(toks,defs,ban+[v])
- accu.extend(toks)
- else:
- accu.append((p2,v2))
- j+=1
- reduce_tokens(accu,defs,ban+[v])
- for x in range(len(accu)-1,-1,-1):
- lst.insert(i,accu[x])
- i+=1
-def eval_macro(lst,defs):
- reduce_tokens(lst,defs,[])
- if not lst:raise PreprocError("missing tokens to evaluate")
- (p,v)=reduce_eval(lst)
- return int(v)!=0
-def extract_macro(txt):
- t=tokenize(txt)
- if re_fun.search(txt):
- p,name=t[0]
- p,v=t[1]
- if p!=OP:raise PreprocError("expected open parenthesis")
- i=1
- pindex=0
- params={}
- prev='('
- while 1:
- i+=1
- p,v=t[i]
- if prev=='(':
- if p==IDENT:
- params[v]=pindex
- pindex+=1
- prev=p
- elif p==OP and v==')':
- break
- else:
- raise PreprocError("unexpected token (3)")
- elif prev==IDENT:
- if p==OP and v==',':
- prev=v
- elif p==OP and v==')':
- break
- else:
- raise PreprocError("comma or ... expected")
- elif prev==',':
- if p==IDENT:
- params[v]=pindex
- pindex+=1
- prev=p
- elif p==OP and v=='...':
- raise PreprocError("not implemented (1)")
- else:
- raise PreprocError("comma or ... expected (2)")
- elif prev=='...':
- raise PreprocError("not implemented (2)")
- else:
- raise PreprocError("unexpected else")
- return(name,[params,t[i+1:]])
- else:
- (p,v)=t[0]
- return(v,[[],t[1:]])
-re_include=re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
-def extract_include(txt,defs):
- m=re_include.search(txt)
- if m:
- if m.group('a'):return'<',m.group('a')
- if m.group('b'):return'"',m.group('b')
- toks=tokenize(txt)
- reduce_tokens(toks,defs,['waf_include'])
- if not toks:
- raise PreprocError("could not parse include %s"%txt)
- if len(toks)==1:
- if toks[0][0]==STR:
- return'"',toks[0][1]
- else:
- if toks[0][1]=='<'and toks[-1][1]=='>':
- return stringize(toks).lstrip('<').rstrip('>')
- raise PreprocError("could not parse include %s."%txt)
-def parse_char(txt):
- if not txt:raise PreprocError("attempted to parse a null char")
- if txt[0]!='\\':
- return ord(txt)
- c=txt[1]
- if c=='x':
- if len(txt)==4 and txt[3]in string.hexdigits:return int(txt[2:],16)
- return int(txt[2:],16)
- elif c.isdigit():
- if c=='0'and len(txt)==2:return 0
- for i in 3,2,1:
- if len(txt)>i and txt[1:1+i].isdigit():
- return(1+i,int(txt[1:1+i],8))
- else:
- try:return chr_esc[c]
- except KeyError:raise PreprocError("could not parse char literal '%s'"%txt)
-def tokenize(s):
- return tokenize_private(s)[:]
-@Utils.run_once
-def tokenize_private(s):
- ret=[]
- for match in re_clexer.finditer(s):
- m=match.group
- for name in tok_types:
- v=m(name)
- if v:
- if name==IDENT:
- try:v=g_optrans[v];name=OP
- except KeyError:
- if v.lower()=="true":
- v=1
- name=NUM
- elif v.lower()=="false":
- v=0
- name=NUM
- elif name==NUM:
- if m('oct'):v=int(v,8)
- elif m('hex'):v=int(m('hex'),16)
- elif m('n0'):v=m('n0')
- else:
- v=m('char')
- if v:v=parse_char(v)
- else:v=m('n2')or m('n4')
- elif name==OP:
- if v=='%:':v='#'
- elif v=='%:%:':v='##'
- elif name==STR:
- v=v[1:-1]
- ret.append((name,v))
- break
- return ret
-@Utils.run_once
-def define_name(line):
- return re_mac.match(line).group(0)
-class c_parser(object):
- def __init__(self,nodepaths=None,defines=None):
- self.lines=[]
- if defines is None:
- self.defs={}
- else:
- self.defs=dict(defines)
- self.state=[]
- self.count_files=0
- self.currentnode_stack=[]
- self.nodepaths=nodepaths or[]
- self.nodes=[]
- self.names=[]
- self.curfile=''
- self.ban_includes=set([])
- def cached_find_resource(self,node,filename):
- try:
- nd=node.ctx.cache_nd
- except AttributeError:
- nd=node.ctx.cache_nd={}
- tup=(node,filename)
- try:
- return nd[tup]
- except KeyError:
- ret=node.find_resource(filename)
- if ret:
- if getattr(ret,'children',None):
- ret=None
- elif ret.is_child_of(node.ctx.bldnode):
- tmp=node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode))
- if tmp and getattr(tmp,'children',None):
- ret=None
- nd[tup]=ret
- return ret
- def tryfind(self,filename):
- self.curfile=filename
- found=self.cached_find_resource(self.currentnode_stack[-1],filename)
- for n in self.nodepaths:
- if found:
- break
- found=self.cached_find_resource(n,filename)
- if found:
- self.nodes.append(found)
- if filename[-4:]!='.moc':
- self.addlines(found)
- else:
- if not filename in self.names:
- self.names.append(filename)
- return found
- def addlines(self,node):
- self.currentnode_stack.append(node.parent)
- filepath=node.abspath()
- self.count_files+=1
- if self.count_files>recursion_limit:
- raise PreprocError("recursion limit exceeded")
- pc=self.parse_cache
- debug('preproc: reading file %r',filepath)
- try:
- lns=pc[filepath]
- except KeyError:
- pass
- else:
- self.lines.extend(lns)
- return
- try:
- lines=filter_comments(filepath)
- lines.append((POPFILE,''))
- lines.reverse()
- pc[filepath]=lines
- self.lines.extend(lines)
- except IOError:
- raise PreprocError("could not read the file %s"%filepath)
- except Exception:
- if Logs.verbose>0:
- error("parsing %s failed"%filepath)
- traceback.print_exc()
- def start(self,node,env):
- debug('preproc: scanning %s (in %s)',node.name,node.parent.name)
- bld=node.ctx
- try:
- self.parse_cache=bld.parse_cache
- except AttributeError:
- bld.parse_cache={}
- self.parse_cache=bld.parse_cache
- self.addlines(node)
- if env['DEFINES']:
- try:
- lst=['%s %s'%(x[0],trimquotes('='.join(x[1:])))for x in[y.split('=')for y in env['DEFINES']]]
- lst.reverse()
- self.lines.extend([('define',x)for x in lst])
- except AttributeError:
- pass
- while self.lines:
- (token,line)=self.lines.pop()
- if token==POPFILE:
- self.count_files-=1
- self.currentnode_stack.pop()
- continue
- try:
- ve=Logs.verbose
- if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state)
- state=self.state
- if token[:2]=='if':
- state.append(undefined)
- elif token=='endif':
- state.pop()
- if token[0]!='e':
- if skipped in self.state or ignored in self.state:
- continue
- if token=='if':
- ret=eval_macro(tokenize(line),self.defs)
- if ret:state[-1]=accepted
- else:state[-1]=ignored
- elif token=='ifdef':
- m=re_mac.match(line)
- if m and m.group(0)in self.defs:state[-1]=accepted
- else:state[-1]=ignored
- elif token=='ifndef':
- m=re_mac.match(line)
- if m and m.group(0)in self.defs:state[-1]=ignored
- else:state[-1]=accepted
- elif token=='include'or token=='import':
- (kind,inc)=extract_include(line,self.defs)
- if inc in self.ban_includes:
- continue
- if token=='import':self.ban_includes.add(inc)
- if ve:debug('preproc: include found %s (%s) ',inc,kind)
- if kind=='"'or not strict_quotes:
- self.tryfind(inc)
- elif token=='elif':
- if state[-1]==accepted:
- state[-1]=skipped
- elif state[-1]==ignored:
- if eval_macro(tokenize(line),self.defs):
- state[-1]=accepted
- elif token=='else':
- if state[-1]==accepted:state[-1]=skipped
- elif state[-1]==ignored:state[-1]=accepted
- elif token=='define':
- try:
- self.defs[define_name(line)]=line
- except Exception:
- raise PreprocError("Invalid define line %s"%line)
- elif token=='undef':
- m=re_mac.match(line)
- if m and m.group(0)in self.defs:
- self.defs.__delitem__(m.group(0))
- elif token=='pragma':
- if re_pragma_once.match(line.lower()):
- self.ban_includes.add(self.curfile)
- except Exception ,e:
- if Logs.verbose:
- debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack())
-def scan(task):
- global go_absolute
- try:
- incn=task.generator.includes_nodes
- except AttributeError:
- raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": '%task.generator)
- if go_absolute:
- nodepaths=incn+[task.generator.bld.root.find_dir(x)for x in standard_includes]
- else:
- nodepaths=[x for x in incn if x.is_child_of(x.ctx.srcnode)or x.is_child_of(x.ctx.bldnode)]
- tmp=c_parser(nodepaths)
- tmp.start(task.inputs[0],task.env)
- if Logs.verbose:
- debug('deps: deps for %r: %r; unresolved %r'%(task.inputs,tmp.nodes,tmp.names))
- return(tmp.nodes,tmp.names)
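The scan() function just above is the header-dependency scanner that waf's compiled task classes plug in (the cxx tool further down in this diff sets scan=c_preproc.scan). A minimal sketch of that hook-up, assuming a hypothetical task class name and command string:

    from waflib import Task
    from waflib.Tools import c_preproc

    class my_compile(Task.Task):
        # hypothetical command; the real tools define their own run_str
        run_str = '${CC} ${CFLAGS} ${CPPPATH_ST:INCPATHS} ${SRC} -o ${TGT}'
        # reuse the preprocessor-based scanner shown above; waf calls it per
        # task input and expects the (nodes, names) tuple that scan() returns
        scan = c_preproc.scan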
diff --git a/waflib/Tools/c_tests.py b/waflib/Tools/c_tests.py
deleted file mode 100644
index f275977..0000000
--- a/waflib/Tools/c_tests.py
+++ /dev/null
@@ -1,153 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-from waflib import Task
-from waflib.Configure import conf
-from waflib.TaskGen import feature,before_method,after_method
-import sys
-LIB_CODE='''
-#ifdef _MSC_VER
-#define testEXPORT __declspec(dllexport)
-#else
-#define testEXPORT
-#endif
-testEXPORT int lib_func(void) { return 9; }
-'''
-MAIN_CODE='''
-#ifdef _MSC_VER
-#define testEXPORT __declspec(dllimport)
-#else
-#define testEXPORT
-#endif
-testEXPORT int lib_func(void);
-int main(int argc, char **argv) {
- (void)argc; (void)argv;
- return !(lib_func() == 9);
-}
-'''
-@feature('link_lib_test')
-@before_method('process_source')
-def link_lib_test_fun(self):
- def write_test_file(task):
- task.outputs[0].write(task.generator.code)
- rpath=[]
- if getattr(self,'add_rpath',False):
- rpath=[self.bld.path.get_bld().abspath()]
- mode=self.mode
- m='%s %s'%(mode,mode)
- ex=self.test_exec and'test_exec'or''
- bld=self.bld
- bld(rule=write_test_file,target='test.'+mode,code=LIB_CODE)
- bld(rule=write_test_file,target='main.'+mode,code=MAIN_CODE)
- bld(features='%sshlib'%m,source='test.'+mode,target='test')
- bld(features='%sprogram %s'%(m,ex),source='main.'+mode,target='app',use='test',rpath=rpath)
-@conf
-def check_library(self,mode=None,test_exec=True):
- if not mode:
- mode='c'
- if self.env.CXX:
- mode='cxx'
- self.check(compile_filename=[],features='link_lib_test',msg='Checking for libraries',mode=mode,test_exec=test_exec,)
-INLINE_CODE='''
-typedef int foo_t;
-static %s foo_t static_foo () {return 0; }
-%s foo_t foo () {
- return 0;
-}
-'''
-INLINE_VALUES=['inline','__inline__','__inline']
-@conf
-def check_inline(self,**kw):
- self.start_msg('Checking for inline')
- if not'define_name'in kw:
- kw['define_name']='INLINE_MACRO'
- if not'features'in kw:
- if self.env.CXX:
- kw['features']=['cxx']
- else:
- kw['features']=['c']
- for x in INLINE_VALUES:
- kw['fragment']=INLINE_CODE%(x,x)
- try:
- self.check(**kw)
- except self.errors.ConfigurationError:
- continue
- else:
- self.end_msg(x)
- if x!='inline':
- self.define('inline',x,quote=False)
- return x
- self.fatal('could not use inline functions')
-LARGE_FRAGMENT='''#include <unistd.h>
-int main(int argc, char **argv) {
- (void)argc; (void)argv;
- return !(sizeof(off_t) >= 8);
-}
-'''
-@conf
-def check_large_file(self,**kw):
- if not'define_name'in kw:
- kw['define_name']='HAVE_LARGEFILE'
- if not'execute'in kw:
- kw['execute']=True
- if not'features'in kw:
- if self.env.CXX:
- kw['features']=['cxx','cxxprogram']
- else:
- kw['features']=['c','cprogram']
- kw['fragment']=LARGE_FRAGMENT
- kw['msg']='Checking for large file support'
- ret=True
- try:
- if self.env.DEST_BINFMT!='pe':
- ret=self.check(**kw)
- except self.errors.ConfigurationError:
- pass
- else:
- if ret:
- return True
- kw['msg']='Checking for -D_FILE_OFFSET_BITS=64'
- kw['defines']=['_FILE_OFFSET_BITS=64']
- try:
- ret=self.check(**kw)
- except self.errors.ConfigurationError:
- pass
- else:
- self.define('_FILE_OFFSET_BITS',64)
- return ret
- self.fatal('There is no support for large files')
-ENDIAN_FRAGMENT='''
-short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
-short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
-int use_ascii (int i) {
- return ascii_mm[i] + ascii_ii[i];
-}
-short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
-short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 };
-int use_ebcdic (int i) {
- return ebcdic_mm[i] + ebcdic_ii[i];
-}
-extern int foo;
-'''
-class grep_for_endianness(Task.Task):
- color='PINK'
- def run(self):
- txt=self.inputs[0].read(flags='rb').decode('iso8859-1')
- if txt.find('LiTTleEnDian')>-1:
- self.generator.tmp.append('little')
- elif txt.find('BIGenDianSyS')>-1:
- self.generator.tmp.append('big')
- else:
- return-1
-@feature('grep_for_endianness')
-@after_method('process_source')
-def grep_for_endianness_fun(self):
- self.create_task('grep_for_endianness',self.compiled_tasks[0].outputs[0])
-@conf
-def check_endianness(self):
- tmp=[]
- def check_msg(self):
- return tmp[0]
- self.check(fragment=ENDIAN_FRAGMENT,features='c grep_for_endianness',msg="Checking for endianness",define='ENDIANNESS',tmp=tmp,okmsg=check_msg)
- return tmp[0]
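The @conf helpers above (check_library, check_inline, check_large_file, check_endianness) register themselves on the configuration context, so a project wscript calls them directly from configure(). A hedged sketch, assuming a hypothetical wscript with a C compiler configured first:

    def configure(conf):
        conf.load('compiler_c')
        conf.check_inline()       # defines INLINE_MACRO; returns the accepted keyword
        conf.check_large_file()   # may add _FILE_OFFSET_BITS=64 as tested above
        conf.check_endianness()   # runs ENDIAN_FRAGMENT; returns 'little' or 'big'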
diff --git a/waflib/Tools/ccroot.py b/waflib/Tools/ccroot.py
deleted file mode 100644
index 18c57ce..0000000
--- a/waflib/Tools/ccroot.py
+++ /dev/null
@@ -1,391 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os
-from waflib import Task,Utils,Node,Errors
-from waflib.TaskGen import after_method,before_method,feature,taskgen_method,extension
-from waflib.Tools import c_aliases,c_preproc,c_config,c_osx,c_tests
-from waflib.Configure import conf
-SYSTEM_LIB_PATHS=['/usr/lib64','/usr/lib','/usr/local/lib64','/usr/local/lib']
-USELIB_VARS=Utils.defaultdict(set)
-USELIB_VARS['c']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CCDEPS','CFLAGS','ARCH'])
-USELIB_VARS['cxx']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CXXDEPS','CXXFLAGS','ARCH'])
-USELIB_VARS['d']=set(['INCLUDES','DFLAGS'])
-USELIB_VARS['includes']=set(['INCLUDES','FRAMEWORKPATH','ARCH'])
-USELIB_VARS['cprogram']=USELIB_VARS['cxxprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH'])
-USELIB_VARS['cshlib']=USELIB_VARS['cxxshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH'])
-USELIB_VARS['cstlib']=USELIB_VARS['cxxstlib']=set(['ARFLAGS','LINKDEPS'])
-USELIB_VARS['dprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
-USELIB_VARS['dshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
-USELIB_VARS['dstlib']=set(['ARFLAGS','LINKDEPS'])
-USELIB_VARS['asm']=set(['ASFLAGS'])
-@taskgen_method
-def create_compiled_task(self,name,node):
- out='%s.%d.o'%(node.name,self.idx)
- task=self.create_task(name,node,node.parent.find_or_declare(out))
- try:
- self.compiled_tasks.append(task)
- except AttributeError:
- self.compiled_tasks=[task]
- return task
-@taskgen_method
-def to_incnodes(self,inlst):
- lst=[]
- seen=set([])
- for x in self.to_list(inlst):
- if x in seen or not x:
- continue
- seen.add(x)
- if isinstance(x,Node.Node):
- lst.append(x)
- else:
- if os.path.isabs(x):
- lst.append(self.bld.root.make_node(x)or x)
- else:
- if x[0]=='#':
- p=self.bld.bldnode.make_node(x[1:])
- v=self.bld.srcnode.make_node(x[1:])
- else:
- p=self.path.get_bld().make_node(x)
- v=self.path.make_node(x)
- if p.is_child_of(self.bld.bldnode):
- p.mkdir()
- lst.append(p)
- lst.append(v)
- return lst
-@feature('c','cxx','d','asm','fc','includes')
-@after_method('propagate_uselib_vars','process_source')
-def apply_incpaths(self):
- lst=self.to_incnodes(self.to_list(getattr(self,'includes',[]))+self.env['INCLUDES'])
- self.includes_nodes=lst
- self.env['INCPATHS']=[x.abspath()for x in lst]
-class link_task(Task.Task):
- color='YELLOW'
- inst_to=None
- chmod=Utils.O755
- def add_target(self,target):
- if isinstance(target,str):
- pattern=self.env[self.__class__.__name__+'_PATTERN']
- if not pattern:
- pattern='%s'
- folder,name=os.path.split(target)
- if self.__class__.__name__.find('shlib')>0:
- if self.env.DEST_BINFMT=='pe'and getattr(self.generator,'vnum',None):
- name=name+'-'+self.generator.vnum.split('.')[0]
- tmp=folder+os.sep+pattern%name
- target=self.generator.path.find_or_declare(tmp)
- self.set_outputs(target)
-class stlink_task(link_task):
- run_str='${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
-def rm_tgt(cls):
- old=cls.run
- def wrap(self):
- try:os.remove(self.outputs[0].abspath())
- except OSError:pass
- return old(self)
- setattr(cls,'run',wrap)
-rm_tgt(stlink_task)
-@feature('c','cxx','d','fc','asm')
-@after_method('process_source')
-def apply_link(self):
- for x in self.features:
- if x=='cprogram'and'cxx'in self.features:
- x='cxxprogram'
- elif x=='cshlib'and'cxx'in self.features:
- x='cxxshlib'
- if x in Task.classes:
- if issubclass(Task.classes[x],link_task):
- link=x
- break
- else:
- return
- objs=[t.outputs[0]for t in getattr(self,'compiled_tasks',[])]
- self.link_task=self.create_task(link,objs)
- self.link_task.add_target(self.target)
- try:
- inst_to=self.install_path
- except AttributeError:
- inst_to=self.link_task.__class__.inst_to
- if inst_to:
- self.install_task=self.bld.install_files(inst_to,self.link_task.outputs[:],env=self.env,chmod=self.link_task.chmod)
-@taskgen_method
-def use_rec(self,name,**kw):
- if name in self.tmp_use_not or name in self.tmp_use_seen:
- return
- try:
- y=self.bld.get_tgen_by_name(name)
- except Errors.WafError:
- self.uselib.append(name)
- self.tmp_use_not.add(name)
- return
- self.tmp_use_seen.append(name)
- y.post()
- y.tmp_use_objects=objects=kw.get('objects',True)
- y.tmp_use_stlib=stlib=kw.get('stlib',True)
- try:
- link_task=y.link_task
- except AttributeError:
- y.tmp_use_var=''
- else:
- objects=False
- if not isinstance(link_task,stlink_task):
- stlib=False
- y.tmp_use_var='LIB'
- else:
- y.tmp_use_var='STLIB'
- p=self.tmp_use_prec
- for x in self.to_list(getattr(y,'use',[])):
- try:
- p[x].append(name)
- except KeyError:
- p[x]=[name]
- self.use_rec(x,objects=objects,stlib=stlib)
-@feature('c','cxx','d','use','fc')
-@before_method('apply_incpaths','propagate_uselib_vars')
-@after_method('apply_link','process_source')
-def process_use(self):
- use_not=self.tmp_use_not=set([])
- self.tmp_use_seen=[]
- use_prec=self.tmp_use_prec={}
- self.uselib=self.to_list(getattr(self,'uselib',[]))
- self.includes=self.to_list(getattr(self,'includes',[]))
- names=self.to_list(getattr(self,'use',[]))
- for x in names:
- self.use_rec(x)
- for x in use_not:
- if x in use_prec:
- del use_prec[x]
- out=[]
- tmp=[]
- for x in self.tmp_use_seen:
- for k in use_prec.values():
- if x in k:
- break
- else:
- tmp.append(x)
- while tmp:
- e=tmp.pop()
- out.append(e)
- try:
- nlst=use_prec[e]
- except KeyError:
- pass
- else:
- del use_prec[e]
- for x in nlst:
- for y in use_prec:
- if x in use_prec[y]:
- break
- else:
- tmp.append(x)
- if use_prec:
- raise Errors.WafError('Cycle detected in the use processing %r'%use_prec)
- out.reverse()
- link_task=getattr(self,'link_task',None)
- for x in out:
- y=self.bld.get_tgen_by_name(x)
- var=y.tmp_use_var
- if var and link_task:
- if var=='LIB'or y.tmp_use_stlib:
- self.env.append_value(var,[y.target[y.target.rfind(os.sep)+1:]])
- self.link_task.dep_nodes.extend(y.link_task.outputs)
- tmp_path=y.link_task.outputs[0].parent.path_from(self.bld.bldnode)
- self.env.append_value(var+'PATH',[tmp_path])
- else:
- if y.tmp_use_objects:
- self.add_objects_from_tgen(y)
- if getattr(y,'export_includes',None):
- self.includes.extend(y.to_incnodes(y.export_includes))
- for x in names:
- try:
- y=self.bld.get_tgen_by_name(x)
- except Exception:
- if not self.env['STLIB_'+x]and not x in self.uselib:
- self.uselib.append(x)
- else:
- for k in self.to_list(getattr(y,'uselib',[])):
- if not self.env['STLIB_'+k]and not k in self.uselib:
- self.uselib.append(k)
-@taskgen_method
-def accept_node_to_link(self,node):
- return not node.name.endswith('.pdb')
-@taskgen_method
-def add_objects_from_tgen(self,tg):
- try:
- link_task=self.link_task
- except AttributeError:
- pass
- else:
- for tsk in getattr(tg,'compiled_tasks',[]):
- for x in tsk.outputs:
- if self.accept_node_to_link(x):
- link_task.inputs.append(x)
-@taskgen_method
-def get_uselib_vars(self):
- _vars=set([])
- for x in self.features:
- if x in USELIB_VARS:
- _vars|=USELIB_VARS[x]
- return _vars
-@feature('c','cxx','d','fc','javac','cs','uselib','asm')
-@after_method('process_use')
-def propagate_uselib_vars(self):
- _vars=self.get_uselib_vars()
- env=self.env
- for x in _vars:
- y=x.lower()
- env.append_unique(x,self.to_list(getattr(self,y,[])))
- for x in self.features:
- for var in _vars:
- compvar='%s_%s'%(var,x)
- env.append_value(var,env[compvar])
- for x in self.to_list(getattr(self,'uselib',[])):
- for v in _vars:
- env.append_value(v,env[v+'_'+x])
-@feature('cshlib','cxxshlib','fcshlib')
-@after_method('apply_link')
-def apply_implib(self):
- if not self.env.DEST_BINFMT=='pe':
- return
- dll=self.link_task.outputs[0]
- if isinstance(self.target,Node.Node):
- name=self.target.name
- else:
- name=os.path.split(self.target)[1]
- implib=self.env['implib_PATTERN']%name
- implib=dll.parent.find_or_declare(implib)
- self.env.append_value('LINKFLAGS',self.env['IMPLIB_ST']%implib.bldpath())
- self.link_task.outputs.append(implib)
- if getattr(self,'defs',None)and self.env.DEST_BINFMT=='pe':
- node=self.path.find_resource(self.defs)
- if not node:
- raise Errors.WafError('invalid def file %r'%self.defs)
- if'msvc'in(self.env.CC_NAME,self.env.CXX_NAME):
- self.env.append_value('LINKFLAGS','/def:%s'%node.path_from(self.bld.bldnode))
- self.link_task.dep_nodes.append(node)
- else:
- self.link_task.inputs.append(node)
- try:
- inst_to=self.install_path
- except AttributeError:
- inst_to=self.link_task.__class__.inst_to
- if not inst_to:
- return
- self.implib_install_task=self.bld.install_as('${LIBDIR}/%s'%implib.name,implib,self.env)
-@feature('cshlib','cxxshlib','dshlib','fcshlib','vnum')
-@after_method('apply_link','propagate_uselib_vars')
-def apply_vnum(self):
- if not getattr(self,'vnum','')or os.name!='posix'or self.env.DEST_BINFMT not in('elf','mac-o'):
- return
- link=self.link_task
- nums=self.vnum.split('.')
- node=link.outputs[0]
- libname=node.name
- if libname.endswith('.dylib'):
- name3=libname.replace('.dylib','.%s.dylib'%self.vnum)
- name2=libname.replace('.dylib','.%s.dylib'%nums[0])
- else:
- name3=libname+'.'+self.vnum
- name2=libname+'.'+nums[0]
- if self.env.SONAME_ST:
- v=self.env.SONAME_ST%name2
- self.env.append_value('LINKFLAGS',v.split())
- self.create_task('vnum',node,[node.parent.find_or_declare(name2),node.parent.find_or_declare(name3)])
- if getattr(self,'install_task',None):
- self.install_task.hasrun=Task.SKIP_ME
- bld=self.bld
- path=self.install_task.dest
- t1=bld.install_as(path+os.sep+name3,node,env=self.env,chmod=self.link_task.chmod)
- t2=bld.symlink_as(path+os.sep+name2,name3)
- t3=bld.symlink_as(path+os.sep+libname,name3)
- self.vnum_install_task=(t1,t2,t3)
- if'-dynamiclib'in self.env['LINKFLAGS']:
- try:
- inst_to=self.install_path
- except AttributeError:
- inst_to=self.link_task.__class__.inst_to
- if inst_to:
- p=Utils.subst_vars(inst_to,self.env)
- path=os.path.join(p,self.link_task.outputs[0].name)
- self.env.append_value('LINKFLAGS',['-install_name',path])
-class vnum(Task.Task):
- color='CYAN'
- quiet=True
- ext_in=['.bin']
- def run(self):
- for x in self.outputs:
- path=x.abspath()
- try:
- os.remove(path)
- except OSError:
- pass
- try:
- os.symlink(self.inputs[0].name,path)
- except OSError:
- return 1
-class fake_shlib(link_task):
- def runnable_status(self):
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
- for x in self.outputs:
- x.sig=Utils.h_file(x.abspath())
- return Task.SKIP_ME
-class fake_stlib(stlink_task):
- def runnable_status(self):
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
- for x in self.outputs:
- x.sig=Utils.h_file(x.abspath())
- return Task.SKIP_ME
-@conf
-def read_shlib(self,name,paths=[]):
- return self(name=name,features='fake_lib',lib_paths=paths,lib_type='shlib')
-@conf
-def read_stlib(self,name,paths=[]):
- return self(name=name,features='fake_lib',lib_paths=paths,lib_type='stlib')
-lib_patterns={'shlib':['lib%s.so','%s.so','lib%s.dylib','lib%s.dll','%s.dll'],'stlib':['lib%s.a','%s.a','lib%s.dll','%s.dll','lib%s.lib','%s.lib'],}
-@feature('fake_lib')
-def process_lib(self):
- node=None
- names=[x%self.name for x in lib_patterns[self.lib_type]]
- for x in self.lib_paths+[self.path]+SYSTEM_LIB_PATHS:
- if not isinstance(x,Node.Node):
- x=self.bld.root.find_node(x)or self.path.find_node(x)
- if not x:
- continue
- for y in names:
- node=x.find_node(y)
- if node:
- node.sig=Utils.h_file(node.abspath())
- break
- else:
- continue
- break
- else:
- raise Errors.WafError('could not find library %r'%self.name)
- self.link_task=self.create_task('fake_%s'%self.lib_type,[],[node])
- self.target=self.name
-class fake_o(Task.Task):
- def runnable_status(self):
- return Task.SKIP_ME
-@extension('.o','.obj')
-def add_those_o_files(self,node):
- tsk=self.create_task('fake_o',[],node)
- try:
- self.compiled_tasks.append(tsk)
- except AttributeError:
- self.compiled_tasks=[tsk]
-@feature('fake_obj')
-@before_method('process_source')
-def process_objs(self):
- for node in self.to_nodes(self.source):
- self.add_those_o_files(node)
- self.source=[]
-@conf
-def read_object(self,obj):
- if not isinstance(obj,self.path.__class__):
- obj=self.path.find_resource(obj)
- return self(features='fake_obj',source=obj,name=obj.name)
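For context, process_use() and apply_link() above are what resolve use= dependencies between target generators at build time. A minimal, hypothetical wscript sketch of that chaining (a program linking a static library):

    def build(bld):
        bld(features='c cstlib', source='util.c', target='util')
        bld(features='c cprogram', source='main.c', target='app', use='util')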
diff --git a/waflib/Tools/compiler_c.py b/waflib/Tools/compiler_c.py
deleted file mode 100644
index 04504fa..0000000
--- a/waflib/Tools/compiler_c.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys,imp,types
-from waflib.Tools import ccroot
-from waflib import Utils,Configure
-from waflib.Logs import debug
-c_compiler={'win32':['msvc','gcc'],'cygwin':['gcc'],'darwin':['gcc'],'aix':['xlc','gcc'],'linux':['gcc','icc'],'sunos':['suncc','gcc'],'irix':['gcc','irixcc'],'hpux':['gcc'],'gnu':['gcc'],'java':['gcc','msvc','icc'],'default':['gcc'],}
-def configure(conf):
- try:test_for_compiler=conf.options.check_c_compiler
- except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_c')")
- for compiler in test_for_compiler.split():
- conf.env.stash()
- conf.start_msg('Checking for %r (c compiler)'%compiler)
- try:
- conf.load(compiler)
- except conf.errors.ConfigurationError ,e:
- conf.env.revert()
- conf.end_msg(False)
- debug('compiler_c: %r'%e)
- else:
- if conf.env['CC']:
- conf.end_msg(conf.env.get_flat('CC'))
- conf.env['COMPILER_CC']=compiler
- break
- conf.end_msg(False)
- else:
- conf.fatal('could not configure a c compiler!')
-def options(opt):
- opt.load_special_tools('c_*.py',ban=['c_dumbpreproc.py'])
- global c_compiler
- build_platform=Utils.unversioned_sys_platform()
- possible_compiler_list=c_compiler[build_platform in c_compiler and build_platform or'default']
- test_for_compiler=' '.join(possible_compiler_list)
- cc_compiler_opts=opt.add_option_group("C Compiler Options")
- cc_compiler_opts.add_option('--check-c-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C-Compiler will be checked by default: "%s"'%(build_platform,test_for_compiler),dest="check_c_compiler")
- for x in test_for_compiler.split():
- opt.load('%s'%x)
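The configure() function above expects the tool to have been loaded from options() as well (that is what its fatal message asks for). A minimal, hypothetical wscript sketch of the usual pattern:

    def options(opt):
        opt.load('compiler_c')

    def configure(conf):
        conf.load('compiler_c')   # probes the compilers listed in c_compiler[platform]

    def build(bld):
        bld(features='c cprogram', source='main.c', target='app')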
diff --git a/waflib/Tools/compiler_cxx.py b/waflib/Tools/compiler_cxx.py
deleted file mode 100644
index 14b7c7d..0000000
--- a/waflib/Tools/compiler_cxx.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys,imp,types
-from waflib.Tools import ccroot
-from waflib import Utils,Configure
-from waflib.Logs import debug
-cxx_compiler={'win32':['msvc','g++'],'cygwin':['g++'],'darwin':['g++'],'aix':['xlc++','g++'],'linux':['g++','icpc'],'sunos':['sunc++','g++'],'irix':['g++'],'hpux':['g++'],'gnu':['g++'],'java':['g++','msvc','icpc'],'default':['g++']}
-def configure(conf):
- try:test_for_compiler=conf.options.check_cxx_compiler
- except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_cxx')")
- for compiler in test_for_compiler.split():
- conf.env.stash()
- conf.start_msg('Checking for %r (c++ compiler)'%compiler)
- try:
- conf.load(compiler)
- except conf.errors.ConfigurationError ,e:
- conf.env.revert()
- conf.end_msg(False)
- debug('compiler_cxx: %r'%e)
- else:
- if conf.env['CXX']:
- conf.end_msg(conf.env.get_flat('CXX'))
- conf.env['COMPILER_CXX']=compiler
- break
- conf.end_msg(False)
- else:
- conf.fatal('could not configure a c++ compiler!')
-def options(opt):
- opt.load_special_tools('cxx_*.py')
- global cxx_compiler
- build_platform=Utils.unversioned_sys_platform()
- possible_compiler_list=cxx_compiler[build_platform in cxx_compiler and build_platform or'default']
- test_for_compiler=' '.join(possible_compiler_list)
- cxx_compiler_opts=opt.add_option_group('C++ Compiler Options')
- cxx_compiler_opts.add_option('--check-cxx-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"'%(build_platform,test_for_compiler),dest="check_cxx_compiler")
- for x in test_for_compiler.split():
- opt.load('%s'%x)
diff --git a/waflib/Tools/compiler_d.py b/waflib/Tools/compiler_d.py
deleted file mode 100644
index ee173e1..0000000
--- a/waflib/Tools/compiler_d.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys,imp,types
-from waflib import Utils,Configure,Options,Logs
-def configure(conf):
- for compiler in conf.options.dcheck.split(','):
- conf.env.stash()
- conf.start_msg('Checking for %r (d compiler)'%compiler)
- try:
- conf.load(compiler)
- except conf.errors.ConfigurationError ,e:
- conf.env.revert()
- conf.end_msg(False)
- Logs.debug('compiler_d: %r'%e)
- else:
- if conf.env.D:
- conf.end_msg(conf.env.get_flat('D'))
- conf.env['COMPILER_D']=compiler
- break
- conf.end_msg(False)
- else:
- conf.fatal('no suitable d compiler was found')
-def options(opt):
- d_compiler_opts=opt.add_option_group('D Compiler Options')
- d_compiler_opts.add_option('--check-d-compiler',default='gdc,dmd,ldc2',action='store',help='check for the compiler [Default:gdc,dmd,ldc2]',dest='dcheck')
- for d_compiler in['gdc','dmd','ldc2']:
- opt.load('%s'%d_compiler)
diff --git a/waflib/Tools/compiler_fc.py b/waflib/Tools/compiler_fc.py
deleted file mode 100644
index ec5d2ea..0000000
--- a/waflib/Tools/compiler_fc.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys,imp,types
-from waflib import Utils,Configure,Options,Logs,Errors
-from waflib.Tools import fc
-fc_compiler={'win32':['gfortran','ifort'],'darwin':['gfortran','g95','ifort'],'linux':['gfortran','g95','ifort'],'java':['gfortran','g95','ifort'],'default':['gfortran'],'aix':['gfortran']}
-def __list_possible_compiler(platform):
- try:
- return fc_compiler[platform]
- except KeyError:
- return fc_compiler["default"]
-def configure(conf):
- try:test_for_compiler=conf.options.check_fc
- except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_fc')")
- for compiler in test_for_compiler.split():
- conf.env.stash()
- conf.start_msg('Checking for %r (fortran compiler)'%compiler)
- try:
- conf.load(compiler)
- except conf.errors.ConfigurationError ,e:
- conf.env.revert()
- conf.end_msg(False)
- Logs.debug('compiler_fortran: %r'%e)
- else:
- if conf.env['FC']:
- conf.end_msg(conf.env.get_flat('FC'))
- conf.env.COMPILER_FORTRAN=compiler
- break
- conf.end_msg(False)
- else:
- conf.fatal('could not configure a fortran compiler!')
-def options(opt):
- opt.load_special_tools('fc_*.py')
- build_platform=Utils.unversioned_sys_platform()
- detected_platform=Options.platform
- possible_compiler_list=__list_possible_compiler(detected_platform)
- test_for_compiler=' '.join(possible_compiler_list)
- fortran_compiler_opts=opt.add_option_group("Fortran Compiler Options")
- fortran_compiler_opts.add_option('--check-fortran-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following Fortran Compiler will be checked by default: "%s"'%(detected_platform,test_for_compiler),dest="check_fc")
- for compiler in test_for_compiler.split():
- opt.load('%s'%compiler)
diff --git a/waflib/Tools/cs.py b/waflib/Tools/cs.py
deleted file mode 100644
index ee4d319..0000000
--- a/waflib/Tools/cs.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-from waflib import Utils,Task,Options,Logs,Errors
-from waflib.TaskGen import before_method,after_method,feature
-from waflib.Tools import ccroot
-from waflib.Configure import conf
-import os,tempfile
-ccroot.USELIB_VARS['cs']=set(['CSFLAGS','ASSEMBLIES','RESOURCES'])
-ccroot.lib_patterns['csshlib']=['%s']
-@feature('cs')
-@before_method('process_source')
-def apply_cs(self):
- cs_nodes=[]
- no_nodes=[]
- for x in self.to_nodes(self.source):
- if x.name.endswith('.cs'):
- cs_nodes.append(x)
- else:
- no_nodes.append(x)
- self.source=no_nodes
- bintype=getattr(self,'bintype',self.gen.endswith('.dll')and'library'or'exe')
- self.cs_task=tsk=self.create_task('mcs',cs_nodes,self.path.find_or_declare(self.gen))
- tsk.env.CSTYPE='/target:%s'%bintype
- tsk.env.OUT='/out:%s'%tsk.outputs[0].abspath()
- self.env.append_value('CSFLAGS','/platform:%s'%getattr(self,'platform','anycpu'))
- inst_to=getattr(self,'install_path',bintype=='exe'and'${BINDIR}'or'${LIBDIR}')
- if inst_to:
- mod=getattr(self,'chmod',bintype=='exe'and Utils.O755 or Utils.O644)
- self.install_task=self.bld.install_files(inst_to,self.cs_task.outputs[:],env=self.env,chmod=mod)
-@feature('cs')
-@after_method('apply_cs')
-def use_cs(self):
- names=self.to_list(getattr(self,'use',[]))
- get=self.bld.get_tgen_by_name
- for x in names:
- try:
- y=get(x)
- except Errors.WafError:
- self.env.append_value('CSFLAGS','/reference:%s'%x)
- continue
- y.post()
- tsk=getattr(y,'cs_task',None)or getattr(y,'link_task',None)
- if not tsk:
- self.bld.fatal('cs task has no link task for use %r'%self)
- self.cs_task.dep_nodes.extend(tsk.outputs)
- self.cs_task.set_run_after(tsk)
- self.env.append_value('CSFLAGS','/reference:%s'%tsk.outputs[0].abspath())
-@feature('cs')
-@after_method('apply_cs','use_cs')
-def debug_cs(self):
- csdebug=getattr(self,'csdebug',self.env.CSDEBUG)
- if not csdebug:
- return
- node=self.cs_task.outputs[0]
- if self.env.CS_NAME=='mono':
- out=node.parent.find_or_declare(node.name+'.mdb')
- else:
- out=node.change_ext('.pdb')
- self.cs_task.outputs.append(out)
- try:
- self.install_task.source.append(out)
- except AttributeError:
- pass
- if csdebug=='pdbonly':
- val=['/debug+','/debug:pdbonly']
- elif csdebug=='full':
- val=['/debug+','/debug:full']
- else:
- val=['/debug-']
- self.env.append_value('CSFLAGS',val)
-class mcs(Task.Task):
- color='YELLOW'
- run_str='${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
- def exec_command(self,cmd,**kw):
- bld=self.generator.bld
- try:
- if not kw.get('cwd',None):
- kw['cwd']=bld.cwd
- except AttributeError:
- bld.cwd=kw['cwd']=bld.variant_dir
- try:
- tmp=None
- if isinstance(cmd,list)and len(' '.join(cmd))>=8192:
- program=cmd[0]
- cmd=[self.quote_response_command(x)for x in cmd]
- (fd,tmp)=tempfile.mkstemp()
- os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:]))
- os.close(fd)
- cmd=[program,'@'+tmp]
- ret=self.generator.bld.exec_command(cmd,**kw)
- finally:
- if tmp:
- try:
- os.remove(tmp)
- except OSError:
- pass
- return ret
- def quote_response_command(self,flag):
- if flag.lower()=='/noconfig':
- return''
- if flag.find(' ')>-1:
- for x in('/r:','/reference:','/resource:','/lib:','/out:'):
- if flag.startswith(x):
- flag='%s"%s"'%(x,flag[len(x):])
- break
- else:
- flag='"%s"'%flag
- return flag
-def configure(conf):
- csc=getattr(Options.options,'cscbinary',None)
- if csc:
- conf.env.MCS=csc
- conf.find_program(['csc','mcs','gmcs'],var='MCS')
- conf.env.ASS_ST='/r:%s'
- conf.env.RES_ST='/resource:%s'
- conf.env.CS_NAME='csc'
- if str(conf.env.MCS).lower().find('mcs')>-1:
- conf.env.CS_NAME='mono'
-def options(opt):
- opt.add_option('--with-csc-binary',type='string',dest='cscbinary')
-class fake_csshlib(Task.Task):
- color='YELLOW'
- inst_to=None
- def runnable_status(self):
- for x in self.outputs:
- x.sig=Utils.h_file(x.abspath())
- return Task.SKIP_ME
-@conf
-def read_csshlib(self,name,paths=[]):
- return self(name=name,features='fake_lib',lib_paths=paths,lib_type='csshlib')
diff --git a/waflib/Tools/cxx.py b/waflib/Tools/cxx.py
deleted file mode 100644
index b744a8d..0000000
--- a/waflib/Tools/cxx.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-from waflib import TaskGen,Task,Utils
-from waflib.Tools import c_preproc
-from waflib.Tools.ccroot import link_task,stlink_task
-@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')
-def cxx_hook(self,node):
- return self.create_compiled_task('cxx',node)
-if not'.c'in TaskGen.task_gen.mappings:
- TaskGen.task_gen.mappings['.c']=TaskGen.task_gen.mappings['.cpp']
-class cxx(Task.Task):
- run_str='${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}'
- vars=['CXXDEPS']
- ext_in=['.h']
- scan=c_preproc.scan
-class cxxprogram(link_task):
- run_str='${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}'
- vars=['LINKDEPS']
- ext_out=['.bin']
- inst_to='${BINDIR}'
-class cxxshlib(cxxprogram):
- inst_to='${LIBDIR}'
-class cxxstlib(stlink_task):
- pass
diff --git a/waflib/Tools/d.py b/waflib/Tools/d.py
deleted file mode 100644
index 1838740..0000000
--- a/waflib/Tools/d.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-from waflib import Utils,Task,Errors
-from waflib.TaskGen import taskgen_method,feature,extension
-from waflib.Tools import d_scan,d_config
-from waflib.Tools.ccroot import link_task,stlink_task
-class d(Task.Task):
- color='GREEN'
- run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}'
- scan=d_scan.scan
-class d_with_header(d):
- run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}'
-class d_header(Task.Task):
- color='BLUE'
- run_str='${D} ${D_HEADER} ${SRC}'
-class dprogram(link_task):
- run_str='${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}'
- inst_to='${BINDIR}'
-class dshlib(dprogram):
- inst_to='${LIBDIR}'
-class dstlib(stlink_task):
- pass
-@extension('.d','.di','.D')
-def d_hook(self,node):
- ext=Utils.destos_to_binfmt(self.env.DEST_OS)=='pe'and'obj'or'o'
- out='%s.%d.%s'%(node.name,self.idx,ext)
- def create_compiled_task(self,name,node):
- task=self.create_task(name,node,node.parent.find_or_declare(out))
- try:
- self.compiled_tasks.append(task)
- except AttributeError:
- self.compiled_tasks=[task]
- return task
- if getattr(self,'generate_headers',None):
- tsk=create_compiled_task(self,'d_with_header',node)
- tsk.outputs.append(node.change_ext(self.env['DHEADER_ext']))
- else:
- tsk=create_compiled_task(self,'d',node)
- return tsk
-@taskgen_method
-def generate_header(self,filename):
- try:
- self.header_lst.append([filename,self.install_path])
- except AttributeError:
- self.header_lst=[[filename,self.install_path]]
-@feature('d')
-def process_header(self):
- for i in getattr(self,'header_lst',[]):
- node=self.path.find_resource(i[0])
- if not node:
- raise Errors.WafError('file %r not found on d obj'%i[0])
- self.create_task('d_header',node,node.change_ext('.di'))
diff --git a/waflib/Tools/d_config.py b/waflib/Tools/d_config.py
deleted file mode 100644
index 50660ea..0000000
--- a/waflib/Tools/d_config.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-from waflib import Utils
-from waflib.Configure import conf
-@conf
-def d_platform_flags(self):
- v=self.env
- if not v.DEST_OS:
- v.DEST_OS=Utils.unversioned_sys_platform()
- binfmt=Utils.destos_to_binfmt(self.env.DEST_OS)
- if binfmt=='pe':
- v['dprogram_PATTERN']='%s.exe'
- v['dshlib_PATTERN']='lib%s.dll'
- v['dstlib_PATTERN']='lib%s.a'
- elif binfmt=='mac-o':
- v['dprogram_PATTERN']='%s'
- v['dshlib_PATTERN']='lib%s.dylib'
- v['dstlib_PATTERN']='lib%s.a'
- else:
- v['dprogram_PATTERN']='%s'
- v['dshlib_PATTERN']='lib%s.so'
- v['dstlib_PATTERN']='lib%s.a'
-DLIB='''
-version(D_Version2) {
- import std.stdio;
- int main() {
- writefln("phobos2");
- return 0;
- }
-} else {
- version(Tango) {
- import tango.stdc.stdio;
- int main() {
- printf("tango");
- return 0;
- }
- } else {
- import std.stdio;
- int main() {
- writefln("phobos1");
- return 0;
- }
- }
-}
-'''
-@conf
-def check_dlibrary(self,execute=True):
- ret=self.check_cc(features='d dprogram',fragment=DLIB,compile_filename='test.d',execute=execute,define_ret=True)
- if execute:
- self.env.DLIBRARY=ret.strip()
diff --git a/waflib/Tools/d_scan.py b/waflib/Tools/d_scan.py
deleted file mode 100644
index ee80c5f..0000000
--- a/waflib/Tools/d_scan.py
+++ /dev/null
@@ -1,133 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import re
-from waflib import Utils,Logs
-def filter_comments(filename):
- txt=Utils.readf(filename)
- i=0
- buf=[]
- max=len(txt)
- begin=0
- while i<max:
- c=txt[i]
- if c=='"'or c=="'":
- buf.append(txt[begin:i])
- delim=c
- i+=1
- while i<max:
- c=txt[i]
- if c==delim:break
- elif c=='\\':
- i+=1
- i+=1
- i+=1
- begin=i
- elif c=='/':
- buf.append(txt[begin:i])
- i+=1
- if i==max:break
- c=txt[i]
- if c=='+':
- i+=1
- nesting=1
- c=None
- while i<max:
- prev=c
- c=txt[i]
- if prev=='/'and c=='+':
- nesting+=1
- c=None
- elif prev=='+'and c=='/':
- nesting-=1
- if nesting==0:break
- c=None
- i+=1
- elif c=='*':
- i+=1
- c=None
- while i<max:
- prev=c
- c=txt[i]
- if prev=='*'and c=='/':break
- i+=1
- elif c=='/':
- i+=1
- while i<max and txt[i]!='\n':
- i+=1
- else:
- begin=i-1
- continue
- i+=1
- begin=i
- buf.append(' ')
- else:
- i+=1
- buf.append(txt[begin:])
- return buf
-class d_parser(object):
- def __init__(self,env,incpaths):
- self.allnames=[]
- self.re_module=re.compile("module\s+([^;]+)")
- self.re_import=re.compile("import\s+([^;]+)")
- self.re_import_bindings=re.compile("([^:]+):(.*)")
- self.re_import_alias=re.compile("[^=]+=(.+)")
- self.env=env
- self.nodes=[]
- self.names=[]
- self.incpaths=incpaths
- def tryfind(self,filename):
- found=0
- for n in self.incpaths:
- found=n.find_resource(filename.replace('.','/')+'.d')
- if found:
- self.nodes.append(found)
- self.waiting.append(found)
- break
- if not found:
- if not filename in self.names:
- self.names.append(filename)
- def get_strings(self,code):
- self.module=''
- lst=[]
- mod_name=self.re_module.search(code)
- if mod_name:
- self.module=re.sub('\s+','',mod_name.group(1))
- import_iterator=self.re_import.finditer(code)
- if import_iterator:
- for import_match in import_iterator:
- import_match_str=re.sub('\s+','',import_match.group(1))
- bindings_match=self.re_import_bindings.match(import_match_str)
- if bindings_match:
- import_match_str=bindings_match.group(1)
- matches=import_match_str.split(',')
- for match in matches:
- alias_match=self.re_import_alias.match(match)
- if alias_match:
- match=alias_match.group(1)
- lst.append(match)
- return lst
- def start(self,node):
- self.waiting=[node]
- while self.waiting:
- nd=self.waiting.pop(0)
- self.iter(nd)
- def iter(self,node):
- path=node.abspath()
- code="".join(filter_comments(path))
- names=self.get_strings(code)
- for x in names:
- if x in self.allnames:continue
- self.allnames.append(x)
- self.tryfind(x)
-def scan(self):
- env=self.env
- gruik=d_parser(env,self.generator.includes_nodes)
- node=self.inputs[0]
- gruik.start(node)
- nodes=gruik.nodes
- names=gruik.names
- if Logs.verbose:
- Logs.debug('deps: deps for %s: %r; unresolved %r'%(str(node),nodes,names))
- return(nodes,names)
diff --git a/waflib/Tools/dbus.py b/waflib/Tools/dbus.py
deleted file mode 100644
index ccea278..0000000
--- a/waflib/Tools/dbus.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-from waflib import Task,Errors
-from waflib.TaskGen import taskgen_method,before_method
-@taskgen_method
-def add_dbus_file(self,filename,prefix,mode):
- if not hasattr(self,'dbus_lst'):
- self.dbus_lst=[]
- if not'process_dbus'in self.meths:
- self.meths.append('process_dbus')
- self.dbus_lst.append([filename,prefix,mode])
-@before_method('apply_core')
-def process_dbus(self):
- for filename,prefix,mode in getattr(self,'dbus_lst',[]):
- node=self.path.find_resource(filename)
- if not node:
- raise Errors.WafError('file not found '+filename)
- tsk=self.create_task('dbus_binding_tool',node,node.change_ext('.h'))
- tsk.env.DBUS_BINDING_TOOL_PREFIX=prefix
- tsk.env.DBUS_BINDING_TOOL_MODE=mode
-class dbus_binding_tool(Task.Task):
- color='BLUE'
- ext_out=['.h']
- run_str='${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}'
- shell=True
-def configure(conf):
- dbus_binding_tool=conf.find_program('dbus-binding-tool',var='DBUS_BINDING_TOOL')
diff --git a/waflib/Tools/dmd.py b/waflib/Tools/dmd.py
deleted file mode 100644
index b6e3303..0000000
--- a/waflib/Tools/dmd.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import sys
-from waflib.Tools import ar,d
-from waflib.Configure import conf
-@conf
-def find_dmd(conf):
- conf.find_program(['dmd','dmd2','ldc'],var='D')
- out=conf.cmd_and_log([conf.env.D,'--help'])
- if out.find("D Compiler v")==-1:
- out=conf.cmd_and_log([conf.env.D,'-version'])
- if out.find("based on DMD v1.")==-1:
- conf.fatal("detected compiler is not dmd/ldc")
-@conf
-def common_flags_ldc(conf):
- v=conf.env
- v['DFLAGS']=['-d-version=Posix']
- v['LINKFLAGS']=[]
- v['DFLAGS_dshlib']=['-relocation-model=pic']
-@conf
-def common_flags_dmd(conf):
- v=conf.env
- v['D_SRC_F']=['-c']
- v['D_TGT_F']='-of%s'
- v['D_LINKER']=v['D']
- v['DLNK_SRC_F']=''
- v['DLNK_TGT_F']='-of%s'
- v['DINC_ST']='-I%s'
- v['DSHLIB_MARKER']=v['DSTLIB_MARKER']=''
- v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s'
- v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s'
- v['LINKFLAGS_dprogram']=['-quiet']
- v['DFLAGS_dshlib']=['-fPIC']
- v['LINKFLAGS_dshlib']=['-L-shared']
- v['DHEADER_ext']='.di'
- v.DFLAGS_d_with_header=['-H','-Hf']
- v['D_HDR_F']='%s'
-def configure(conf):
- conf.find_dmd()
- if sys.platform=='win32':
- out=conf.cmd_and_log([conf.env.D,'--help'])
- if out.find("D Compiler v2.")>-1:
- conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')
- conf.load('ar')
- conf.load('d')
- conf.common_flags_dmd()
- conf.d_platform_flags()
- if str(conf.env.D).find('ldc')>-1:
- conf.common_flags_ldc()
diff --git a/waflib/Tools/errcheck.py b/waflib/Tools/errcheck.py
deleted file mode 100644
index 3b06493..0000000
--- a/waflib/Tools/errcheck.py
+++ /dev/null
@@ -1,161 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-typos={'feature':'features','sources':'source','targets':'target','include':'includes','export_include':'export_includes','define':'defines','importpath':'includes','installpath':'install_path','iscopy':'is_copy',}
-meths_typos=['__call__','program','shlib','stlib','objects']
-from waflib import Logs,Build,Node,Task,TaskGen,ConfigSet,Errors,Utils
-import waflib.Tools.ccroot
-def check_same_targets(self):
- mp=Utils.defaultdict(list)
- uids={}
- def check_task(tsk):
- if not isinstance(tsk,Task.Task):
- return
- for node in tsk.outputs:
- mp[node].append(tsk)
- try:
- uids[tsk.uid()].append(tsk)
- except KeyError:
- uids[tsk.uid()]=[tsk]
- for g in self.groups:
- for tg in g:
- try:
- for tsk in tg.tasks:
- check_task(tsk)
- except AttributeError:
- check_task(tg)
- dupe=False
- for(k,v)in mp.items():
- if len(v)>1:
- dupe=True
- msg='* Node %r is created more than once%s. The task generators are:'%(k,Logs.verbose==1 and" (full message on 'waf -v -v')"or"")
- Logs.error(msg)
- for x in v:
- if Logs.verbose>1:
- Logs.error(' %d. %r'%(1+v.index(x),x.generator))
- else:
- Logs.error(' %d. %r in %r'%(1+v.index(x),x.generator.name,getattr(x.generator,'path',None)))
- if not dupe:
- for(k,v)in uids.items():
- if len(v)>1:
- Logs.error('* Several tasks use the same identifier. Please check the information on\n http://docs.waf.googlecode.com/git/apidocs_16/Task.html#waflib.Task.Task.uid')
- for tsk in v:
- Logs.error(' - object %r (%r) defined in %r'%(tsk.__class__.__name__,tsk,tsk.generator))
-def check_invalid_constraints(self):
- feat=set([])
- for x in list(TaskGen.feats.values()):
- feat.union(set(x))
- for(x,y)in TaskGen.task_gen.prec.items():
- feat.add(x)
- feat.union(set(y))
- ext=set([])
- for x in TaskGen.task_gen.mappings.values():
- ext.add(x.__name__)
- invalid=ext&feat
- if invalid:
- Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method'%list(invalid))
- for cls in list(Task.classes.values()):
- for x in('before','after'):
- for y in Utils.to_list(getattr(cls,x,[])):
- if not Task.classes.get(y,None):
- Logs.error('Erroneous order constraint %r=%r on task class %r'%(x,y,cls.__name__))
- if getattr(cls,'rule',None):
- Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")'%cls.__name__)
-def replace(m):
- oldcall=getattr(Build.BuildContext,m)
- def call(self,*k,**kw):
- ret=oldcall(self,*k,**kw)
- for x in typos:
- if x in kw:
- if x=='iscopy'and'subst'in getattr(self,'features',''):
- continue
- err=True
- Logs.error('Fix the typo %r -> %r on %r'%(x,typos[x],ret))
- return ret
- setattr(Build.BuildContext,m,call)
-def enhance_lib():
- for m in meths_typos:
- replace(m)
- def ant_glob(self,*k,**kw):
- if k:
- lst=Utils.to_list(k[0])
- for pat in lst:
- if'..'in pat.split('/'):
- Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'"%k[0])
- if kw.get('remove',True):
- try:
- if self.is_child_of(self.ctx.bldnode)and not kw.get('quiet',False):
- Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)'%self)
- except AttributeError:
- pass
- return self.old_ant_glob(*k,**kw)
- Node.Node.old_ant_glob=Node.Node.ant_glob
- Node.Node.ant_glob=ant_glob
- old=Task.is_before
- def is_before(t1,t2):
- ret=old(t1,t2)
- if ret and old(t2,t1):
- Logs.error('Contradictory order constraints in classes %r %r'%(t1,t2))
- return ret
- Task.is_before=is_before
- def check_err_features(self):
- lst=self.to_list(self.features)
- if'shlib'in lst:
- Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
- for x in('c','cxx','d','fc'):
- if not x in lst and lst and lst[0]in[x+y for y in('program','shlib','stlib')]:
- Logs.error('%r features is probably missing %r'%(self,x))
- TaskGen.feature('*')(check_err_features)
- def check_err_order(self):
- if not hasattr(self,'rule')and not'subst'in Utils.to_list(self.features):
- for x in('before','after','ext_in','ext_out'):
- if hasattr(self,x):
- Logs.warn('Erroneous order constraint %r on non-rule based task generator %r'%(x,self))
- else:
- for x in('before','after'):
- for y in self.to_list(getattr(self,x,[])):
- if not Task.classes.get(y,None):
- Logs.error('Erroneous order constraint %s=%r on %r (no such class)'%(x,y,self))
- TaskGen.feature('*')(check_err_order)
- def check_compile(self):
- check_invalid_constraints(self)
- try:
- ret=self.orig_compile()
- finally:
- check_same_targets(self)
- return ret
- Build.BuildContext.orig_compile=Build.BuildContext.compile
- Build.BuildContext.compile=check_compile
- def use_rec(self,name,**kw):
- try:
- y=self.bld.get_tgen_by_name(name)
- except Errors.WafError:
- pass
- else:
- idx=self.bld.get_group_idx(self)
- odx=self.bld.get_group_idx(y)
- if odx>idx:
- msg="Invalid 'use' across build groups:"
- if Logs.verbose>1:
- msg+='\n target %r\n uses:\n %r'%(self,y)
- else:
- msg+=" %r uses %r (try 'waf -v -v' for the full error)"%(self.name,name)
- raise Errors.WafError(msg)
- self.orig_use_rec(name,**kw)
- TaskGen.task_gen.orig_use_rec=TaskGen.task_gen.use_rec
- TaskGen.task_gen.use_rec=use_rec
- def getattri(self,name,default=None):
- if name=='append'or name=='add':
- raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique')
- elif name=='prepend':
- raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
- if name in self.__slots__:
- return object.__getattr__(self,name,default)
- else:
- return self[name]
- ConfigSet.ConfigSet.__getattr__=getattri
-def options(opt):
- enhance_lib()
-def configure(conf):
- pass
diff --git a/waflib/Tools/fc.py b/waflib/Tools/fc.py
deleted file mode 100644
index 3589799..0000000
--- a/waflib/Tools/fc.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import re
-from waflib import Utils,Task,TaskGen,Logs
-from waflib.Tools import ccroot,fc_config,fc_scan
-from waflib.TaskGen import feature,before_method,after_method,extension
-from waflib.Configure import conf
-ccroot.USELIB_VARS['fc']=set(['FCFLAGS','DEFINES','INCLUDES'])
-ccroot.USELIB_VARS['fcprogram_test']=ccroot.USELIB_VARS['fcprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
-ccroot.USELIB_VARS['fcshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
-ccroot.USELIB_VARS['fcstlib']=set(['ARFLAGS','LINKDEPS'])
-@feature('fcprogram','fcshlib','fcstlib','fcprogram_test')
-def dummy(self):
- pass
-@extension('.f','.f90','.F','.F90','.for','.FOR')
-def fc_hook(self,node):
- return self.create_compiled_task('fc',node)
-@conf
-def modfile(conf,name):
- return{'lower':name.lower()+'.mod','lower.MOD':name.upper()+'.MOD','UPPER.mod':name.upper()+'.mod','UPPER':name.upper()+'.MOD'}[conf.env.FC_MOD_CAPITALIZATION or'lower']
-def get_fortran_tasks(tsk):
- bld=tsk.generator.bld
- tasks=bld.get_tasks_group(bld.get_group_idx(tsk.generator))
- return[x for x in tasks if isinstance(x,fc)and not getattr(x,'nomod',None)and not getattr(x,'mod_fortran_done',None)]
-class fc(Task.Task):
- color='GREEN'
- run_str='${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()}'
- vars=["FORTRANMODPATHFLAG"]
- def scan(self):
- tmp=fc_scan.fortran_parser(self.generator.includes_nodes)
- tmp.task=self
- tmp.start(self.inputs[0])
- if Logs.verbose:
- Logs.debug('deps: deps for %r: %r; unresolved %r'%(self.inputs,tmp.nodes,tmp.names))
- return(tmp.nodes,tmp.names)
- def runnable_status(self):
- if getattr(self,'mod_fortran_done',None):
- return super(fc,self).runnable_status()
- bld=self.generator.bld
- lst=get_fortran_tasks(self)
- for tsk in lst:
- tsk.mod_fortran_done=True
- for tsk in lst:
- ret=tsk.runnable_status()
- if ret==Task.ASK_LATER:
- for x in lst:
- x.mod_fortran_done=None
- return Task.ASK_LATER
- ins=Utils.defaultdict(set)
- outs=Utils.defaultdict(set)
- for tsk in lst:
- key=tsk.uid()
- for x in bld.raw_deps[key]:
- if x.startswith('MOD@'):
- name=bld.modfile(x.replace('MOD@',''))
- node=bld.srcnode.find_or_declare(name)
- tsk.set_outputs(node)
- outs[id(node)].add(tsk)
- for tsk in lst:
- key=tsk.uid()
- for x in bld.raw_deps[key]:
- if x.startswith('USE@'):
- name=bld.modfile(x.replace('USE@',''))
- node=bld.srcnode.find_resource(name)
- if node and node not in tsk.outputs:
- if not node in bld.node_deps[key]:
- bld.node_deps[key].append(node)
- ins[id(node)].add(tsk)
- for k in ins.keys():
- for a in ins[k]:
- a.run_after.update(outs[k])
- tmp=[]
- for t in outs[k]:
- tmp.extend(t.outputs)
- a.dep_nodes.extend(tmp)
- a.dep_nodes.sort(key=lambda x:x.abspath())
- for tsk in lst:
- try:
- delattr(tsk,'cache_sig')
- except AttributeError:
- pass
- return super(fc,self).runnable_status()
-class fcprogram(ccroot.link_task):
- color='YELLOW'
- run_str='${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB}'
- inst_to='${BINDIR}'
-class fcshlib(fcprogram):
- inst_to='${LIBDIR}'
-class fcprogram_test(fcprogram):
- def can_retrieve_cache(self):
- return False
- def runnable_status(self):
- ret=super(fcprogram_test,self).runnable_status()
- if ret==Task.SKIP_ME:
- ret=Task.RUN_ME
- return ret
- def exec_command(self,cmd,**kw):
- bld=self.generator.bld
- kw['shell']=isinstance(cmd,str)
- kw['stdout']=kw['stderr']=Utils.subprocess.PIPE
- kw['cwd']=bld.variant_dir
- bld.out=bld.err=''
- bld.to_log('command: %s\n'%cmd)
- kw['output']=0
- try:
- (bld.out,bld.err)=bld.cmd_and_log(cmd,**kw)
- except Exception ,e:
- return-1
- if bld.out:
- bld.to_log("out: %s\n"%bld.out)
- if bld.err:
- bld.to_log("err: %s\n"%bld.err)
-class fcstlib(ccroot.stlink_task):
- pass
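A hedged sketch of how the fc task classes above are typically driven from a project wscript (file names hypothetical); compiler detection itself comes from the compiler_fc tool shown earlier in this diff:

    def configure(conf):
        conf.load('compiler_fc')

    def build(bld):
        bld(features='fc fcprogram', source='calc.f90', target='calc')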
diff --git a/waflib/Tools/fc_config.py b/waflib/Tools/fc_config.py
deleted file mode 100644
index 0130a57..0000000
--- a/waflib/Tools/fc_config.py
+++ /dev/null
@@ -1,285 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import re,shutil,os,sys,string,shlex
-from waflib.Configure import conf
-from waflib.TaskGen import feature,after_method,before_method
-from waflib import Build,Utils
-FC_FRAGMENT=' program main\n end program main\n'
-FC_FRAGMENT2=' PROGRAM MAIN\n END\n'
-@conf
-def fc_flags(conf):
- v=conf.env
- v['FC_SRC_F']=[]
- v['FC_TGT_F']=['-c','-o']
- v['FCINCPATH_ST']='-I%s'
- v['FCDEFINES_ST']='-D%s'
- if not v['LINK_FC']:v['LINK_FC']=v['FC']
- v['FCLNK_SRC_F']=[]
- v['FCLNK_TGT_F']=['-o']
- v['FCFLAGS_fcshlib']=['-fpic']
- v['LINKFLAGS_fcshlib']=['-shared']
- v['fcshlib_PATTERN']='lib%s.so'
- v['fcstlib_PATTERN']='lib%s.a'
- v['FCLIB_ST']='-l%s'
- v['FCLIBPATH_ST']='-L%s'
- v['FCSTLIB_ST']='-l%s'
- v['FCSTLIBPATH_ST']='-L%s'
- v['FCSTLIB_MARKER']='-Wl,-Bstatic'
- v['FCSHLIB_MARKER']='-Wl,-Bdynamic'
- v['SONAME_ST']='-Wl,-h,%s'
-@conf
-def fc_add_flags(conf):
- conf.add_os_flags('FCFLAGS')
- conf.add_os_flags('LDFLAGS','LINKFLAGS')
-@conf
-def check_fortran(self,*k,**kw):
- self.check_cc(fragment=FC_FRAGMENT,compile_filename='test.f',features='fc fcprogram',msg='Compiling a simple fortran app')
-@conf
-def check_fc(self,*k,**kw):
- kw['compiler']='fc'
- if not'compile_mode'in kw:
- kw['compile_mode']='fc'
- if not'type'in kw:
- kw['type']='fcprogram'
- if not'compile_filename'in kw:
- kw['compile_filename']='test.f90'
- if not'code'in kw:
- kw['code']=FC_FRAGMENT
- return self.check(*k,**kw)
-@conf
-def fortran_modifier_darwin(conf):
- v=conf.env
- v['FCFLAGS_fcshlib']=['-fPIC','-compatibility_version','1','-current_version','1']
- v['LINKFLAGS_fcshlib']=['-dynamiclib']
- v['fcshlib_PATTERN']='lib%s.dylib'
- v['FRAMEWORKPATH_ST']='-F%s'
- v['FRAMEWORK_ST']='-framework %s'
- v['LINKFLAGS_fcstlib']=[]
- v['FCSHLIB_MARKER']=''
- v['FCSTLIB_MARKER']=''
- v['SONAME_ST']=''
-@conf
-def fortran_modifier_win32(conf):
- v=conf.env
- v['fcprogram_PATTERN']=v['fcprogram_test_PATTERN']='%s.exe'
- v['fcshlib_PATTERN']='%s.dll'
- v['implib_PATTERN']='lib%s.dll.a'
- v['IMPLIB_ST']='-Wl,--out-implib,%s'
- v['FCFLAGS_fcshlib']=[]
- v.append_value('FCFLAGS_fcshlib',['-DDLL_EXPORT'])
- v.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
-@conf
-def fortran_modifier_cygwin(conf):
- fortran_modifier_win32(conf)
- v=conf.env
- v['fcshlib_PATTERN']='cyg%s.dll'
- v.append_value('LINKFLAGS_fcshlib',['-Wl,--enable-auto-image-base'])
- v['FCFLAGS_fcshlib']=[]
-@conf
-def check_fortran_dummy_main(self,*k,**kw):
- if not self.env.CC:
- self.fatal('A c compiler is required for check_fortran_dummy_main')
- lst=['MAIN__','__MAIN','_MAIN','MAIN_','MAIN']
- lst.extend([m.lower()for m in lst])
- lst.append('')
- self.start_msg('Detecting whether we need a dummy main')
- for main in lst:
- kw['fortran_main']=main
- try:
- self.check_cc(fragment='int %s() { return 0; }\n'%(main or'test'),features='c fcprogram',mandatory=True)
- if not main:
- self.env.FC_MAIN=-1
- self.end_msg('no')
- else:
- self.env.FC_MAIN=main
- self.end_msg('yes %s'%main)
- break
- except self.errors.ConfigurationError:
- pass
- else:
- self.end_msg('not found')
- self.fatal('could not detect whether fortran requires a dummy main, see the config.log')
-GCC_DRIVER_LINE=re.compile('^Driving:')
-POSIX_STATIC_EXT=re.compile('\S+\.a')
-POSIX_LIB_FLAGS=re.compile('-l\S+')
-@conf
-def is_link_verbose(self,txt):
- assert isinstance(txt,str)
- for line in txt.splitlines():
- if not GCC_DRIVER_LINE.search(line):
- if POSIX_STATIC_EXT.search(line)or POSIX_LIB_FLAGS.search(line):
- return True
- return False
-@conf
-def check_fortran_verbose_flag(self,*k,**kw):
- self.start_msg('fortran link verbose flag')
- for x in['-v','--verbose','-verbose','-V']:
- try:
- self.check_cc(features='fc fcprogram_test',fragment=FC_FRAGMENT2,compile_filename='test.f',linkflags=[x],mandatory=True)
- except self.errors.ConfigurationError:
- pass
- else:
- if self.is_link_verbose(self.test_bld.err)or self.is_link_verbose(self.test_bld.out):
- self.end_msg(x)
- break
- else:
- self.end_msg('failure')
- self.fatal('Could not obtain the fortran link verbose flag (see config.log)')
- self.env.FC_VERBOSE_FLAG=x
- return x
-LINKFLAGS_IGNORED=[r'-lang*',r'-lcrt[a-zA-Z0-9\.]*\.o',r'-lc$',r'-lSystem',r'-libmil',r'-LIST:*',r'-LNO:*']
-if os.name=='nt':
- LINKFLAGS_IGNORED.extend([r'-lfrt*',r'-luser32',r'-lkernel32',r'-ladvapi32',r'-lmsvcrt',r'-lshell32',r'-lmingw',r'-lmoldname'])
-else:
- LINKFLAGS_IGNORED.append(r'-lgcc*')
-RLINKFLAGS_IGNORED=[re.compile(f)for f in LINKFLAGS_IGNORED]
-def _match_ignore(line):
- for i in RLINKFLAGS_IGNORED:
- if i.match(line):
- return True
- return False
-def parse_fortran_link(lines):
- final_flags=[]
- for line in lines:
- if not GCC_DRIVER_LINE.match(line):
- _parse_flink_line(line,final_flags)
- return final_flags
-SPACE_OPTS=re.compile('^-[LRuYz]$')
-NOSPACE_OPTS=re.compile('^-[RL]')
-def _parse_flink_line(line,final_flags):
- lexer=shlex.shlex(line,posix=True)
- lexer.whitespace_split=True
- t=lexer.get_token()
- tmp_flags=[]
- while t:
- def parse(token):
- if _match_ignore(token):
- pass
- elif token.startswith('-lkernel32')and sys.platform=='cygwin':
- tmp_flags.append(token)
- elif SPACE_OPTS.match(token):
- t=lexer.get_token()
- if t.startswith('P,'):
- t=t[2:]
- for opt in t.split(os.pathsep):
- tmp_flags.append('-L%s'%opt)
- elif NOSPACE_OPTS.match(token):
- tmp_flags.append(token)
- elif POSIX_LIB_FLAGS.match(token):
- tmp_flags.append(token)
- else:
- pass
- t=lexer.get_token()
- return t
- t=parse(t)
- final_flags.extend(tmp_flags)
- return final_flags
-@conf
-def check_fortran_clib(self,autoadd=True,*k,**kw):
- if not self.env.FC_VERBOSE_FLAG:
- self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?')
- self.start_msg('Getting fortran runtime link flags')
- try:
- self.check_cc(fragment=FC_FRAGMENT2,compile_filename='test.f',features='fc fcprogram_test',linkflags=[self.env.FC_VERBOSE_FLAG])
- except Exception:
- self.end_msg(False)
- if kw.get('mandatory',True):
- conf.fatal('Could not find the c library flags')
- else:
- out=self.test_bld.err
- flags=parse_fortran_link(out.splitlines())
- self.end_msg('ok (%s)'%' '.join(flags))
- self.env.LINKFLAGS_CLIB=flags
- return flags
- return[]
-def getoutput(conf,cmd,stdin=False):
- if stdin:
- stdin=Utils.subprocess.PIPE
- else:
- stdin=None
- env=conf.env.env or None
- try:
- p=Utils.subprocess.Popen(cmd,stdin=stdin,stdout=Utils.subprocess.PIPE,stderr=Utils.subprocess.PIPE,env=env)
- if stdin:
- p.stdin.write('\n')
- out,err=p.communicate()
- except Exception:
- conf.fatal('could not determine the compiler version %r'%cmd)
- if not isinstance(out,str):
- out=out.decode(sys.stdout.encoding or'iso8859-1')
- if not isinstance(err,str):
- err=err.decode(sys.stdout.encoding or'iso8859-1')
- return(out,err)
-ROUTINES_CODE="""\
- subroutine foobar()
- return
- end
- subroutine foo_bar()
- return
- end
-"""
-MAIN_CODE="""
-void %(dummy_func_nounder)s(void);
-void %(dummy_func_under)s(void);
-int %(main_func_name)s() {
- %(dummy_func_nounder)s();
- %(dummy_func_under)s();
- return 0;
-}
-"""
-@feature('link_main_routines_func')
-@before_method('process_source')
-def link_main_routines_tg_method(self):
- def write_test_file(task):
- task.outputs[0].write(task.generator.code)
- bld=self.bld
- bld(rule=write_test_file,target='main.c',code=MAIN_CODE%self.__dict__)
- bld(rule=write_test_file,target='test.f',code=ROUTINES_CODE)
- bld(features='fc fcstlib',source='test.f',target='test')
- bld(features='c fcprogram',source='main.c',target='app',use='test')
-def mangling_schemes():
- for u in['_','']:
- for du in['','_']:
- for c in["lower","upper"]:
- yield(u,du,c)
-def mangle_name(u,du,c,name):
- return getattr(name,c)()+u+(name.find('_')!=-1 and du or'')
-@conf
-def check_fortran_mangling(self,*k,**kw):
- if not self.env.CC:
- self.fatal('A c compiler is required for link_main_routines')
- if not self.env.FC:
- self.fatal('A fortran compiler is required for link_main_routines')
- if not self.env.FC_MAIN:
- self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)')
- self.start_msg('Getting fortran mangling scheme')
- for(u,du,c)in mangling_schemes():
- try:
- self.check_cc(compile_filename=[],features='link_main_routines_func',msg='nomsg',errmsg='nomsg',mandatory=True,dummy_func_nounder=mangle_name(u,du,c,"foobar"),dummy_func_under=mangle_name(u,du,c,"foo_bar"),main_func_name=self.env.FC_MAIN)
- except self.errors.ConfigurationError:
- pass
- else:
- self.end_msg("ok ('%s', '%s', '%s-case')"%(u,du,c))
- self.env.FORTRAN_MANGLING=(u,du,c)
- break
- else:
- self.end_msg(False)
- self.fatal('mangler not found')
- return(u,du,c)
-@feature('pyext')
-@before_method('propagate_uselib_vars','apply_link')
-def set_lib_pat(self):
- self.env['fcshlib_PATTERN']=self.env['pyext_PATTERN']
-@conf
-def detect_openmp(self):
- for x in['-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp']:
- try:
- self.check_fc(msg='Checking for OpenMP flag %s'%x,fragment='program main\n call omp_get_num_threads()\nend program main',fcflags=x,linkflags=x,uselib_store='OPENMP')
- except self.errors.ConfigurationError:
- pass
- else:
- break
- else:
- self.fatal('Could not find OpenMP')
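check_fortran_mangling in the deleted fc_config.py above simply brute-forces every (underscore, extra underscore, case) combination until a C call links against the Fortran routines. A standalone sketch of the same scheme enumeration and name mangling, with no compiler involved:

def mangling_schemes():
    # (trailing underscore, extra underscore for names containing '_', case)
    for u in ['_', '']:
        for du in ['', '_']:
            for c in ['lower', 'upper']:
                yield (u, du, c)

def mangle_name(u, du, c, name):
    # scheme ('_', '_', 'lower') maps foo_bar -> foo_bar__ and foobar -> foobar_
    return getattr(name, c)() + u + (du if '_' in name else '')

for u, du, c in mangling_schemes():
    print((u, du, c), mangle_name(u, du, c, 'foo_bar'))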
diff --git a/waflib/Tools/fc_scan.py b/waflib/Tools/fc_scan.py
deleted file mode 100644
index 48e06b5..0000000
--- a/waflib/Tools/fc_scan.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import re
-from waflib import Utils,Task,TaskGen,Logs
-from waflib.TaskGen import feature,before_method,after_method,extension
-from waflib.Configure import conf
-INC_REGEX="""(?:^|['">]\s*;)\s*INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
-USE_REGEX="""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
-MOD_REGEX="""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
-re_inc=re.compile(INC_REGEX,re.I)
-re_use=re.compile(USE_REGEX,re.I)
-re_mod=re.compile(MOD_REGEX,re.I)
-class fortran_parser(object):
- def __init__(self,incpaths):
- self.seen=[]
- self.nodes=[]
- self.names=[]
- self.incpaths=incpaths
- def find_deps(self,node):
- txt=node.read()
- incs=[]
- uses=[]
- mods=[]
- for line in txt.splitlines():
- m=re_inc.search(line)
- if m:
- incs.append(m.group(1))
- m=re_use.search(line)
- if m:
- uses.append(m.group(1))
- m=re_mod.search(line)
- if m:
- mods.append(m.group(1))
- return(incs,uses,mods)
- def start(self,node):
- self.waiting=[node]
- while self.waiting:
- nd=self.waiting.pop(0)
- self.iter(nd)
- def iter(self,node):
- path=node.abspath()
- incs,uses,mods=self.find_deps(node)
- for x in incs:
- if x in self.seen:
- continue
- self.seen.append(x)
- self.tryfind_header(x)
- for x in uses:
- name="USE@%s"%x
- if not name in self.names:
- self.names.append(name)
- for x in mods:
- name="MOD@%s"%x
- if not name in self.names:
- self.names.append(name)
- def tryfind_header(self,filename):
- found=None
- for n in self.incpaths:
- found=n.find_resource(filename)
- if found:
- self.nodes.append(found)
- self.waiting.append(found)
- break
- if not found:
- if not filename in self.names:
- self.names.append(filename)
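The fortran_parser deleted above detects dependencies by matching INCLUDE, USE and MODULE statements with three case-insensitive regexes. A self-contained sketch that applies the same patterns to a small, made-up source snippet:

import re

# Same patterns as in the deleted fc_scan.py, written here as raw strings.
INC_REGEX = r"""(?:^|['">]\s*;)\s*INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
USE_REGEX = r"""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
MOD_REGEX = r"""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""

re_inc = re.compile(INC_REGEX, re.I)
re_use = re.compile(USE_REGEX, re.I)
re_mod = re.compile(MOD_REGEX, re.I)

source = """\
      module utils
      end module utils
      program main
      use utils
      include 'params.inc'
      end program main
"""

incs, uses, mods = [], [], []
for line in source.splitlines():
    for regex, bucket in ((re_inc, incs), (re_use, uses), (re_mod, mods)):
        m = regex.search(line)
        if m:
            bucket.append(m.group(1))

print(incs, uses, mods)   # ['params.inc'] ['utils'] ['utils']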
diff --git a/waflib/Tools/flex.py b/waflib/Tools/flex.py
deleted file mode 100644
index 13f6207..0000000
--- a/waflib/Tools/flex.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import waflib.TaskGen,os,re
-def decide_ext(self,node):
- if'cxx'in self.features:
- return['.lex.cc']
- return['.lex.c']
-def flexfun(tsk):
- env=tsk.env
- bld=tsk.generator.bld
- wd=bld.variant_dir
- def to_list(xx):
- if isinstance(xx,str):return[xx]
- return xx
- tsk.last_cmd=lst=[]
- lst.extend(to_list(env['FLEX']))
- lst.extend(to_list(env['FLEXFLAGS']))
- inputs=[a.path_from(bld.bldnode)for a in tsk.inputs]
- if env.FLEX_MSYS:
- inputs=[x.replace(os.sep,'/')for x in inputs]
- lst.extend(inputs)
- lst=[x for x in lst if x]
- txt=bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0)
- tsk.outputs[0].write(txt.replace('\r\n','\n').replace('\r','\n'))
-waflib.TaskGen.declare_chain(name='flex',rule=flexfun,ext_in='.l',decider=decide_ext,)
-def configure(conf):
- conf.find_program('flex',var='FLEX')
- conf.env.FLEXFLAGS=['-t']
- if re.search(r"\\msys\\[0-9.]+\\bin\\flex.exe$",conf.env.FLEX):
- conf.env.FLEX_MSYS=True
diff --git a/waflib/Tools/g95.py b/waflib/Tools/g95.py
deleted file mode 100644
index 9bc331a..0000000
--- a/waflib/Tools/g95.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import re
-from waflib import Utils
-from waflib.Tools import fc,fc_config,fc_scan,ar
-from waflib.Configure import conf
-@conf
-def find_g95(conf):
- fc=conf.find_program('g95',var='FC')
- fc=conf.cmd_to_list(fc)
- conf.get_g95_version(fc)
- conf.env.FC_NAME='G95'
-@conf
-def g95_flags(conf):
- v=conf.env
- v['FCFLAGS_fcshlib']=['-fPIC']
- v['FORTRANMODFLAG']=['-fmod=','']
- v['FCFLAGS_DEBUG']=['-Werror']
-@conf
-def g95_modifier_win32(conf):
- fc_config.fortran_modifier_win32(conf)
-@conf
-def g95_modifier_cygwin(conf):
- fc_config.fortran_modifier_cygwin(conf)
-@conf
-def g95_modifier_darwin(conf):
- fc_config.fortran_modifier_darwin(conf)
-@conf
-def g95_modifier_platform(conf):
- dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform()
- g95_modifier_func=getattr(conf,'g95_modifier_'+dest_os,None)
- if g95_modifier_func:
- g95_modifier_func()
-@conf
-def get_g95_version(conf,fc):
- version_re=re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
- cmd=fc+['--version']
- out,err=fc_config.getoutput(conf,cmd,stdin=False)
- if out:
- match=version_re(out)
- else:
- match=version_re(err)
- if not match:
- conf.fatal('cannot determine g95 version')
- k=match.groupdict()
- conf.env['FC_VERSION']=(k['major'],k['minor'])
-def configure(conf):
- conf.find_g95()
- conf.find_ar()
- conf.fc_flags()
- conf.fc_add_flags()
- conf.g95_flags()
- conf.g95_modifier_platform()
diff --git a/waflib/Tools/gas.py b/waflib/Tools/gas.py
deleted file mode 100644
index b714ca1..0000000
--- a/waflib/Tools/gas.py
+++ /dev/null
@@ -1,12 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import waflib.Tools.asm
-from waflib.Tools import ar
-def configure(conf):
- conf.find_program(['gas','gcc'],var='AS')
- conf.env.AS_TGT_F=['-c','-o']
- conf.env.ASLNK_TGT_F=['-o']
- conf.find_ar()
- conf.load('asm')
diff --git a/waflib/Tools/gcc.py b/waflib/Tools/gcc.py
deleted file mode 100644
index adf11da..0000000
--- a/waflib/Tools/gcc.py
+++ /dev/null
@@ -1,97 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys
-from waflib import Configure,Options,Utils
-from waflib.Tools import ccroot,ar
-from waflib.Configure import conf
-@conf
-def find_gcc(conf):
- cc=conf.find_program(['gcc','cc'],var='CC')
- cc=conf.cmd_to_list(cc)
- conf.get_cc_version(cc,gcc=True)
- conf.env.CC_NAME='gcc'
- conf.env.CC=cc
-@conf
-def gcc_common_flags(conf):
- v=conf.env
- v['CC_SRC_F']=[]
- v['CC_TGT_F']=['-c','-o']
- if not v['LINK_CC']:v['LINK_CC']=v['CC']
- v['CCLNK_SRC_F']=[]
- v['CCLNK_TGT_F']=['-o']
- v['CPPPATH_ST']='-I%s'
- v['DEFINES_ST']='-D%s'
- v['LIB_ST']='-l%s'
- v['LIBPATH_ST']='-L%s'
- v['STLIB_ST']='-l%s'
- v['STLIBPATH_ST']='-L%s'
- v['RPATH_ST']='-Wl,-rpath,%s'
- v['SONAME_ST']='-Wl,-h,%s'
- v['SHLIB_MARKER']='-Wl,-Bdynamic'
- v['STLIB_MARKER']='-Wl,-Bstatic'
- v['cprogram_PATTERN']='%s'
- v['CFLAGS_cshlib']=['-fPIC']
- v['LINKFLAGS_cshlib']=['-shared']
- v['cshlib_PATTERN']='lib%s.so'
- v['LINKFLAGS_cstlib']=['-Wl,-Bstatic']
- v['cstlib_PATTERN']='lib%s.a'
- v['LINKFLAGS_MACBUNDLE']=['-bundle','-undefined','dynamic_lookup']
- v['CFLAGS_MACBUNDLE']=['-fPIC']
- v['macbundle_PATTERN']='%s.bundle'
-@conf
-def gcc_modifier_win32(conf):
- v=conf.env
- v['cprogram_PATTERN']='%s.exe'
- v['cshlib_PATTERN']='%s.dll'
- v['implib_PATTERN']='lib%s.dll.a'
- v['IMPLIB_ST']='-Wl,--out-implib,%s'
- v['CFLAGS_cshlib']=[]
- v.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
-@conf
-def gcc_modifier_cygwin(conf):
- gcc_modifier_win32(conf)
- v=conf.env
- v['cshlib_PATTERN']='cyg%s.dll'
- v.append_value('LINKFLAGS_cshlib',['-Wl,--enable-auto-image-base'])
- v['CFLAGS_cshlib']=[]
-@conf
-def gcc_modifier_darwin(conf):
- v=conf.env
- v['CFLAGS_cshlib']=['-fPIC','-compatibility_version','1','-current_version','1']
- v['LINKFLAGS_cshlib']=['-dynamiclib']
- v['cshlib_PATTERN']='lib%s.dylib'
- v['FRAMEWORKPATH_ST']='-F%s'
- v['FRAMEWORK_ST']=['-framework']
- v['ARCH_ST']=['-arch']
- v['LINKFLAGS_cstlib']=[]
- v['SHLIB_MARKER']=[]
- v['STLIB_MARKER']=[]
- v['SONAME_ST']=[]
-@conf
-def gcc_modifier_aix(conf):
- v=conf.env
- v['LINKFLAGS_cprogram']=['-Wl,-brtl']
- v['LINKFLAGS_cshlib']=['-shared','-Wl,-brtl,-bexpfull']
- v['SHLIB_MARKER']=[]
-@conf
-def gcc_modifier_hpux(conf):
- v=conf.env
- v['SHLIB_MARKER']=[]
- v['STLIB_MARKER']='-Bstatic'
- v['CFLAGS_cshlib']=['-fPIC','-DPIC']
- v['cshlib_PATTERN']='lib%s.sl'
-@conf
-def gcc_modifier_platform(conf):
- gcc_modifier_func=getattr(conf,'gcc_modifier_'+conf.env.DEST_OS,None)
- if gcc_modifier_func:
- gcc_modifier_func()
-def configure(conf):
- conf.find_gcc()
- conf.find_ar()
- conf.gcc_common_flags()
- conf.gcc_modifier_platform()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
diff --git a/waflib/Tools/gdc.py b/waflib/Tools/gdc.py
deleted file mode 100644
index da966ec..0000000
--- a/waflib/Tools/gdc.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import sys
-from waflib.Tools import ar,d
-from waflib.Configure import conf
-@conf
-def find_gdc(conf):
- conf.find_program('gdc',var='D')
- out=conf.cmd_and_log([conf.env.D,'--version'])
- if out.find("gdc ")==-1:
- conf.fatal("detected compiler is not gdc")
-@conf
-def common_flags_gdc(conf):
- v=conf.env
- v['DFLAGS']=[]
- v['D_SRC_F']=['-c']
- v['D_TGT_F']='-o%s'
- v['D_LINKER']=v['D']
- v['DLNK_SRC_F']=''
- v['DLNK_TGT_F']='-o%s'
- v['DINC_ST']='-I%s'
- v['DSHLIB_MARKER']=v['DSTLIB_MARKER']=''
- v['DSTLIB_ST']=v['DSHLIB_ST']='-l%s'
- v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L%s'
- v['LINKFLAGS_dshlib']=['-shared']
- v['DHEADER_ext']='.di'
- v.DFLAGS_d_with_header='-fintfc'
- v['D_HDR_F']='-fintfc-file=%s'
-def configure(conf):
- conf.find_gdc()
- conf.load('ar')
- conf.load('d')
- conf.common_flags_gdc()
- conf.d_platform_flags()
diff --git a/waflib/Tools/gfortran.py b/waflib/Tools/gfortran.py
deleted file mode 100644
index 854a93d..0000000
--- a/waflib/Tools/gfortran.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import re
-from waflib import Utils
-from waflib.Tools import fc,fc_config,fc_scan,ar
-from waflib.Configure import conf
-@conf
-def find_gfortran(conf):
- fc=conf.find_program(['gfortran','g77'],var='FC')
- fc=conf.cmd_to_list(fc)
- conf.get_gfortran_version(fc)
- conf.env.FC_NAME='GFORTRAN'
-@conf
-def gfortran_flags(conf):
- v=conf.env
- v['FCFLAGS_fcshlib']=['-fPIC']
- v['FORTRANMODFLAG']=['-J','']
- v['FCFLAGS_DEBUG']=['-Werror']
-@conf
-def gfortran_modifier_win32(conf):
- fc_config.fortran_modifier_win32(conf)
-@conf
-def gfortran_modifier_cygwin(conf):
- fc_config.fortran_modifier_cygwin(conf)
-@conf
-def gfortran_modifier_darwin(conf):
- fc_config.fortran_modifier_darwin(conf)
-@conf
-def gfortran_modifier_platform(conf):
- dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform()
- gfortran_modifier_func=getattr(conf,'gfortran_modifier_'+dest_os,None)
- if gfortran_modifier_func:
- gfortran_modifier_func()
-@conf
-def get_gfortran_version(conf,fc):
- version_re=re.compile(r"GNU\s*Fortran",re.I).search
- cmd=fc+['--version']
- out,err=fc_config.getoutput(conf,cmd,stdin=False)
- if out:match=version_re(out)
- else:match=version_re(err)
- if not match:
- conf.fatal('Could not determine the compiler type')
- cmd=fc+['-dM','-E','-']
- out,err=fc_config.getoutput(conf,cmd,stdin=True)
- if out.find('__GNUC__')<0:
- conf.fatal('Could not determine the compiler type')
- k={}
- out=out.split('\n')
- import shlex
- for line in out:
- lst=shlex.split(line)
- if len(lst)>2:
- key=lst[1]
- val=lst[2]
- k[key]=val
- def isD(var):
- return var in k
- def isT(var):
- return var in k and k[var]!='0'
- conf.env['FC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__'])
-def configure(conf):
- conf.find_gfortran()
- conf.find_ar()
- conf.fc_flags()
- conf.fc_add_flags()
- conf.gfortran_flags()
- conf.gfortran_modifier_platform()
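get_gfortran_version above identifies the compiler by dumping its predefined macros with '-dM -E -' and reading __GNUC__ and friends. A sketch of the same macro parsing, fed from a canned string rather than a live compiler run (in waf the text would come from fc_config.getoutput):

import shlex

# Abridged example of what a GNU compiler prints for -dM -E on empty input;
# the version numbers here are placeholders, not a measured result.
preprocessor_dump = """\
#define __GNUC__ 9
#define __GNUC_MINOR__ 4
#define __GNUC_PATCHLEVEL__ 0
#define __x86_64__ 1
"""

macros = {}
for line in preprocessor_dump.splitlines():
    parts = shlex.split(line)
    if len(parts) > 2:          # '#define', name, value
        macros[parts[1]] = parts[2]

version = (macros['__GNUC__'], macros['__GNUC_MINOR__'], macros['__GNUC_PATCHLEVEL__'])
print(version)                  # ('9', '4', '0')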
diff --git a/waflib/Tools/glib2.py b/waflib/Tools/glib2.py
deleted file mode 100644
index 1d75510..0000000
--- a/waflib/Tools/glib2.py
+++ /dev/null
@@ -1,173 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os
-from waflib import Task,Utils,Options,Errors,Logs
-from waflib.TaskGen import taskgen_method,before_method,after_method,feature
-@taskgen_method
-def add_marshal_file(self,filename,prefix):
- if not hasattr(self,'marshal_list'):
- self.marshal_list=[]
- self.meths.append('process_marshal')
- self.marshal_list.append((filename,prefix))
-@before_method('process_source')
-def process_marshal(self):
- for f,prefix in getattr(self,'marshal_list',[]):
- node=self.path.find_resource(f)
- if not node:
- raise Errors.WafError('file not found %r'%f)
- h_node=node.change_ext('.h')
- c_node=node.change_ext('.c')
- task=self.create_task('glib_genmarshal',node,[h_node,c_node])
- task.env.GLIB_GENMARSHAL_PREFIX=prefix
- self.source=self.to_nodes(getattr(self,'source',[]))
- self.source.append(c_node)
-class glib_genmarshal(Task.Task):
- def run(self):
- bld=self.inputs[0].__class__.ctx
- get=self.env.get_flat
- cmd1="%s %s --prefix=%s --header > %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[0].abspath())
- ret=bld.exec_command(cmd1)
- if ret:return ret
- c='''#include "%s"\n'''%self.outputs[0].name
- self.outputs[1].write(c)
- cmd2="%s %s --prefix=%s --body >> %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[1].abspath())
- return bld.exec_command(cmd2)
- vars=['GLIB_GENMARSHAL_PREFIX','GLIB_GENMARSHAL']
- color='BLUE'
- ext_out=['.h']
-@taskgen_method
-def add_enums_from_template(self,source='',target='',template='',comments=''):
- if not hasattr(self,'enums_list'):
- self.enums_list=[]
- self.meths.append('process_enums')
- self.enums_list.append({'source':source,'target':target,'template':template,'file-head':'','file-prod':'','file-tail':'','enum-prod':'','value-head':'','value-prod':'','value-tail':'','comments':comments})
-@taskgen_method
-def add_enums(self,source='',target='',file_head='',file_prod='',file_tail='',enum_prod='',value_head='',value_prod='',value_tail='',comments=''):
- if not hasattr(self,'enums_list'):
- self.enums_list=[]
- self.meths.append('process_enums')
- self.enums_list.append({'source':source,'template':'','target':target,'file-head':file_head,'file-prod':file_prod,'file-tail':file_tail,'enum-prod':enum_prod,'value-head':value_head,'value-prod':value_prod,'value-tail':value_tail,'comments':comments})
-@before_method('process_source')
-def process_enums(self):
- for enum in getattr(self,'enums_list',[]):
- task=self.create_task('glib_mkenums')
- env=task.env
- inputs=[]
- source_list=self.to_list(enum['source'])
- if not source_list:
- raise Errors.WafError('missing source '+str(enum))
- source_list=[self.path.find_resource(k)for k in source_list]
- inputs+=source_list
- env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list]
- if not enum['target']:
- raise Errors.WafError('missing target '+str(enum))
- tgt_node=self.path.find_or_declare(enum['target'])
- if tgt_node.name.endswith('.c'):
- self.source.append(tgt_node)
- env['GLIB_MKENUMS_TARGET']=tgt_node.abspath()
- options=[]
- if enum['template']:
- template_node=self.path.find_resource(enum['template'])
- options.append('--template %s'%(template_node.abspath()))
- inputs.append(template_node)
- params={'file-head':'--fhead','file-prod':'--fprod','file-tail':'--ftail','enum-prod':'--eprod','value-head':'--vhead','value-prod':'--vprod','value-tail':'--vtail','comments':'--comments'}
- for param,option in params.items():
- if enum[param]:
- options.append('%s %r'%(option,enum[param]))
- env['GLIB_MKENUMS_OPTIONS']=' '.join(options)
- task.set_inputs(inputs)
- task.set_outputs(tgt_node)
-class glib_mkenums(Task.Task):
- run_str='${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}'
- color='PINK'
- ext_out=['.h']
-@taskgen_method
-def add_settings_schemas(self,filename_list):
- if not hasattr(self,'settings_schema_files'):
- self.settings_schema_files=[]
- if not isinstance(filename_list,list):
- filename_list=[filename_list]
- self.settings_schema_files.extend(filename_list)
-@taskgen_method
-def add_settings_enums(self,namespace,filename_list):
- if hasattr(self,'settings_enum_namespace'):
- raise Errors.WafError("Tried to add gsettings enums to '%s' more than once"%self.name)
- self.settings_enum_namespace=namespace
- if type(filename_list)!='list':
- filename_list=[filename_list]
- self.settings_enum_files=filename_list
-def r_change_ext(self,ext):
- name=self.name
- k=name.rfind('.')
- if k>=0:
- name=name[:k]+ext
- else:
- name=name+ext
- return self.parent.find_or_declare([name])
-@feature('glib2')
-def process_settings(self):
- enums_tgt_node=[]
- install_files=[]
- settings_schema_files=getattr(self,'settings_schema_files',[])
- if settings_schema_files and not self.env['GLIB_COMPILE_SCHEMAS']:
- raise Errors.WafError("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")
- if hasattr(self,'settings_enum_files'):
- enums_task=self.create_task('glib_mkenums')
- source_list=self.settings_enum_files
- source_list=[self.path.find_resource(k)for k in source_list]
- enums_task.set_inputs(source_list)
- enums_task.env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list]
- target=self.settings_enum_namespace+'.enums.xml'
- tgt_node=self.path.find_or_declare(target)
- enums_task.set_outputs(tgt_node)
- enums_task.env['GLIB_MKENUMS_TARGET']=tgt_node.abspath()
- enums_tgt_node=[tgt_node]
- install_files.append(tgt_node)
- options='--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail " </@type@>" --ftail "</schemalist>" '%(self.settings_enum_namespace)
- enums_task.env['GLIB_MKENUMS_OPTIONS']=options
- for schema in settings_schema_files:
- schema_task=self.create_task('glib_validate_schema')
- schema_node=self.path.find_resource(schema)
- if not schema_node:
- raise Errors.WafError("Cannot find the schema file '%s'"%schema)
- install_files.append(schema_node)
- source_list=enums_tgt_node+[schema_node]
- schema_task.set_inputs(source_list)
- schema_task.env['GLIB_COMPILE_SCHEMAS_OPTIONS']=[("--schema-file="+k.abspath())for k in source_list]
- target_node=r_change_ext(schema_node,'.xml.valid')
- schema_task.set_outputs(target_node)
- schema_task.env['GLIB_VALIDATE_SCHEMA_OUTPUT']=target_node.abspath()
- def compile_schemas_callback(bld):
- if not bld.is_install:return
- Logs.pprint('YELLOW','Updating GSettings schema cache')
- command=Utils.subst_vars("${GLIB_COMPILE_SCHEMAS} ${GSETTINGSSCHEMADIR}",bld.env)
- ret=self.bld.exec_command(command)
- if self.bld.is_install:
- if not self.env['GSETTINGSSCHEMADIR']:
- raise Errors.WafError('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')
- if install_files:
- self.bld.install_files(self.env['GSETTINGSSCHEMADIR'],install_files)
- if not hasattr(self.bld,'_compile_schemas_registered'):
- self.bld.add_post_fun(compile_schemas_callback)
- self.bld._compile_schemas_registered=True
-class glib_validate_schema(Task.Task):
- run_str='rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
- color='PINK'
-def configure(conf):
- conf.find_program('glib-genmarshal',var='GLIB_GENMARSHAL')
- conf.find_perl_program('glib-mkenums',var='GLIB_MKENUMS')
- conf.find_program('glib-compile-schemas',var='GLIB_COMPILE_SCHEMAS',mandatory=False)
- def getstr(varname):
- return getattr(Options.options,varname,getattr(conf.env,varname,''))
- gsettingsschemadir=getstr('GSETTINGSSCHEMADIR')
- if not gsettingsschemadir:
- datadir=getstr('DATADIR')
- if not datadir:
- prefix=conf.env['PREFIX']
- datadir=os.path.join(prefix,'share')
- gsettingsschemadir=os.path.join(datadir,'glib-2.0','schemas')
- conf.env['GSETTINGSSCHEMADIR']=gsettingsschemadir
-def options(opt):
- opt.add_option('--gsettingsschemadir',help='GSettings schema location [Default: ${datadir}/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR')
diff --git a/waflib/Tools/gnu_dirs.py b/waflib/Tools/gnu_dirs.py
deleted file mode 100644
index 9c8a304..0000000
--- a/waflib/Tools/gnu_dirs.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os
-from waflib import Utils,Options,Context
-_options=[x.split(', ')for x in'''
-bindir, user executables, ${EXEC_PREFIX}/bin
-sbindir, system admin executables, ${EXEC_PREFIX}/sbin
-libexecdir, program executables, ${EXEC_PREFIX}/libexec
-sysconfdir, read-only single-machine data, ${PREFIX}/etc
-sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
-localstatedir, modifiable single-machine data, ${PREFIX}/var
-libdir, object code libraries, ${EXEC_PREFIX}/lib
-includedir, C header files, ${PREFIX}/include
-oldincludedir, C header files for non-gcc, /usr/include
-datarootdir, read-only arch.-independent data root, ${PREFIX}/share
-datadir, read-only architecture-independent data, ${DATAROOTDIR}
-infodir, info documentation, ${DATAROOTDIR}/info
-localedir, locale-dependent data, ${DATAROOTDIR}/locale
-mandir, man documentation, ${DATAROOTDIR}/man
-docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
-htmldir, html documentation, ${DOCDIR}
-dvidir, dvi documentation, ${DOCDIR}
-pdfdir, pdf documentation, ${DOCDIR}
-psdir, ps documentation, ${DOCDIR}
-'''.split('\n')if x]
-def configure(conf):
- def get_param(varname,default):
- return getattr(Options.options,varname,'')or default
- env=conf.env
- env.LIBDIR=env.BINDIR=[]
- env.EXEC_PREFIX=get_param('EXEC_PREFIX',env.PREFIX)
- env.PACKAGE=getattr(Context.g_module,'APPNAME',None)or env.PACKAGE
- complete=False
- iter=0
- while not complete and iter<len(_options)+1:
- iter+=1
- complete=True
- for name,help,default in _options:
- name=name.upper()
- if not env[name]:
- try:
- env[name]=Utils.subst_vars(get_param(name,default).replace('/',os.sep),env)
- except TypeError:
- complete=False
- if not complete:
- lst=[name for name,_,_ in _options if not env[name.upper()]]
- raise conf.errors.WafError('Variable substitution failure %r'%lst)
-def options(opt):
- inst_dir=opt.add_option_group('Installation directories','By default, "waf install" will put the files in\
- "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
- than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
- for k in('--prefix','--destdir'):
- option=opt.parser.get_option(k)
- if option:
- opt.parser.remove_option(k)
- inst_dir.add_option(option)
- inst_dir.add_option('--exec-prefix',help='installation prefix [Default: ${PREFIX}]',default='',dest='EXEC_PREFIX')
- dirs_options=opt.add_option_group('Pre-defined installation directories','')
- for name,help,default in _options:
- option_name='--'+name
- str_default=default
- str_help='%s [Default: %s]'%(help,str_default)
- dirs_options.add_option(option_name,help=str_help,default='',dest=name.upper())
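The configure loop in gnu_dirs above resolves the installation directories by repeatedly substituting ${VAR} references until every entry is defined (BINDIR depends on EXEC_PREFIX, DATADIR on DATAROOTDIR, and so on). A small sketch of that fixed-point idea, with a hand-rolled substituter standing in for Utils.subst_vars:

import re

defaults = {
    'PREFIX': '/usr/local',
    'EXEC_PREFIX': '${PREFIX}',
    'BINDIR': '${EXEC_PREFIX}/bin',
    'DATAROOTDIR': '${PREFIX}/share',
    'MANDIR': '${DATAROOTDIR}/man',
}

def subst(value, env):
    # Replace ${NAME} with env[NAME]; KeyError means NAME is not resolved yet.
    return re.sub(r'\$\{(\w+)\}', lambda m: env[m.group(1)], value)

env = {}
while len(env) < len(defaults):
    progress = False
    for name, default in defaults.items():
        if name in env:
            continue
        try:
            env[name] = subst(default, env)
            progress = True
        except KeyError:
            pass                  # depends on a variable resolved in a later pass
    if not progress:
        raise RuntimeError('unresolvable variables: %r' % (set(defaults) - set(env)))

print(env['BINDIR'], env['MANDIR'])   # /usr/local/bin /usr/local/share/man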
diff --git a/waflib/Tools/gxx.py b/waflib/Tools/gxx.py
deleted file mode 100644
index 8257017..0000000
--- a/waflib/Tools/gxx.py
+++ /dev/null
@@ -1,97 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys
-from waflib import Configure,Options,Utils
-from waflib.Tools import ccroot,ar
-from waflib.Configure import conf
-@conf
-def find_gxx(conf):
- cxx=conf.find_program(['g++','c++'],var='CXX')
- cxx=conf.cmd_to_list(cxx)
- conf.get_cc_version(cxx,gcc=True)
- conf.env.CXX_NAME='gcc'
- conf.env.CXX=cxx
-@conf
-def gxx_common_flags(conf):
- v=conf.env
- v['CXX_SRC_F']=[]
- v['CXX_TGT_F']=['-c','-o']
- if not v['LINK_CXX']:v['LINK_CXX']=v['CXX']
- v['CXXLNK_SRC_F']=[]
- v['CXXLNK_TGT_F']=['-o']
- v['CPPPATH_ST']='-I%s'
- v['DEFINES_ST']='-D%s'
- v['LIB_ST']='-l%s'
- v['LIBPATH_ST']='-L%s'
- v['STLIB_ST']='-l%s'
- v['STLIBPATH_ST']='-L%s'
- v['RPATH_ST']='-Wl,-rpath,%s'
- v['SONAME_ST']='-Wl,-h,%s'
- v['SHLIB_MARKER']='-Wl,-Bdynamic'
- v['STLIB_MARKER']='-Wl,-Bstatic'
- v['cxxprogram_PATTERN']='%s'
- v['CXXFLAGS_cxxshlib']=['-fPIC']
- v['LINKFLAGS_cxxshlib']=['-shared']
- v['cxxshlib_PATTERN']='lib%s.so'
- v['LINKFLAGS_cxxstlib']=['-Wl,-Bstatic']
- v['cxxstlib_PATTERN']='lib%s.a'
- v['LINKFLAGS_MACBUNDLE']=['-bundle','-undefined','dynamic_lookup']
- v['CXXFLAGS_MACBUNDLE']=['-fPIC']
- v['macbundle_PATTERN']='%s.bundle'
-@conf
-def gxx_modifier_win32(conf):
- v=conf.env
- v['cxxprogram_PATTERN']='%s.exe'
- v['cxxshlib_PATTERN']='%s.dll'
- v['implib_PATTERN']='lib%s.dll.a'
- v['IMPLIB_ST']='-Wl,--out-implib,%s'
- v['CXXFLAGS_cxxshlib']=[]
- v.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
-@conf
-def gxx_modifier_cygwin(conf):
- gxx_modifier_win32(conf)
- v=conf.env
- v['cxxshlib_PATTERN']='cyg%s.dll'
- v.append_value('LINKFLAGS_cxxshlib',['-Wl,--enable-auto-image-base'])
- v['CXXFLAGS_cxxshlib']=[]
-@conf
-def gxx_modifier_darwin(conf):
- v=conf.env
- v['CXXFLAGS_cxxshlib']=['-fPIC','-compatibility_version','1','-current_version','1']
- v['LINKFLAGS_cxxshlib']=['-dynamiclib']
- v['cxxshlib_PATTERN']='lib%s.dylib'
- v['FRAMEWORKPATH_ST']='-F%s'
- v['FRAMEWORK_ST']=['-framework']
- v['ARCH_ST']=['-arch']
- v['LINKFLAGS_cxxstlib']=[]
- v['SHLIB_MARKER']=[]
- v['STLIB_MARKER']=[]
- v['SONAME_ST']=[]
-@conf
-def gxx_modifier_aix(conf):
- v=conf.env
- v['LINKFLAGS_cxxprogram']=['-Wl,-brtl']
- v['LINKFLAGS_cxxshlib']=['-shared','-Wl,-brtl,-bexpfull']
- v['SHLIB_MARKER']=[]
-@conf
-def gxx_modifier_hpux(conf):
- v=conf.env
- v['SHLIB_MARKER']=[]
- v['STLIB_MARKER']='-Bstatic'
- v['CFLAGS_cxxshlib']=['-fPIC','-DPIC']
- v['cxxshlib_PATTERN']='lib%s.sl'
-@conf
-def gxx_modifier_platform(conf):
- gxx_modifier_func=getattr(conf,'gxx_modifier_'+conf.env.DEST_OS,None)
- if gxx_modifier_func:
- gxx_modifier_func()
-def configure(conf):
- conf.find_gxx()
- conf.find_ar()
- conf.gxx_common_flags()
- conf.gxx_modifier_platform()
- conf.cxx_load_tools()
- conf.cxx_add_flags()
- conf.link_add_flags()
diff --git a/waflib/Tools/icc.py b/waflib/Tools/icc.py
deleted file mode 100644
index 7c75e18..0000000
--- a/waflib/Tools/icc.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys
-from waflib.Tools import ccroot,ar,gcc
-from waflib.Configure import conf
-@conf
-def find_icc(conf):
- if sys.platform=='cygwin':
- conf.fatal('The Intel compiler does not work on Cygwin')
- v=conf.env
- cc=None
- if v['CC']:cc=v['CC']
- elif'CC'in conf.environ:cc=conf.environ['CC']
- if not cc:cc=conf.find_program('icc',var='CC')
- if not cc:cc=conf.find_program('ICL',var='CC')
- if not cc:conf.fatal('Intel C Compiler (icc) was not found')
- cc=conf.cmd_to_list(cc)
- conf.get_cc_version(cc,icc=True)
- v['CC']=cc
- v['CC_NAME']='icc'
-def configure(conf):
- conf.find_icc()
- conf.find_ar()
- conf.gcc_common_flags()
- conf.gcc_modifier_platform()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
diff --git a/waflib/Tools/icpc.py b/waflib/Tools/icpc.py
deleted file mode 100644
index 14a5325..0000000
--- a/waflib/Tools/icpc.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys
-from waflib.Tools import ccroot,ar,gxx
-from waflib.Configure import conf
-@conf
-def find_icpc(conf):
- if sys.platform=='cygwin':
- conf.fatal('The Intel compiler does not work on Cygwin')
- v=conf.env
- cxx=None
- if v['CXX']:cxx=v['CXX']
- elif'CXX'in conf.environ:cxx=conf.environ['CXX']
- if not cxx:cxx=conf.find_program('icpc',var='CXX')
- if not cxx:conf.fatal('Intel C++ Compiler (icpc) was not found')
- cxx=conf.cmd_to_list(cxx)
- conf.get_cc_version(cxx,icc=True)
- v['CXX']=cxx
- v['CXX_NAME']='icc'
-def configure(conf):
- conf.find_icpc()
- conf.find_ar()
- conf.gxx_common_flags()
- conf.gxx_modifier_platform()
- conf.cxx_load_tools()
- conf.cxx_add_flags()
- conf.link_add_flags()
diff --git a/waflib/Tools/ifort.py b/waflib/Tools/ifort.py
deleted file mode 100644
index a9f2528..0000000
--- a/waflib/Tools/ifort.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import re
-from waflib import Utils
-from waflib.Tools import fc,fc_config,fc_scan,ar
-from waflib.Configure import conf
-@conf
-def find_ifort(conf):
- fc=conf.find_program('ifort',var='FC')
- fc=conf.cmd_to_list(fc)
- conf.get_ifort_version(fc)
- conf.env.FC_NAME='IFORT'
-@conf
-def ifort_modifier_cygwin(conf):
- raise NotImplementedError("Ifort on cygwin not yet implemented")
-@conf
-def ifort_modifier_win32(conf):
- fc_config.fortran_modifier_win32(conf)
-@conf
-def ifort_modifier_darwin(conf):
- fc_config.fortran_modifier_darwin(conf)
-@conf
-def ifort_modifier_platform(conf):
- dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform()
- ifort_modifier_func=getattr(conf,'ifort_modifier_'+dest_os,None)
- if ifort_modifier_func:
- ifort_modifier_func()
-@conf
-def get_ifort_version(conf,fc):
- version_re=re.compile(r"ifort\s*\(IFORT\)\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
- cmd=fc+['--version']
- out,err=fc_config.getoutput(conf,cmd,stdin=False)
- if out:
- match=version_re(out)
- else:
- match=version_re(err)
- if not match:
- conf.fatal('cannot determine ifort version.')
- k=match.groupdict()
- conf.env['FC_VERSION']=(k['major'],k['minor'])
-def configure(conf):
- conf.find_ifort()
- conf.find_program('xiar',var='AR')
- conf.env.ARFLAGS='rcs'
- conf.fc_flags()
- conf.fc_add_flags()
- conf.ifort_modifier_platform()
diff --git a/waflib/Tools/intltool.py b/waflib/Tools/intltool.py
deleted file mode 100644
index d558674..0000000
--- a/waflib/Tools/intltool.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,re
-from waflib import Configure,TaskGen,Task,Utils,Runner,Options,Build,Logs
-import waflib.Tools.ccroot
-from waflib.TaskGen import feature,before_method
-from waflib.Logs import error
-@before_method('process_source')
-@feature('intltool_in')
-def apply_intltool_in_f(self):
- try:self.meths.remove('process_source')
- except ValueError:pass
- if not self.env.LOCALEDIR:
- self.env.LOCALEDIR=self.env.PREFIX+'/share/locale'
- for i in self.to_list(self.source):
- node=self.path.find_resource(i)
- podir=getattr(self,'podir','po')
- podirnode=self.path.find_dir(podir)
- if not podirnode:
- error("could not find the podir %r"%podir)
- continue
- cache=getattr(self,'intlcache','.intlcache')
- self.env['INTLCACHE']=os.path.join(self.path.bldpath(),podir,cache)
- self.env['INTLPODIR']=podirnode.bldpath()
- self.env['INTLFLAGS']=getattr(self,'flags',['-q','-u','-c'])
- task=self.create_task('intltool',node,node.change_ext(''))
- inst=getattr(self,'install_path','${LOCALEDIR}')
- if inst:
- self.bld.install_files(inst,task.outputs)
-@feature('intltool_po')
-def apply_intltool_po(self):
- try:self.meths.remove('process_source')
- except ValueError:pass
- if not self.env.LOCALEDIR:
- self.env.LOCALEDIR=self.env.PREFIX+'/share/locale'
- appname=getattr(self,'appname','set_your_app_name')
- podir=getattr(self,'podir','')
- inst=getattr(self,'install_path','${LOCALEDIR}')
- linguas=self.path.find_node(os.path.join(podir,'LINGUAS'))
- if linguas:
- file=open(linguas.abspath())
- langs=[]
- for line in file.readlines():
- if not line.startswith('#'):
- langs+=line.split()
- file.close()
- re_linguas=re.compile('[-a-zA-Z_ at .]+')
- for lang in langs:
- if re_linguas.match(lang):
- node=self.path.find_resource(os.path.join(podir,re_linguas.match(lang).group()+'.po'))
- task=self.create_task('po',node,node.change_ext('.mo'))
- if inst:
- filename=task.outputs[0].name
- (langname,ext)=os.path.splitext(filename)
- inst_file=inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+appname+'.mo'
- self.bld.install_as(inst_file,task.outputs[0],chmod=getattr(self,'chmod',Utils.O644),env=task.env)
- else:
- Logs.pprint('RED',"Error no LINGUAS file found in po directory")
-class po(Task.Task):
- run_str='${MSGFMT} -o ${TGT} ${SRC}'
- color='BLUE'
-class intltool(Task.Task):
- run_str='${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
- color='BLUE'
-def configure(conf):
- conf.find_program('msgfmt',var='MSGFMT')
- conf.find_perl_program('intltool-merge',var='INTLTOOL')
- prefix=conf.env.PREFIX
- datadir=conf.env.DATADIR
- if not datadir:
- datadir=os.path.join(prefix,'share')
- conf.define('LOCALEDIR',os.path.join(datadir,'locale').replace('\\','\\\\'))
- conf.define('DATADIR',datadir.replace('\\','\\\\'))
- if conf.env.CC or conf.env.CXX:
- conf.check(header_name='locale.h')
diff --git a/waflib/Tools/irixcc.py b/waflib/Tools/irixcc.py
deleted file mode 100644
index 8dbdfca..0000000
--- a/waflib/Tools/irixcc.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os
-from waflib import Utils
-from waflib.Tools import ccroot,ar
-from waflib.Configure import conf
-@conf
-def find_irixcc(conf):
- v=conf.env
- cc=None
- if v['CC']:cc=v['CC']
- elif'CC'in conf.environ:cc=conf.environ['CC']
- if not cc:cc=conf.find_program('cc',var='CC')
- if not cc:conf.fatal('irixcc was not found')
- cc=conf.cmd_to_list(cc)
- try:
- conf.cmd_and_log(cc+['-version'])
- except Exception:
- conf.fatal('%r -version could not be executed'%cc)
- v['CC']=cc
- v['CC_NAME']='irix'
-@conf
-def irixcc_common_flags(conf):
- v=conf.env
- v['CC_SRC_F']=''
- v['CC_TGT_F']=['-c','-o']
- v['CPPPATH_ST']='-I%s'
- v['DEFINES_ST']='-D%s'
- if not v['LINK_CC']:v['LINK_CC']=v['CC']
- v['CCLNK_SRC_F']=''
- v['CCLNK_TGT_F']=['-o']
- v['LIB_ST']='-l%s'
- v['LIBPATH_ST']='-L%s'
- v['STLIB_ST']='-l%s'
- v['STLIBPATH_ST']='-L%s'
- v['cprogram_PATTERN']='%s'
- v['cshlib_PATTERN']='lib%s.so'
- v['cstlib_PATTERN']='lib%s.a'
-def configure(conf):
- conf.find_irixcc()
- conf.find_cpp()
- conf.find_ar()
- conf.irixcc_common_flags()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
diff --git a/waflib/Tools/javaw.py b/waflib/Tools/javaw.py
deleted file mode 100644
index 5750f40..0000000
--- a/waflib/Tools/javaw.py
+++ /dev/null
@@ -1,311 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,re,tempfile,shutil
-from waflib import TaskGen,Task,Utils,Options,Build,Errors,Node,Logs
-from waflib.Configure import conf
-from waflib.TaskGen import feature,before_method,after_method
-from waflib.Tools import ccroot
-ccroot.USELIB_VARS['javac']=set(['CLASSPATH','JAVACFLAGS'])
-SOURCE_RE='**/*.java'
-JAR_RE='**/*'
-class_check_source='''
-public class Test {
- public static void main(String[] argv) {
- Class lib;
- if (argv.length < 1) {
- System.err.println("Missing argument");
- System.exit(77);
- }
- try {
- lib = Class.forName(argv[0]);
- } catch (ClassNotFoundException e) {
- System.err.println("ClassNotFoundException");
- System.exit(1);
- }
- lib = null;
- System.exit(0);
- }
-}
-'''
-@feature('javac')
-@before_method('process_source')
-def apply_java(self):
- Utils.def_attrs(self,jarname='',classpath='',sourcepath='.',srcdir='.',jar_mf_attributes={},jar_mf_classpath=[])
- nodes_lst=[]
- outdir=getattr(self,'outdir',None)
- if outdir:
- if not isinstance(outdir,Node.Node):
- outdir=self.path.get_bld().make_node(self.outdir)
- else:
- outdir=self.path.get_bld()
- outdir.mkdir()
- self.outdir=outdir
- self.env['OUTDIR']=outdir.abspath()
- self.javac_task=tsk=self.create_task('javac')
- tmp=[]
- srcdir=getattr(self,'srcdir','')
- if isinstance(srcdir,Node.Node):
- srcdir=[srcdir]
- for x in Utils.to_list(srcdir):
- if isinstance(x,Node.Node):
- y=x
- else:
- y=self.path.find_dir(x)
- if not y:
- self.bld.fatal('Could not find the folder %s from %s'%(x,self.path))
- tmp.append(y)
- tsk.srcdir=tmp
- if getattr(self,'compat',None):
- tsk.env.append_value('JAVACFLAGS',['-source',self.compat])
- if hasattr(self,'sourcepath'):
- fold=[isinstance(x,Node.Node)and x or self.path.find_dir(x)for x in self.to_list(self.sourcepath)]
- names=os.pathsep.join([x.srcpath()for x in fold])
- else:
- names=[x.srcpath()for x in tsk.srcdir]
- if names:
- tsk.env.append_value('JAVACFLAGS',['-sourcepath',names])
-@feature('javac')
-@after_method('apply_java')
-def use_javac_files(self):
- lst=[]
- self.uselib=self.to_list(getattr(self,'uselib',[]))
- names=self.to_list(getattr(self,'use',[]))
- get=self.bld.get_tgen_by_name
- for x in names:
- try:
- y=get(x)
- except Exception:
- self.uselib.append(x)
- else:
- y.post()
- lst.append(y.jar_task.outputs[0].abspath())
- self.javac_task.set_run_after(y.jar_task)
- if lst:
- self.env.append_value('CLASSPATH',lst)
-@feature('javac')
-@after_method('apply_java','propagate_uselib_vars','use_javac_files')
-def set_classpath(self):
- self.env.append_value('CLASSPATH',getattr(self,'classpath',[]))
- for x in self.tasks:
- x.env.CLASSPATH=os.pathsep.join(self.env.CLASSPATH)+os.pathsep
-@feature('jar')
-@after_method('apply_java','use_javac_files')
-@before_method('process_source')
-def jar_files(self):
- destfile=getattr(self,'destfile','test.jar')
- jaropts=getattr(self,'jaropts',[])
- manifest=getattr(self,'manifest',None)
- basedir=getattr(self,'basedir',None)
- if basedir:
- if not isinstance(self.basedir,Node.Node):
- basedir=self.path.get_bld().make_node(basedir)
- else:
- basedir=self.path.get_bld()
- if not basedir:
- self.bld.fatal('Could not find the basedir %r for %r'%(self.basedir,self))
- self.jar_task=tsk=self.create_task('jar_create')
- if manifest:
- jarcreate=getattr(self,'jarcreate','cfm')
- node=self.path.find_node(manifest)
- tsk.dep_nodes.append(node)
- jaropts.insert(0,node.abspath())
- else:
- jarcreate=getattr(self,'jarcreate','cf')
- if not isinstance(destfile,Node.Node):
- destfile=self.path.find_or_declare(destfile)
- if not destfile:
- self.bld.fatal('invalid destfile %r for %r'%(destfile,self))
- tsk.set_outputs(destfile)
- tsk.basedir=basedir
- jaropts.append('-C')
- jaropts.append(basedir.bldpath())
- jaropts.append('.')
- tsk.env['JAROPTS']=jaropts
- tsk.env['JARCREATE']=jarcreate
- if getattr(self,'javac_task',None):
- tsk.set_run_after(self.javac_task)
-@feature('jar')
-@after_method('jar_files')
-def use_jar_files(self):
- lst=[]
- self.uselib=self.to_list(getattr(self,'uselib',[]))
- names=self.to_list(getattr(self,'use',[]))
- get=self.bld.get_tgen_by_name
- for x in names:
- try:
- y=get(x)
- except Exception:
- self.uselib.append(x)
- else:
- y.post()
- self.jar_task.run_after.update(y.tasks)
-class jar_create(Task.Task):
- color='GREEN'
- run_str='${JAR} ${JARCREATE} ${TGT} ${JAROPTS}'
- def runnable_status(self):
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
- if not self.inputs:
- global JAR_RE
- try:
- self.inputs=[x for x in self.basedir.ant_glob(JAR_RE,remove=False)if id(x)!=id(self.outputs[0])]
- except Exception:
- raise Errors.WafError('Could not find the basedir %r for %r'%(self.basedir,self))
- return super(jar_create,self).runnable_status()
-class javac(Task.Task):
- color='BLUE'
- nocache=True
- vars=['CLASSPATH','JAVACFLAGS','JAVAC','OUTDIR']
- def runnable_status(self):
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
- if not self.inputs:
- global SOURCE_RE
- self.inputs=[]
- for x in self.srcdir:
- self.inputs.extend(x.ant_glob(SOURCE_RE,remove=False))
- return super(javac,self).runnable_status()
- def run(self):
- env=self.env
- gen=self.generator
- bld=gen.bld
- wd=bld.bldnode.abspath()
- def to_list(xx):
- if isinstance(xx,str):return[xx]
- return xx
- cmd=[]
- cmd.extend(to_list(env['JAVAC']))
- cmd.extend(['-classpath'])
- cmd.extend(to_list(env['CLASSPATH']))
- cmd.extend(['-d'])
- cmd.extend(to_list(env['OUTDIR']))
- cmd.extend(to_list(env['JAVACFLAGS']))
- files=[a.path_from(bld.bldnode)for a in self.inputs]
- tmp=None
- try:
- if len(str(files))+len(str(cmd))>8192:
- (fd,tmp)=tempfile.mkstemp(dir=bld.bldnode.abspath())
- try:
- os.write(fd,'\n'.join(files))
- finally:
- if tmp:
- os.close(fd)
- if Logs.verbose:
- Logs.debug('runner: %r'%(cmd+files))
- cmd.append('@'+tmp)
- else:
- cmd+=files
- ret=self.exec_command(cmd,cwd=wd,env=env.env or None)
- finally:
- if tmp:
- os.unlink(tmp)
- return ret
- def post_run(self):
- for n in self.generator.outdir.ant_glob('**/*.class'):
- n.sig=Utils.h_file(n.abspath())
- self.generator.bld.task_sigs[self.uid()]=self.cache_sig
-@feature('javadoc')
-@after_method('process_rule')
-def create_javadoc(self):
- tsk=self.create_task('javadoc')
- tsk.classpath=getattr(self,'classpath',[])
- self.javadoc_package=Utils.to_list(self.javadoc_package)
- if not isinstance(self.javadoc_output,Node.Node):
- self.javadoc_output=self.bld.path.find_or_declare(self.javadoc_output)
-class javadoc(Task.Task):
- color='BLUE'
- def __str__(self):
- return'%s: %s -> %s\n'%(self.__class__.__name__,self.generator.srcdir,self.generator.javadoc_output)
- def run(self):
- env=self.env
- bld=self.generator.bld
- wd=bld.bldnode.abspath()
- srcpath=self.generator.path.abspath()+os.sep+self.generator.srcdir
- srcpath+=os.pathsep
- srcpath+=self.generator.path.get_bld().abspath()+os.sep+self.generator.srcdir
- classpath=env.CLASSPATH
- classpath+=os.pathsep
- classpath+=os.pathsep.join(self.classpath)
- classpath="".join(classpath)
- self.last_cmd=lst=[]
- lst.extend(Utils.to_list(env['JAVADOC']))
- lst.extend(['-d',self.generator.javadoc_output.abspath()])
- lst.extend(['-sourcepath',srcpath])
- lst.extend(['-classpath',classpath])
- lst.extend(['-subpackages'])
- lst.extend(self.generator.javadoc_package)
- lst=[x for x in lst if x]
- self.generator.bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0)
- def post_run(self):
- nodes=self.generator.javadoc_output.ant_glob('**')
- for x in nodes:
- x.sig=Utils.h_file(x.abspath())
- self.generator.bld.task_sigs[self.uid()]=self.cache_sig
-def configure(self):
- java_path=self.environ['PATH'].split(os.pathsep)
- v=self.env
- if'JAVA_HOME'in self.environ:
- java_path=[os.path.join(self.environ['JAVA_HOME'],'bin')]+java_path
- self.env['JAVA_HOME']=[self.environ['JAVA_HOME']]
- for x in'javac java jar javadoc'.split():
- self.find_program(x,var=x.upper(),path_list=java_path)
- self.env[x.upper()]=self.cmd_to_list(self.env[x.upper()])
- if'CLASSPATH'in self.environ:
- v['CLASSPATH']=self.environ['CLASSPATH']
- if not v['JAR']:self.fatal('jar is required for making java packages')
- if not v['JAVAC']:self.fatal('javac is required for compiling java classes')
- v['JARCREATE']='cf'
- v['JAVACFLAGS']=[]
-@conf
-def check_java_class(self,classname,with_classpath=None):
- javatestdir='.waf-javatest'
- classpath=javatestdir
- if self.env['CLASSPATH']:
- classpath+=os.pathsep+self.env['CLASSPATH']
- if isinstance(with_classpath,str):
- classpath+=os.pathsep+with_classpath
- shutil.rmtree(javatestdir,True)
- os.mkdir(javatestdir)
- java_file=open(os.path.join(javatestdir,'Test.java'),'w')
- java_file.write(class_check_source)
- java_file.close()
- self.exec_command(self.env['JAVAC']+[os.path.join(javatestdir,'Test.java')],shell=False)
- cmd=self.env['JAVA']+['-cp',classpath,'Test',classname]
- self.to_log("%s\n"%str(cmd))
- found=self.exec_command(cmd,shell=False)
- self.msg('Checking for java class %s'%classname,not found)
- shutil.rmtree(javatestdir,True)
- return found
-@conf
-def check_jni_headers(conf):
- if not conf.env.CC_NAME and not conf.env.CXX_NAME:
- conf.fatal('load a compiler first (gcc, g++, ..)')
- if not conf.env.JAVA_HOME:
- conf.fatal('set JAVA_HOME in the system environment')
- javaHome=conf.env['JAVA_HOME'][0]
- dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/include')
- if dir is None:
- dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/../Headers')
- if dir is None:
- conf.fatal('JAVA_HOME does not seem to be set properly')
- f=dir.ant_glob('**/(jni|jni_md).h')
- incDirs=[x.parent.abspath()for x in f]
- dir=conf.root.find_dir(conf.env.JAVA_HOME[0])
- f=dir.ant_glob('**/*jvm.(so|dll|dylib)')
- libDirs=[x.parent.abspath()for x in f]or[javaHome]
- f=dir.ant_glob('**/*jvm.(lib)')
- if f:
- libDirs=[[x,y.parent.abspath()]for x in libDirs for y in f]
- for d in libDirs:
- try:
- conf.check(header_name='jni.h',define_name='HAVE_JNI_H',lib='jvm',libpath=d,includes=incDirs,uselib_store='JAVA',uselib='JAVA')
- except Exception:
- pass
- else:
- break
- else:
- conf.fatal('could not find lib jvm in %r (see config.log)'%libDirs)
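The javac task above sidesteps over-long command lines by writing the source list to a temporary file and passing it to the compiler as '@file' once the combined length exceeds 8192 characters. A sketch of that response-file pattern; the paths are hypothetical and javac itself is not executed here:

import os
import tempfile

def build_javac_cmd(javac, files, limit=8192):
    """Return (cmd, tmp_path); tmp_path is a response file to delete after the run."""
    cmd = list(javac)
    if len(str(files)) + len(str(cmd)) > limit:
        # Too long for one command line: write one source path per line and
        # hand the list to the compiler as '@<file>', as javac.run does above.
        fd, tmp = tempfile.mkstemp(suffix='.txt')
        with os.fdopen(fd, 'w') as handle:
            handle.write('\n'.join(files))
        cmd.append('@' + tmp)
        return cmd, tmp
    return cmd + files, None

cmd, tmp = build_javac_cmd(['javac', '-d', 'build/classes'],
                           ['src/A.java', 'src/B.java'])
print(cmd)          # short list: sources appended directly, no response file
if tmp:
    os.unlink(tmp)  # caller removes the response file once the compiler has run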
diff --git a/waflib/Tools/kde4.py b/waflib/Tools/kde4.py
deleted file mode 100644
index cd51f5f..0000000
--- a/waflib/Tools/kde4.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys,re
-from waflib import Options,TaskGen,Task,Utils
-from waflib.TaskGen import feature,after_method
-@feature('msgfmt')
-def apply_msgfmt(self):
- for lang in self.to_list(self.langs):
- node=self.path.find_resource(lang+'.po')
- task=self.create_task('msgfmt',node,node.change_ext('.mo'))
- langname=lang.split('/')
- langname=langname[-1]
- inst=getattr(self,'install_path','${KDE4_LOCALE_INSTALL_DIR}')
- self.bld.install_as(inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+getattr(self,'appname','set_your_appname')+'.mo',task.outputs[0],chmod=getattr(self,'chmod',Utils.O644))
-class msgfmt(Task.Task):
- color='BLUE'
- run_str='${MSGFMT} ${SRC} -o ${TGT}'
-def configure(self):
- kdeconfig=self.find_program('kde4-config')
- prefix=self.cmd_and_log('%s --prefix'%kdeconfig).strip()
- fname='%s/share/apps/cmake/modules/KDELibsDependencies.cmake'%prefix
- try:os.stat(fname)
- except OSError:
- fname='%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake'%prefix
- try:os.stat(fname)
- except OSError:self.fatal('could not open %s'%fname)
- try:
- txt=Utils.readf(fname)
- except(OSError,IOError):
- self.fatal('could not read %s'%fname)
- txt=txt.replace('\\\n','\n')
- fu=re.compile('#(.*)\n')
- txt=fu.sub('',txt)
- setregexp=re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
- found=setregexp.findall(txt)
- for(_,key,val)in found:
- self.env[key]=val
- self.env['LIB_KDECORE']=['kdecore']
- self.env['LIB_KDEUI']=['kdeui']
- self.env['LIB_KIO']=['kio']
- self.env['LIB_KHTML']=['khtml']
- self.env['LIB_KPARTS']=['kparts']
- self.env['LIBPATH_KDECORE']=[os.path.join(self.env.KDE4_LIB_INSTALL_DIR,'kde4','devel'),self.env.KDE4_LIB_INSTALL_DIR]
- self.env['INCLUDES_KDECORE']=[self.env['KDE4_INCLUDE_INSTALL_DIR']]
- self.env.append_value('INCLUDES_KDECORE',[self.env['KDE4_INCLUDE_INSTALL_DIR']+os.sep+'KDE'])
- self.find_program('msgfmt',var='MSGFMT')
diff --git a/waflib/Tools/ldc2.py b/waflib/Tools/ldc2.py
deleted file mode 100644
index 25b99e5..0000000
--- a/waflib/Tools/ldc2.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import sys
-from waflib.Tools import ar,d
-from waflib.Configure import conf
-@conf
-def find_ldc2(conf):
- conf.find_program(['ldc2'],var='D')
- out=conf.cmd_and_log([conf.env.D,'-version'])
- if out.find("based on DMD v2.")==-1:
- conf.fatal("detected compiler is not ldc2")
-@conf
-def common_flags_ldc2(conf):
- v=conf.env
- v['D_SRC_F']=['-c']
- v['D_TGT_F']='-of%s'
- v['D_LINKER']=v['D']
- v['DLNK_SRC_F']=''
- v['DLNK_TGT_F']='-of%s'
- v['DINC_ST']='-I%s'
- v['DSHLIB_MARKER']=v['DSTLIB_MARKER']=''
- v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s'
- v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s'
- v['LINKFLAGS_dshlib']=['-L-shared']
- v['DHEADER_ext']='.di'
- v['DFLAGS_d_with_header']=['-H','-Hf']
- v['D_HDR_F']='%s'
- v['LINKFLAGS']=[]
- v['DFLAGS_dshlib']=['-relocation-model=pic']
-def configure(conf):
- conf.find_ldc2()
- conf.load('ar')
- conf.load('d')
- conf.common_flags_ldc2()
- conf.d_platform_flags()
diff --git a/waflib/Tools/lua.py b/waflib/Tools/lua.py
deleted file mode 100644
index a0a35fc..0000000
--- a/waflib/Tools/lua.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-from waflib.TaskGen import extension
-from waflib import Task,Utils
-@extension('.lua')
-def add_lua(self,node):
- tsk=self.create_task('luac',node,node.change_ext('.luac'))
- inst_to=getattr(self,'install_path',self.env.LUADIR and'${LUADIR}'or None)
- if inst_to:
- self.bld.install_files(inst_to,tsk.outputs)
- return tsk
-class luac(Task.Task):
- run_str='${LUAC} -s -o ${TGT} ${SRC}'
- color='PINK'
-def configure(conf):
- conf.find_program('luac',var='LUAC')
diff --git a/waflib/Tools/msvc.py b/waflib/Tools/msvc.py
deleted file mode 100644
index 289cd47..0000000
--- a/waflib/Tools/msvc.py
+++ /dev/null
@@ -1,726 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys,re,tempfile
-from waflib import Utils,Task,Logs,Options
-from waflib.Logs import debug,warn
-from waflib.TaskGen import after_method,feature
-from waflib.Configure import conf
-from waflib.Tools import ccroot,c,cxx,ar,winres
-g_msvc_systemlibs='''
-aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
-cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
-credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
-ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
-faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid
-gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
-kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
-mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
-msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
-netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
-odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32
-osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
-ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
-rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
-shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
-traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
-version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm
-wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
-'''.split()
-all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64')]
-all_wince_platforms=[('armv4','arm'),('armv4i','arm'),('mipsii','mips'),('mipsii_fp','mips'),('mipsiv','mips'),('mipsiv_fp','mips'),('sh4','sh'),('x86','cex86')]
-all_icl_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')]
-def options(opt):
- opt.add_option('--msvc_version',type='string',help='msvc version, eg: "msvc 10.0,msvc 9.0"',default='')
- opt.add_option('--msvc_targets',type='string',help='msvc targets, eg: "x64,arm"',default='')
-def setup_msvc(conf,versions,arch=False):
- platforms=getattr(Options.options,'msvc_targets','').split(',')
- if platforms==['']:
- platforms=Utils.to_list(conf.env['MSVC_TARGETS'])or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
- desired_versions=getattr(Options.options,'msvc_version','').split(',')
- if desired_versions==['']:
- desired_versions=conf.env['MSVC_VERSIONS']or[v for v,_ in versions][::-1]
- versiondict=dict(versions)
- for version in desired_versions:
- try:
- targets=dict(versiondict[version])
- for target in platforms:
- try:
- arch,(p1,p2,p3)=targets[target]
- compiler,revision=version.rsplit(' ',1)
- if arch:
- return compiler,revision,p1,p2,p3,arch
- else:
- return compiler,revision,p1,p2,p3
- except KeyError:continue
- except KeyError:continue
- conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
-@conf
-def get_msvc_version(conf,compiler,version,target,vcvars):
- debug('msvc: get_msvc_version: %r %r %r',compiler,version,target)
- batfile=conf.bldnode.make_node('waf-print-msvc.bat')
- batfile.write("""@echo off
-set INCLUDE=
-set LIB=
-call "%s" %s
-echo PATH=%%PATH%%
-echo INCLUDE=%%INCLUDE%%
-echo LIB=%%LIB%%
-"""%(vcvars,target))
- sout=conf.cmd_and_log(['cmd','/E:on','/V:on','/C',batfile.abspath()])
- lines=sout.splitlines()
- if not lines[0]:
- lines.pop(0)
- if version=='11.0':
- if lines[0].startswith('Error'):
- conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_1)')
- else:
- for x in('Setting environment','Setting SDK environment','Intel(R) C++ Compiler','Intel Parallel Studio','Intel(R) Parallel Studio','Intel(R) Composer','Intel Corporation. All rights reserved.'):
- if lines[0].find(x)>-1:
- lines.pop(0)
- break
- else:
- debug('msvc: get_msvc_version: %r %r %r -> not found',compiler,version,target)
- conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_2)')
- MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None
- for line in lines:
- if line.startswith('PATH='):
- path=line[5:]
- MSVC_PATH=path.split(';')
- elif line.startswith('INCLUDE='):
- MSVC_INCDIR=[i for i in line[8:].split(';')if i]
- elif line.startswith('LIB='):
- MSVC_LIBDIR=[i for i in line[4:].split(';')if i]
- if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR):
- conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)')
- env=dict(os.environ)
- env.update(PATH=path)
- compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
- cxx=conf.find_program(compiler_name,path_list=MSVC_PATH)
- cxx=conf.cmd_to_list(cxx)
- if'CL'in env:
- del(env['CL'])
- try:
- try:
- conf.cmd_and_log(cxx+['/help'],env=env)
- except Exception ,e:
- debug('msvc: get_msvc_version: %r %r %r -> failure'%(compiler,version,target))
- debug(str(e))
- conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
- else:
- debug('msvc: get_msvc_version: %r %r %r -> OK',compiler,version,target)
- finally:
- conf.env[compiler_name]=''
- return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR)
-@conf
-def gather_wsdk_versions(conf,versions):
- version_pattern=re.compile('^v..?.?\...?.?')
- try:
- all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
- except WindowsError:
- try:
- all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
- except WindowsError:
- return
- index=0
- while 1:
- try:
- version=Utils.winreg.EnumKey(all_versions,index)
- except WindowsError:
- break
- index=index+1
- if not version_pattern.match(version):
- continue
- try:
- msvc_version=Utils.winreg.OpenKey(all_versions,version)
- path,type=Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
- except WindowsError:
- continue
- if os.path.isfile(os.path.join(path,'bin','SetEnv.cmd')):
- targets=[]
- for target,arch in all_msvc_platforms:
- try:
- targets.append((target,(arch,conf.get_msvc_version('wsdk',version,'/'+target,os.path.join(path,'bin','SetEnv.cmd')))))
- except conf.errors.ConfigurationError:
- pass
- versions.append(('wsdk '+version[1:],targets))
-def gather_wince_supported_platforms():
- supported_wince_platforms=[]
- try:
- ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
- except WindowsError:
- try:
- ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
- except WindowsError:
- ce_sdk=''
- if not ce_sdk:
- return supported_wince_platforms
- ce_index=0
- while 1:
- try:
- sdk_device=Utils.winreg.EnumKey(ce_sdk,ce_index)
- except WindowsError:
- break
- ce_index=ce_index+1
- sdk=Utils.winreg.OpenKey(ce_sdk,sdk_device)
- try:
- path,type=Utils.winreg.QueryValueEx(sdk,'SDKRootDir')
- except WindowsError:
- try:
- path,type=Utils.winreg.QueryValueEx(sdk,'SDKInformation')
- path,xml=os.path.split(path)
- except WindowsError:
- continue
- path=str(path)
- path,device=os.path.split(path)
- if not device:
- path,device=os.path.split(path)
- for arch,compiler in all_wince_platforms:
- platforms=[]
- if os.path.isdir(os.path.join(path,device,'Lib',arch)):
- platforms.append((arch,compiler,os.path.join(path,device,'Include',arch),os.path.join(path,device,'Lib',arch)))
- if platforms:
- supported_wince_platforms.append((device,platforms))
- return supported_wince_platforms
-def gather_msvc_detected_versions():
- version_pattern=re.compile('^(\d\d?\.\d\d?)(Exp)?$')
- detected_versions=[]
- for vcver,vcvar in[('VCExpress','Exp'),('VisualStudio','')]:
- try:
- prefix='SOFTWARE\\Wow6432node\\Microsoft\\'+vcver
- all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix)
- except WindowsError:
- try:
- prefix='SOFTWARE\\Microsoft\\'+vcver
- all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix)
- except WindowsError:
- continue
- index=0
- while 1:
- try:
- version=Utils.winreg.EnumKey(all_versions,index)
- except WindowsError:
- break
- index=index+1
- match=version_pattern.match(version)
- if not match:
- continue
- else:
- versionnumber=float(match.group(1))
- detected_versions.append((versionnumber,version+vcvar,prefix+"\\"+version))
- def fun(tup):
- return tup[0]
- detected_versions.sort(key=fun)
- return detected_versions
-@conf
-def gather_msvc_targets(conf,versions,version,vc_path):
- targets=[]
- if os.path.isfile(os.path.join(vc_path,'vcvarsall.bat')):
- for target,realtarget in all_msvc_platforms[::-1]:
- try:
- targets.append((target,(realtarget,conf.get_msvc_version('msvc',version,target,os.path.join(vc_path,'vcvarsall.bat')))))
- except conf.errors.ConfigurationError:
- pass
- elif os.path.isfile(os.path.join(vc_path,'Common7','Tools','vsvars32.bat')):
- try:
- targets.append(('x86',('x86',conf.get_msvc_version('msvc',version,'x86',os.path.join(vc_path,'Common7','Tools','vsvars32.bat')))))
- except conf.errors.ConfigurationError:
- pass
- elif os.path.isfile(os.path.join(vc_path,'Bin','vcvars32.bat')):
- try:
- targets.append(('x86',('x86',conf.get_msvc_version('msvc',version,'',os.path.join(vc_path,'Bin','vcvars32.bat')))))
- except conf.errors.ConfigurationError:
- pass
- versions.append(('msvc '+version,targets))
-@conf
-def gather_wince_targets(conf,versions,version,vc_path,vsvars,supported_platforms):
- for device,platforms in supported_platforms:
- cetargets=[]
- for platform,compiler,include,lib in platforms:
- winCEpath=os.path.join(vc_path,'ce')
- if not os.path.isdir(winCEpath):
- continue
- try:
- common_bindirs,_1,_2=conf.get_msvc_version('msvc',version,'x86',vsvars)
- except conf.errors.ConfigurationError:
- continue
- if os.path.isdir(os.path.join(winCEpath,'lib',platform)):
- bindirs=[os.path.join(winCEpath,'bin',compiler),os.path.join(winCEpath,'bin','x86_'+compiler)]+common_bindirs
- incdirs=[os.path.join(winCEpath,'include'),os.path.join(winCEpath,'atlmfc','include'),include]
- libdirs=[os.path.join(winCEpath,'lib',platform),os.path.join(winCEpath,'atlmfc','lib',platform),lib]
- cetargets.append((platform,(platform,(bindirs,incdirs,libdirs))))
- if cetargets:
- versions.append((device+' '+version,cetargets))
-@conf
-def gather_msvc_versions(conf,versions):
- vc_paths=[]
- for(v,version,reg)in gather_msvc_detected_versions():
- try:
- try:
- msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\VC")
- except WindowsError:
- msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\Microsoft Visual C++")
- path,type=Utils.winreg.QueryValueEx(msvc_version,'ProductDir')
- vc_paths.append((version,os.path.abspath(str(path))))
- except WindowsError:
- continue
- wince_supported_platforms=gather_wince_supported_platforms()
- for version,vc_path in vc_paths:
- vs_path=os.path.dirname(vc_path)
- vsvars=os.path.join(vs_path,'Common7','Tools','vsvars32.bat')
- if wince_supported_platforms and os.path.isfile(vsvars):
- conf.gather_wince_targets(versions,version,vc_path,vsvars,wince_supported_platforms)
- for version,vc_path in vc_paths:
- vs_path=os.path.dirname(vc_path)
- conf.gather_msvc_targets(versions,version,vc_path)
-@conf
-def gather_icl_versions(conf,versions):
- version_pattern=re.compile('^...?.?\....?.?')
- try:
- all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
- except WindowsError:
- try:
- all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\C++')
- except WindowsError:
- return
- index=0
- while 1:
- try:
- version=Utils.winreg.EnumKey(all_versions,index)
- except WindowsError:
- break
- index=index+1
- if not version_pattern.match(version):
- continue
- targets=[]
- for target,arch in all_icl_platforms:
- try:
- if target=='intel64':targetDir='EM64T_NATIVE'
- else:targetDir=target
- Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
- icl_version=Utils.winreg.OpenKey(all_versions,version)
- path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- batch_file=os.path.join(path,'bin','iclvars.bat')
- if os.path.isfile(batch_file):
- try:
- targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file))))
- except conf.errors.ConfigurationError:
- pass
- except WindowsError:
- pass
- for target,arch in all_icl_platforms:
- try:
- icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target)
- path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- batch_file=os.path.join(path,'bin','iclvars.bat')
- if os.path.isfile(batch_file):
- try:
- targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file))))
- except conf.errors.ConfigurationError:
- pass
- except WindowsError:
- continue
- major=version[0:2]
- versions.append(('intel '+major,targets))
-@conf
-def gather_intel_composer_versions(conf,versions):
- version_pattern=re.compile('^...?.?\...?.?.?')
- try:
- all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Suites')
- except WindowsError:
- try:
- all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Suites')
- except WindowsError:
- return
- index=0
- while 1:
- try:
- version=Utils.winreg.EnumKey(all_versions,index)
- except WindowsError:
- break
- index=index+1
- if not version_pattern.match(version):
- continue
- targets=[]
- for target,arch in all_icl_platforms:
- try:
- if target=='intel64':targetDir='EM64T_NATIVE'
- else:targetDir=target
- try:
- defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir)
- except WindowsError:
- if targetDir=='EM64T_NATIVE':
- defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T')
- else:
- raise WindowsError
- uid,type=Utils.winreg.QueryValueEx(defaults,'SubKey')
- Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir)
- icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++')
- path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- batch_file=os.path.join(path,'bin','iclvars.bat')
- if os.path.isfile(batch_file):
- try:
- targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file))))
- except conf.errors.ConfigurationError ,e:
- pass
- compilervars_warning_attr='_compilervars_warning_key'
- if version[0:2]=='13'and getattr(conf,compilervars_warning_attr,True):
- setattr(conf,compilervars_warning_attr,False)
- patch_url='http://software.intel.com/en-us/forums/topic/328487'
- compilervars_arch=os.path.join(path,'bin','compilervars_arch.bat')
- vs_express_path=os.environ['VS110COMNTOOLS']+r'..\IDE\VSWinExpress.exe'
- dev_env_path=os.environ['VS110COMNTOOLS']+r'..\IDE\devenv.exe'
- if(r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'in Utils.readf(compilervars_arch)and not os.path.exists(vs_express_path)and not os.path.exists(dev_env_path)):
- Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU ''(VSWinExpress.exe) but it does not seem to be installed at %r. ''The intel command line set up will fail to configure unless the file %r''is patched. See: %s')%(vs_express_path,compilervars_arch,patch_url))
- except WindowsError:
- pass
- major=version[0:2]
- versions.append(('intel '+major,targets))
-@conf
-def get_msvc_versions(conf):
- if not conf.env['MSVC_INSTALLED_VERSIONS']:
- lst=[]
- conf.gather_icl_versions(lst)
- conf.gather_intel_composer_versions(lst)
- conf.gather_wsdk_versions(lst)
- conf.gather_msvc_versions(lst)
- conf.env['MSVC_INSTALLED_VERSIONS']=lst
- return conf.env['MSVC_INSTALLED_VERSIONS']
-@conf
-def print_all_msvc_detected(conf):
- for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']:
- Logs.info(version)
- for target,l in targets:
- Logs.info("\t"+target)
-@conf
-def detect_msvc(conf,arch=False):
- versions=get_msvc_versions(conf)
- return setup_msvc(conf,versions,arch)
-@conf
-def find_lt_names_msvc(self,libname,is_static=False):
- lt_names=['lib%s.la'%libname,'%s.la'%libname,]
- for path in self.env['LIBPATH']:
- for la in lt_names:
- laf=os.path.join(path,la)
- dll=None
- if os.path.exists(laf):
- ltdict=Utils.read_la_file(laf)
- lt_libdir=None
- if ltdict.get('libdir',''):
- lt_libdir=ltdict['libdir']
- if not is_static and ltdict.get('library_names',''):
- dllnames=ltdict['library_names'].split()
- dll=dllnames[0].lower()
- dll=re.sub('\.dll$','',dll)
- return(lt_libdir,dll,False)
- elif ltdict.get('old_library',''):
- olib=ltdict['old_library']
- if os.path.exists(os.path.join(path,olib)):
- return(path,olib,True)
- elif lt_libdir!=''and os.path.exists(os.path.join(lt_libdir,olib)):
- return(lt_libdir,olib,True)
- else:
- return(None,olib,True)
- else:
- raise self.errors.WafError('invalid libtool object file: %s'%laf)
- return(None,None,None)
-@conf
-def libname_msvc(self,libname,is_static=False):
- lib=libname.lower()
- lib=re.sub('\.lib$','',lib)
- if lib in g_msvc_systemlibs:
- return lib
- lib=re.sub('^lib','',lib)
- if lib=='m':
- return None
- (lt_path,lt_libname,lt_static)=self.find_lt_names_msvc(lib,is_static)
- if lt_path!=None and lt_libname!=None:
- if lt_static==True:
- return os.path.join(lt_path,lt_libname)
- if lt_path!=None:
- _libpaths=[lt_path]+self.env['LIBPATH']
- else:
- _libpaths=self.env['LIBPATH']
- static_libs=['lib%ss.lib'%lib,'lib%s.lib'%lib,'%ss.lib'%lib,'%s.lib'%lib,]
- dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,]
- libnames=static_libs
- if not is_static:
- libnames=dynamic_libs+static_libs
- for path in _libpaths:
- for libn in libnames:
- if os.path.exists(os.path.join(path,libn)):
- debug('msvc: lib found: %s'%os.path.join(path,libn))
- return re.sub('\.lib$','',libn)
- self.fatal("The library %r could not be found"%libname)
- return re.sub('\.lib$','',libname)
-@conf
-def check_lib_msvc(self,libname,is_static=False,uselib_store=None):
- libn=self.libname_msvc(libname,is_static)
- if not uselib_store:
- uselib_store=libname.upper()
- if False and is_static:
- self.env['STLIB_'+uselib_store]=[libn]
- else:
- self.env['LIB_'+uselib_store]=[libn]
-@conf
-def check_libs_msvc(self,libnames,is_static=False):
- for libname in Utils.to_list(libnames):
- self.check_lib_msvc(libname,is_static)
-def configure(conf):
- conf.autodetect(True)
- conf.find_msvc()
- conf.msvc_common_flags()
- conf.cc_load_tools()
- conf.cxx_load_tools()
- conf.cc_add_flags()
- conf.cxx_add_flags()
- conf.link_add_flags()
- conf.visual_studio_add_flags()
-@conf
-def no_autodetect(conf):
- conf.env.NO_MSVC_DETECT=1
- configure(conf)
-@conf
-def autodetect(conf,arch=False):
- v=conf.env
- if v.NO_MSVC_DETECT:
- return
- if arch:
- compiler,version,path,includes,libdirs,arch=conf.detect_msvc(True)
- v['DEST_CPU']=arch
- else:
- compiler,version,path,includes,libdirs=conf.detect_msvc()
- v['PATH']=path
- v['INCLUDES']=includes
- v['LIBPATH']=libdirs
- v['MSVC_COMPILER']=compiler
- try:
- v['MSVC_VERSION']=float(version)
- except Exception:
- v['MSVC_VERSION']=float(version[:-3])
-def _get_prog_names(conf,compiler):
- if compiler=='intel':
- compiler_name='ICL'
- linker_name='XILINK'
- lib_name='XILIB'
- else:
- compiler_name='CL'
- linker_name='LINK'
- lib_name='LIB'
- return compiler_name,linker_name,lib_name
-@conf
-def find_msvc(conf):
- if sys.platform=='cygwin':
- conf.fatal('MSVC module does not work under cygwin Python!')
- v=conf.env
- path=v['PATH']
- compiler=v['MSVC_COMPILER']
- version=v['MSVC_VERSION']
- compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
- v.MSVC_MANIFEST=(compiler=='msvc'and version>=8)or(compiler=='wsdk'and version>=6)or(compiler=='intel'and version>=11)
- cxx=None
- if v['CXX']:cxx=v['CXX']
- elif'CXX'in conf.environ:cxx=conf.environ['CXX']
- cxx=conf.find_program(compiler_name,var='CXX',path_list=path)
- cxx=conf.cmd_to_list(cxx)
- env=dict(conf.environ)
- if path:env.update(PATH=';'.join(path))
- if not conf.cmd_and_log(cxx+['/nologo','/help'],env=env):
- conf.fatal('the msvc compiler could not be identified')
- v['CC']=v['CXX']=cxx
- v['CC_NAME']=v['CXX_NAME']='msvc'
- if not v['LINK_CXX']:
- link=conf.find_program(linker_name,path_list=path)
- if link:v['LINK_CXX']=link
- else:conf.fatal('%s was not found (linker)'%linker_name)
- v['LINK']=link
- if not v['LINK_CC']:
- v['LINK_CC']=v['LINK_CXX']
- if not v['AR']:
- stliblink=conf.find_program(lib_name,path_list=path,var='AR')
- if not stliblink:return
- v['ARFLAGS']=['/NOLOGO']
- if v.MSVC_MANIFEST:
- conf.find_program('MT',path_list=path,var='MT')
- v['MTFLAGS']=['/NOLOGO']
- conf.load('winres')
- if not conf.env['WINRC']:
- warn('Resource compiler not found. Compiling resource file is disabled')
-@conf
-def visual_studio_add_flags(self):
- v=self.env
- try:v.prepend_value('INCLUDES',[x for x in self.environ['INCLUDE'].split(';')if x])
- except Exception:pass
- try:v.prepend_value('LIBPATH',[x for x in self.environ['LIB'].split(';')if x])
- except Exception:pass
-@conf
-def msvc_common_flags(conf):
- v=conf.env
- v['DEST_BINFMT']='pe'
- v.append_value('CFLAGS',['/nologo'])
- v.append_value('CXXFLAGS',['/nologo'])
- v['DEFINES_ST']='/D%s'
- v['CC_SRC_F']=''
- v['CC_TGT_F']=['/c','/Fo']
- if v['MSVC_VERSION']>=8:
- v['CC_TGT_F']=['/FC']+v['CC_TGT_F']
- v['CXX_SRC_F']=''
- v['CXX_TGT_F']=['/c','/Fo']
- if v['MSVC_VERSION']>=8:
- v['CXX_TGT_F']=['/FC']+v['CXX_TGT_F']
- v['CPPPATH_ST']='/I%s'
- v['AR_TGT_F']=v['CCLNK_TGT_F']=v['CXXLNK_TGT_F']='/OUT:'
- v['CFLAGS_CONSOLE']=v['CXXFLAGS_CONSOLE']=['/SUBSYSTEM:CONSOLE']
- v['CFLAGS_NATIVE']=v['CXXFLAGS_NATIVE']=['/SUBSYSTEM:NATIVE']
- v['CFLAGS_POSIX']=v['CXXFLAGS_POSIX']=['/SUBSYSTEM:POSIX']
- v['CFLAGS_WINDOWS']=v['CXXFLAGS_WINDOWS']=['/SUBSYSTEM:WINDOWS']
- v['CFLAGS_WINDOWSCE']=v['CXXFLAGS_WINDOWSCE']=['/SUBSYSTEM:WINDOWSCE']
- v['CFLAGS_CRT_MULTITHREADED']=v['CXXFLAGS_CRT_MULTITHREADED']=['/MT']
- v['CFLAGS_CRT_MULTITHREADED_DLL']=v['CXXFLAGS_CRT_MULTITHREADED_DLL']=['/MD']
- v['CFLAGS_CRT_MULTITHREADED_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DBG']=['/MTd']
- v['CFLAGS_CRT_MULTITHREADED_DLL_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DLL_DBG']=['/MDd']
- v['LIB_ST']='%s.lib'
- v['LIBPATH_ST']='/LIBPATH:%s'
- v['STLIB_ST']='%s.lib'
- v['STLIBPATH_ST']='/LIBPATH:%s'
- v.append_value('LINKFLAGS',['/NOLOGO'])
- if v['MSVC_MANIFEST']:
- v.append_value('LINKFLAGS',['/MANIFEST'])
- v['CFLAGS_cshlib']=[]
- v['CXXFLAGS_cxxshlib']=[]
- v['LINKFLAGS_cshlib']=v['LINKFLAGS_cxxshlib']=['/DLL']
- v['cshlib_PATTERN']=v['cxxshlib_PATTERN']='%s.dll'
- v['implib_PATTERN']='%s.lib'
- v['IMPLIB_ST']='/IMPLIB:%s'
- v['LINKFLAGS_cstlib']=[]
- v['cstlib_PATTERN']=v['cxxstlib_PATTERN']='%s.lib'
- v['cprogram_PATTERN']=v['cxxprogram_PATTERN']='%s.exe'
-@after_method('apply_link')
-@feature('c','cxx')
-def apply_flags_msvc(self):
- if self.env.CC_NAME!='msvc'or not getattr(self,'link_task',None):
- return
- is_static=isinstance(self.link_task,ccroot.stlink_task)
- subsystem=getattr(self,'subsystem','')
- if subsystem:
- subsystem='/subsystem:%s'%subsystem
- flags=is_static and'ARFLAGS'or'LINKFLAGS'
- self.env.append_value(flags,subsystem)
- if not is_static:
- for f in self.env.LINKFLAGS:
- d=f.lower()
- if d[1:]=='debug':
- pdbnode=self.link_task.outputs[0].change_ext('.pdb')
- self.link_task.outputs.append(pdbnode)
- try:
- self.install_task.source.append(pdbnode)
- except AttributeError:
- pass
- break
-@feature('cprogram','cshlib','cxxprogram','cxxshlib')
-@after_method('apply_link')
-def apply_manifest(self):
- if self.env.CC_NAME=='msvc'and self.env.MSVC_MANIFEST and getattr(self,'link_task',None):
- out_node=self.link_task.outputs[0]
- man_node=out_node.parent.find_or_declare(out_node.name+'.manifest')
- self.link_task.outputs.append(man_node)
- self.link_task.do_manifest=True
-def exec_mf(self):
- env=self.env
- mtool=env['MT']
- if not mtool:
- return 0
- self.do_manifest=False
- outfile=self.outputs[0].abspath()
- manifest=None
- for out_node in self.outputs:
- if out_node.name.endswith('.manifest'):
- manifest=out_node.abspath()
- break
- if manifest is None:
- return 0
- mode=''
- if'cprogram'in self.generator.features or'cxxprogram'in self.generator.features:
- mode='1'
- elif'cshlib'in self.generator.features or'cxxshlib'in self.generator.features:
- mode='2'
- debug('msvc: embedding manifest in mode %r'%mode)
- lst=[]
- lst.append(env['MT'])
- lst.extend(Utils.to_list(env['MTFLAGS']))
- lst.extend(['-manifest',manifest])
- lst.append('-outputresource:%s;%s'%(outfile,mode))
- lst=[lst]
- return self.exec_command(*lst)
-def quote_response_command(self,flag):
- if flag.find(' ')>-1:
- for x in('/LIBPATH:','/IMPLIB:','/OUT:','/I'):
- if flag.startswith(x):
- flag='%s"%s"'%(x,flag[len(x):])
- break
- else:
- flag='"%s"'%flag
- return flag
-def exec_response_command(self,cmd,**kw):
- try:
- tmp=None
- if sys.platform.startswith('win')and isinstance(cmd,list)and len(' '.join(cmd))>=8192:
- program=cmd[0]
- cmd=[self.quote_response_command(x)for x in cmd]
- (fd,tmp)=tempfile.mkstemp()
- os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:]))
- os.close(fd)
- cmd=[program,'@'+tmp]
- ret=self.generator.bld.exec_command(cmd,**kw)
- finally:
- if tmp:
- try:
- os.remove(tmp)
- except OSError:
- pass
- return ret
-def exec_command_msvc(self,*k,**kw):
- assert self.env['CC_NAME']=='msvc'
- if isinstance(k[0],list):
- lst=[]
- carry=''
- for a in k[0]:
- if a=='/Fo'or a=='/doc'or a[-1]==':':
- carry=a
- else:
- lst.append(carry+a)
- carry=''
- k=[lst]
- if self.env['PATH']:
- env=dict(self.env.env or os.environ)
- env.update(PATH=';'.join(self.env['PATH']))
- kw['env']=env
- bld=self.generator.bld
- try:
- if not kw.get('cwd',None):
- kw['cwd']=bld.cwd
- except AttributeError:
- bld.cwd=kw['cwd']=bld.variant_dir
- ret=self.exec_response_command(k[0],**kw)
- if not ret and getattr(self,'do_manifest',None):
- ret=self.exec_mf()
- return ret
-def wrap_class(class_name):
- cls=Task.classes.get(class_name,None)
- if not cls:
- return None
- derived_class=type(class_name,(cls,),{})
- def exec_command(self,*k,**kw):
- if self.env['CC_NAME']=='msvc':
- return self.exec_command_msvc(*k,**kw)
- else:
- return super(derived_class,self).exec_command(*k,**kw)
- derived_class.exec_command=exec_command
- derived_class.exec_response_command=exec_response_command
- derived_class.quote_response_command=quote_response_command
- derived_class.exec_command_msvc=exec_command_msvc
- derived_class.exec_mf=exec_mf
- return derived_class
-for k in'c cxx cprogram cxxprogram cshlib cxxshlib cstlib cxxstlib'.split():
- wrap_class(k)
diff --git a/waflib/Tools/nasm.py b/waflib/Tools/nasm.py
deleted file mode 100644
index e3126dd..0000000
--- a/waflib/Tools/nasm.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import waflib.Tools.asm
-from waflib.TaskGen import feature
-@feature('asm')
-def apply_nasm_vars(self):
- self.env.append_value('ASFLAGS',self.to_list(getattr(self,'nasm_flags',[])))
-def configure(conf):
- nasm=conf.find_program(['nasm','yasm'],var='AS')
- conf.env.AS_TGT_F=['-o']
- conf.env.ASLNK_TGT_F=['-o']
- conf.load('asm')
diff --git a/waflib/Tools/perl.py b/waflib/Tools/perl.py
deleted file mode 100644
index 8b6c2f8..0000000
--- a/waflib/Tools/perl.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os
-from waflib import Task,Options,Utils
-from waflib.Configure import conf
-from waflib.TaskGen import extension,feature,before_method
-@before_method('apply_incpaths','apply_link','propagate_uselib_vars')
-@feature('perlext')
-def init_perlext(self):
- self.uselib=self.to_list(getattr(self,'uselib',[]))
- if not'PERLEXT'in self.uselib:self.uselib.append('PERLEXT')
- self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['perlext_PATTERN']
-@extension('.xs')
-def xsubpp_file(self,node):
- outnode=node.change_ext('.c')
- self.create_task('xsubpp',node,outnode)
- self.source.append(outnode)
-class xsubpp(Task.Task):
- run_str='${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
- color='BLUE'
- ext_out=['.h']
-@conf
-def check_perl_version(self,minver=None):
- res=True
- if minver:
- cver='.'.join(map(str,minver))
- else:
- cver=''
- self.start_msg('Checking for minimum perl version %s'%cver)
- perl=getattr(Options.options,'perlbinary',None)
- if not perl:
- perl=self.find_program('perl',var='PERL')
- if not perl:
- self.end_msg("Perl not found",color="YELLOW")
- return False
- self.env['PERL']=perl
- version=self.cmd_and_log([perl,"-e",'printf \"%vd\", $^V'])
- if not version:
- res=False
- version="Unknown"
- elif not minver is None:
- ver=tuple(map(int,version.split(".")))
- if ver<minver:
- res=False
- self.end_msg(version,color=res and"GREEN"or"YELLOW")
- return res
-@conf
-def check_perl_module(self,module):
- cmd=[self.env['PERL'],'-e','use %s'%module]
- self.start_msg('perl module %s'%module)
- try:
- r=self.cmd_and_log(cmd)
- except Exception:
- self.end_msg(False)
- return None
- self.end_msg(r or True)
- return r
-@conf
-def check_perl_ext_devel(self):
- env=self.env
- perl=env.PERL
- if not perl:
- self.fatal('find perl first')
- def read_out(cmd):
- return Utils.to_list(self.cmd_and_log(perl+cmd))
- env['LINKFLAGS_PERLEXT']=read_out(" -MConfig -e'print $Config{lddlflags}'")
- env['INCLUDES_PERLEXT']=read_out(" -MConfig -e'print \"$Config{archlib}/CORE\"'")
- env['CFLAGS_PERLEXT']=read_out(" -MConfig -e'print \"$Config{ccflags} $Config{cccdlflags}\"'")
- env['XSUBPP']=read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}\"'")
- env['EXTUTILS_TYPEMAP']=read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/typemap\"'")
- if not getattr(Options.options,'perlarchdir',None):
- env['ARCHDIR_PERL']=self.cmd_and_log(perl+" -MConfig -e'print $Config{sitearch}'")
- else:
- env['ARCHDIR_PERL']=getattr(Options.options,'perlarchdir')
- env['perlext_PATTERN']='%s.'+self.cmd_and_log(perl+" -MConfig -e'print $Config{dlext}'")
-def options(opt):
- opt.add_option('--with-perl-binary',type='string',dest='perlbinary',help='Specify alternate perl binary',default=None)
- opt.add_option('--with-perl-archdir',type='string',dest='perlarchdir',help='Specify directory where to install arch specific files',default=None)
diff --git a/waflib/Tools/python.py b/waflib/Tools/python.py
deleted file mode 100644
index 4bb911e..0000000
--- a/waflib/Tools/python.py
+++ /dev/null
@@ -1,340 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys
-from waflib import Utils,Options,Errors,Logs
-from waflib.TaskGen import extension,before_method,after_method,feature
-from waflib.Configure import conf
-FRAG='''
-#include <Python.h>
-#ifdef __cplusplus
-extern "C" {
-#endif
- void Py_Initialize(void);
- void Py_Finalize(void);
-#ifdef __cplusplus
-}
-#endif
-int main(int argc, char **argv)
-{
- (void)argc; (void)argv;
- Py_Initialize();
- Py_Finalize();
- return 0;
-}
-'''
-INST='''
-import sys, py_compile
-py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3])
-'''
-DISTUTILS_IMP=['from distutils.sysconfig import get_config_var, get_python_lib']
-@extension('.py')
-def process_py(self,node):
- try:
- if not self.bld.is_install:
- return
- except AttributeError:
- return
- try:
- if not self.install_path:
- return
- except AttributeError:
- self.install_path='${PYTHONDIR}'
- def inst_py(ctx):
- install_from=getattr(self,'install_from',None)
- if install_from:
- install_from=self.path.find_dir(install_from)
- install_pyfile(self,node,install_from)
- self.bld.add_post_fun(inst_py)
-def install_pyfile(self,node,install_from=None):
- from_node=install_from or node.parent
- tsk=self.bld.install_as(self.install_path+'/'+node.path_from(from_node),node,postpone=False)
- path=tsk.get_install_path()
- if self.bld.is_install<0:
- Logs.info("+ removing byte compiled python files")
- for x in'co':
- try:
- os.remove(path+x)
- except OSError:
- pass
- if self.bld.is_install>0:
- try:
- st1=os.stat(path)
- except OSError:
- Logs.error('The python file is missing, this should not happen')
- for x in['c','o']:
- do_inst=self.env['PY'+x.upper()]
- try:
- st2=os.stat(path+x)
- except OSError:
- pass
- else:
- if st1.st_mtime<=st2.st_mtime:
- do_inst=False
- if do_inst:
- lst=(x=='o')and[self.env['PYFLAGS_OPT']]or[]
- (a,b,c)=(path,path+x,tsk.get_install_path(destdir=False)+x)
- argv=self.env['PYTHON']+lst+['-c',INST,a,b,c]
- Logs.info('+ byte compiling %r'%(path+x))
- env=self.env.env or None
- ret=Utils.subprocess.Popen(argv,env=env).wait()
- if ret:
- raise Errors.WafError('py%s compilation failed %r'%(x,path))
-@feature('py')
-def feature_py(self):
- pass
-@feature('pyext')
-@before_method('propagate_uselib_vars','apply_link')
-@after_method('apply_bundle')
-def init_pyext(self):
- self.uselib=self.to_list(getattr(self,'uselib',[]))
- if not'PYEXT'in self.uselib:
- self.uselib.append('PYEXT')
- self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.macbundle_PATTERN=self.env.pyext_PATTERN
- self.env.fcshlib_PATTERN=self.env.dshlib_PATTERN=self.env.pyext_PATTERN
- try:
- if not self.install_path:
- return
- except AttributeError:
- self.install_path='${PYTHONARCHDIR}'
-@feature('pyext')
-@before_method('apply_link','apply_bundle')
-def set_bundle(self):
- if Utils.unversioned_sys_platform()=='darwin':
- self.mac_bundle=True
-@before_method('propagate_uselib_vars')
-@feature('pyembed')
-def init_pyembed(self):
- self.uselib=self.to_list(getattr(self,'uselib',[]))
- if not'PYEMBED'in self.uselib:
- self.uselib.append('PYEMBED')
-@conf
-def get_python_variables(self,variables,imports=None):
- if not imports:
- try:
- imports=self.python_imports
- except AttributeError:
- imports=DISTUTILS_IMP
- program=list(imports)
- program.append('')
- for v in variables:
- program.append("print(repr(%s))"%v)
- os_env=dict(os.environ)
- try:
- del os_env['MACOSX_DEPLOYMENT_TARGET']
- except KeyError:
- pass
- try:
- out=self.cmd_and_log(self.env.PYTHON+['-c','\n'.join(program)],env=os_env)
- except Errors.WafError:
- self.fatal('The distutils module is unusable: install "python-devel"?')
- self.to_log(out)
- return_values=[]
- for s in out.split('\n'):
- s=s.strip()
- if not s:
- continue
- if s=='None':
- return_values.append(None)
- elif(s[0]=="'"and s[-1]=="'")or(s[0]=='"'and s[-1]=='"'):
- return_values.append(eval(s))
- elif s[0].isdigit():
- return_values.append(int(s))
- else:break
- return return_values
-@conf
-def check_python_headers(conf):
- env=conf.env
- if not env['CC_NAME']and not env['CXX_NAME']:
- conf.fatal('load a compiler first (gcc, g++, ..)')
- if not env['PYTHON_VERSION']:
- conf.check_python_version()
- pybin=conf.env.PYTHON
- if not pybin:
- conf.fatal('Could not find the python executable')
- v='prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS'.split()
- try:
- lst=conf.get_python_variables(["get_config_var('%s') or ''"%x for x in v])
- except RuntimeError:
- conf.fatal("Python development headers not found (-v for details).")
- vals=['%s = %r'%(x,y)for(x,y)in zip(v,lst)]
- conf.to_log("Configuration returned from %r:\n%r\n"%(pybin,'\n'.join(vals)))
- dct=dict(zip(v,lst))
- x='MACOSX_DEPLOYMENT_TARGET'
- if dct[x]:
- conf.env[x]=conf.environ[x]=dct[x]
- env['pyext_PATTERN']='%s'+dct['SO']
- all_flags=dct['LDFLAGS']+' '+dct['CFLAGS']
- conf.parse_flags(all_flags,'PYEMBED')
- all_flags=dct['LDFLAGS']+' '+dct['LDSHARED']+' '+dct['CFLAGS']
- conf.parse_flags(all_flags,'PYEXT')
- result=None
- for name in('python'+env['PYTHON_VERSION'],'python'+env['PYTHON_VERSION'].replace('.','')):
- if not result and env['LIBPATH_PYEMBED']:
- path=env['LIBPATH_PYEMBED']
- conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n"%path)
- result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBPATH_PYEMBED'%name)
- if not result and dct['LIBDIR']:
- path=[dct['LIBDIR']]
- conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n"%path)
- result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBDIR'%name)
- if not result and dct['LIBPL']:
- path=[dct['LIBPL']]
- conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
- result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in python_LIBPL'%name)
- if not result:
- path=[os.path.join(dct['prefix'],"libs")]
- conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
- result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in $prefix/libs'%name)
- if result:
- break
- if result:
- env['LIBPATH_PYEMBED']=path
- env.append_value('LIB_PYEMBED',[name])
- else:
- conf.to_log("\n\n### LIB NOT FOUND\n")
- if(Utils.is_win32 or sys.platform.startswith('os2')or dct['Py_ENABLE_SHARED']):
- env['LIBPATH_PYEXT']=env['LIBPATH_PYEMBED']
- env['LIB_PYEXT']=env['LIB_PYEMBED']
- num='.'.join(env['PYTHON_VERSION'].split('.')[:2])
- conf.find_program([''.join(pybin)+'-config','python%s-config'%num,'python-config-%s'%num,'python%sm-config'%num],var='PYTHON_CONFIG',mandatory=False)
- includes=[]
- if conf.env.PYTHON_CONFIG:
- for incstr in conf.cmd_and_log([conf.env.PYTHON_CONFIG,'--includes']).strip().split():
- if(incstr.startswith('-I')or incstr.startswith('/I')):
- incstr=incstr[2:]
- if incstr not in includes:
- includes.append(incstr)
- conf.to_log("Include path for Python extensions (found via python-config --includes): %r\n"%(includes,))
- env['INCLUDES_PYEXT']=includes
- env['INCLUDES_PYEMBED']=includes
- else:
- conf.to_log("Include path for Python extensions ""(found via distutils module): %r\n"%(dct['INCLUDEPY'],))
- env['INCLUDES_PYEXT']=[dct['INCLUDEPY']]
- env['INCLUDES_PYEMBED']=[dct['INCLUDEPY']]
- if env['CC_NAME']=='gcc':
- env.append_value('CFLAGS_PYEMBED',['-fno-strict-aliasing'])
- env.append_value('CFLAGS_PYEXT',['-fno-strict-aliasing'])
- if env['CXX_NAME']=='gcc':
- env.append_value('CXXFLAGS_PYEMBED',['-fno-strict-aliasing'])
- env.append_value('CXXFLAGS_PYEXT',['-fno-strict-aliasing'])
- if env.CC_NAME=="msvc":
- from distutils.msvccompiler import MSVCCompiler
- dist_compiler=MSVCCompiler()
- dist_compiler.initialize()
- env.append_value('CFLAGS_PYEXT',dist_compiler.compile_options)
- env.append_value('CXXFLAGS_PYEXT',dist_compiler.compile_options)
- env.append_value('LINKFLAGS_PYEXT',dist_compiler.ldflags_shared)
- try:
- conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG,errmsg=':-(')
- except conf.errors.ConfigurationError:
- xx=conf.env.CXX_NAME and'cxx'or'c'
- conf.check_cfg(msg='Asking python-config for the flags (pyembed)',path=conf.env.PYTHON_CONFIG,package='',uselib_store='PYEMBED',args=['--cflags','--libs','--ldflags'])
- conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',msg='Getting pyembed flags from python-config',fragment=FRAG,errmsg='Could not build a python embedded interpreter',features='%s %sprogram pyembed'%(xx,xx))
- conf.check_cfg(msg='Asking python-config for the flags (pyext)',path=conf.env.PYTHON_CONFIG,package='',uselib_store='PYEXT',args=['--cflags','--libs','--ldflags'])
- conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',msg='Getting pyext flags from python-config',features='%s %sshlib pyext'%(xx,xx),fragment=FRAG,errmsg='Could not build python extensions')
-@conf
-def check_python_version(conf,minver=None):
- assert minver is None or isinstance(minver,tuple)
- pybin=conf.env['PYTHON']
- if not pybin:
- conf.fatal('could not find the python executable')
- cmd=pybin+['-c','import sys\nfor x in sys.version_info: print(str(x))']
- Logs.debug('python: Running python command %r'%cmd)
- lines=conf.cmd_and_log(cmd).split()
- assert len(lines)==5,"found %i lines, expected 5: %r"%(len(lines),lines)
- pyver_tuple=(int(lines[0]),int(lines[1]),int(lines[2]),lines[3],int(lines[4]))
- result=(minver is None)or(pyver_tuple>=minver)
- if result:
- pyver='.'.join([str(x)for x in pyver_tuple[:2]])
- conf.env['PYTHON_VERSION']=pyver
- if'PYTHONDIR'in conf.environ:
- pydir=conf.environ['PYTHONDIR']
- else:
- if Utils.is_win32:
- (python_LIBDEST,pydir)=conf.get_python_variables(["get_config_var('LIBDEST') or ''","get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']])
- else:
- python_LIBDEST=None
- (pydir,)=conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']])
- if python_LIBDEST is None:
- if conf.env['LIBDIR']:
- python_LIBDEST=os.path.join(conf.env['LIBDIR'],"python"+pyver)
- else:
- python_LIBDEST=os.path.join(conf.env['PREFIX'],"lib","python"+pyver)
- if'PYTHONARCHDIR'in conf.environ:
- pyarchdir=conf.environ['PYTHONARCHDIR']
- else:
- (pyarchdir,)=conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']])
- if not pyarchdir:
- pyarchdir=pydir
- if hasattr(conf,'define'):
- conf.define('PYTHONDIR',pydir)
- conf.define('PYTHONARCHDIR',pyarchdir)
- conf.env['PYTHONDIR']=pydir
- conf.env['PYTHONARCHDIR']=pyarchdir
- pyver_full='.'.join(map(str,pyver_tuple[:3]))
- if minver is None:
- conf.msg('Checking for python version',pyver_full)
- else:
- minver_str='.'.join(map(str,minver))
- conf.msg('Checking for python version',pyver_tuple,">= %s"%(minver_str,)and'GREEN'or'YELLOW')
- if not result:
- conf.fatal('The python version is too old, expecting %r'%(minver,))
-PYTHON_MODULE_TEMPLATE='''
-import %s as current_module
-version = getattr(current_module, '__version__', None)
-if version is not None:
- print(str(version))
-else:
- print('unknown version')
-'''
-@conf
-def check_python_module(conf,module_name,condition=''):
- msg='Python module %s'%module_name
- if condition:
- msg='%s (%s)'%(msg,condition)
- conf.start_msg(msg)
- try:
- ret=conf.cmd_and_log(conf.env['PYTHON']+['-c',PYTHON_MODULE_TEMPLATE%module_name])
- except Exception:
- conf.end_msg(False)
- conf.fatal('Could not find the python module %r'%module_name)
- ret=ret.strip()
- if condition:
- conf.end_msg(ret)
- if ret=='unknown version':
- conf.fatal('Could not check the %s version'%module_name)
- from distutils.version import LooseVersion
- def num(*k):
- if isinstance(k[0],int):
- return LooseVersion('.'.join([str(x)for x in k]))
- else:
- return LooseVersion(k[0])
- d={'num':num,'ver':LooseVersion(ret)}
- ev=eval(condition,{},d)
- if not ev:
- conf.fatal('The %s version does not satisfy the requirements'%module_name)
- else:
- if ret=='unknown version':
- conf.end_msg(True)
- else:
- conf.end_msg(ret)
-def configure(conf):
- try:
- conf.find_program('python',var='PYTHON')
- except conf.errors.ConfigurationError:
- Logs.warn("could not find a python executable, setting to sys.executable '%s'"%sys.executable)
- conf.env.PYTHON=sys.executable
- if conf.env.PYTHON!=sys.executable:
- Logs.warn("python executable %r differs from system %r"%(conf.env.PYTHON,sys.executable))
- conf.env.PYTHON=conf.cmd_to_list(conf.env.PYTHON)
- v=conf.env
- v['PYCMD']='"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
- v['PYFLAGS']=''
- v['PYFLAGS_OPT']='-O'
- v['PYC']=getattr(Options.options,'pyc',1)
- v['PYO']=getattr(Options.options,'pyo',1)
-def options(opt):
- opt.add_option('--nopyc',action='store_false',default=1,help='Do not install bytecode compiled .pyc files (configuration) [Default:install]',dest='pyc')
- opt.add_option('--nopyo',action='store_false',default=1,help='Do not install optimised compiled .pyo files (configuration) [Default:install]',dest='pyo')
diff --git a/waflib/Tools/qt4.py b/waflib/Tools/qt4.py
deleted file mode 100644
index 4300a57..0000000
--- a/waflib/Tools/qt4.py
+++ /dev/null
@@ -1,437 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-try:
- from xml.sax import make_parser
- from xml.sax.handler import ContentHandler
-except ImportError:
- has_xml=False
- ContentHandler=object
-else:
- has_xml=True
-import os,sys
-from waflib.Tools import c_preproc,cxx
-from waflib import Task,Utils,Options,Errors
-from waflib.TaskGen import feature,after_method,extension
-from waflib.Configure import conf
-from waflib import Logs
-MOC_H=['.h','.hpp','.hxx','.hh']
-EXT_RCC=['.qrc']
-EXT_UI=['.ui']
-EXT_QT4=['.cpp','.cc','.cxx','.C']
-QT4_LIBS="QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative"
-class qxx(cxx.cxx):
- def __init__(self,*k,**kw):
- Task.Task.__init__(self,*k,**kw)
- self.moc_done=0
- def scan(self):
- (nodes,names)=c_preproc.scan(self)
- for x in nodes:
- if x.name.endswith('.moc'):
- nodes.remove(x)
- names.append(x.path_from(self.inputs[0].parent.get_bld()))
- return(nodes,names)
- def runnable_status(self):
- if self.moc_done:
- return Task.Task.runnable_status(self)
- else:
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
- self.add_moc_tasks()
- return Task.Task.runnable_status(self)
- def add_moc_tasks(self):
- node=self.inputs[0]
- bld=self.generator.bld
- try:
- self.signature()
- except KeyError:
- pass
- else:
- delattr(self,'cache_sig')
- moctasks=[]
- mocfiles=[]
- try:
- tmp_lst=bld.raw_deps[self.uid()]
- bld.raw_deps[self.uid()]=[]
- except KeyError:
- tmp_lst=[]
- for d in tmp_lst:
- if not d.endswith('.moc'):
- continue
- if d in mocfiles:
- Logs.error("paranoia owns")
- continue
- mocfiles.append(d)
- h_node=None
- try:ext=Options.options.qt_header_ext.split()
- except AttributeError:pass
- if not ext:ext=MOC_H
- base2=d[:-4]
- for x in[node.parent]+self.generator.includes_nodes:
- for e in ext:
- h_node=x.find_node(base2+e)
- if h_node:
- break
- if h_node:
- m_node=h_node.change_ext('.moc')
- break
- else:
- for k in EXT_QT4:
- if base2.endswith(k):
- for x in[node.parent]+self.generator.includes_nodes:
- h_node=x.find_node(base2)
- if h_node:
- break
- if h_node:
- m_node=h_node.change_ext(k+'.moc')
- break
- if not h_node:
- raise Errors.WafError('no header found for %r which is a moc file'%d)
- bld.node_deps[(self.inputs[0].parent.abspath(),m_node.name)]=h_node
- task=Task.classes['moc'](env=self.env,generator=self.generator)
- task.set_inputs(h_node)
- task.set_outputs(m_node)
- gen=bld.producer
- gen.outstanding.insert(0,task)
- gen.total+=1
- moctasks.append(task)
- tmp_lst=bld.raw_deps[self.uid()]=mocfiles
- lst=bld.node_deps.get(self.uid(),())
- for d in lst:
- name=d.name
- if name.endswith('.moc'):
- task=Task.classes['moc'](env=self.env,generator=self.generator)
- task.set_inputs(bld.node_deps[(self.inputs[0].parent.abspath(),name)])
- task.set_outputs(d)
- gen=bld.producer
- gen.outstanding.insert(0,task)
- gen.total+=1
- moctasks.append(task)
- self.run_after.update(set(moctasks))
- self.moc_done=1
- run=Task.classes['cxx'].__dict__['run']
-class trans_update(Task.Task):
- run_str='${QT_LUPDATE} ${SRC} -ts ${TGT}'
- color='BLUE'
-Task.update_outputs(trans_update)
-class XMLHandler(ContentHandler):
- def __init__(self):
- self.buf=[]
- self.files=[]
- def startElement(self,name,attrs):
- if name=='file':
- self.buf=[]
- def endElement(self,name):
- if name=='file':
- self.files.append(str(''.join(self.buf)))
- def characters(self,cars):
- self.buf.append(cars)
-@extension(*EXT_RCC)
-def create_rcc_task(self,node):
- rcnode=node.change_ext('_rc.cpp')
- rcctask=self.create_task('rcc',node,rcnode)
- cpptask=self.create_task('cxx',rcnode,rcnode.change_ext('.o'))
- try:
- self.compiled_tasks.append(cpptask)
- except AttributeError:
- self.compiled_tasks=[cpptask]
- return cpptask
-@extension(*EXT_UI)
-def create_uic_task(self,node):
- uictask=self.create_task('ui4',node)
- uictask.outputs=[self.path.find_or_declare(self.env['ui_PATTERN']%node.name[:-3])]
-@extension('.ts')
-def add_lang(self,node):
- self.lang=self.to_list(getattr(self,'lang',[]))+[node]
-@feature('qt4')
-@after_method('apply_link')
-def apply_qt4(self):
- if getattr(self,'lang',None):
- qmtasks=[]
- for x in self.to_list(self.lang):
- if isinstance(x,str):
- x=self.path.find_resource(x+'.ts')
- qmtasks.append(self.create_task('ts2qm',x,x.change_ext('.qm')))
- if getattr(self,'update',None)and Options.options.trans_qt4:
- cxxnodes=[a.inputs[0]for a in self.compiled_tasks]+[a.inputs[0]for a in self.tasks if getattr(a,'inputs',None)and a.inputs[0].name.endswith('.ui')]
- for x in qmtasks:
- self.create_task('trans_update',cxxnodes,x.inputs)
- if getattr(self,'langname',None):
- qmnodes=[x.outputs[0]for x in qmtasks]
- rcnode=self.langname
- if isinstance(rcnode,str):
- rcnode=self.path.find_or_declare(rcnode+'.qrc')
- t=self.create_task('qm2rcc',qmnodes,rcnode)
- k=create_rcc_task(self,t.outputs[0])
- self.link_task.inputs.append(k.outputs[0])
- lst=[]
- for flag in self.to_list(self.env['CXXFLAGS']):
- if len(flag)<2:continue
- f=flag[0:2]
- if f in['-D','-I','/D','/I']:
- if(f[0]=='/'):
- lst.append('-'+flag[1:])
- else:
- lst.append(flag)
- self.env['MOC_FLAGS']=lst
-@extension(*EXT_QT4)
-def cxx_hook(self,node):
- return self.create_compiled_task('qxx',node)
-class rcc(Task.Task):
- color='BLUE'
- run_str='${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
- ext_out=['.h']
- def scan(self):
- node=self.inputs[0]
- if not has_xml:
- Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
- return([],[])
- parser=make_parser()
- curHandler=XMLHandler()
- parser.setContentHandler(curHandler)
- fi=open(self.inputs[0].abspath(),'r')
- try:
- parser.parse(fi)
- finally:
- fi.close()
- nodes=[]
- names=[]
- root=self.inputs[0].parent
- for x in curHandler.files:
- nd=root.find_resource(x)
- if nd:nodes.append(nd)
- else:names.append(x)
- return(nodes,names)
-class moc(Task.Task):
- color='BLUE'
- run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
-class ui4(Task.Task):
- color='BLUE'
- run_str='${QT_UIC} ${SRC} -o ${TGT}'
- ext_out=['.h']
-class ts2qm(Task.Task):
- color='BLUE'
- run_str='${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
-class qm2rcc(Task.Task):
- color='BLUE'
- after='ts2qm'
- def run(self):
- txt='\n'.join(['<file>%s</file>'%k.path_from(self.outputs[0].parent)for k in self.inputs])
- code='<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>'%txt
- self.outputs[0].write(code)
-def configure(self):
- self.find_qt4_binaries()
- self.set_qt4_libs_to_check()
- self.find_qt4_libraries()
- self.add_qt4_rpath()
- self.simplify_qt4_libs()
-@conf
-def find_qt4_binaries(self):
- env=self.env
- opt=Options.options
- qtdir=getattr(opt,'qtdir','')
- qtbin=getattr(opt,'qtbin','')
- paths=[]
- if qtdir:
- qtbin=os.path.join(qtdir,'bin')
- if not qtdir:
- qtdir=os.environ.get('QT4_ROOT','')
- qtbin=os.environ.get('QT4_BIN',None)or os.path.join(qtdir,'bin')
- if qtbin:
- paths=[qtbin]
- if not qtdir:
- paths=os.environ.get('PATH','').split(os.pathsep)
- paths.append('/usr/share/qt4/bin/')
- try:
- lst=Utils.listdir('/usr/local/Trolltech/')
- except OSError:
- pass
- else:
- if lst:
- lst.sort()
- lst.reverse()
- qtdir='/usr/local/Trolltech/%s/'%lst[0]
- qtbin=os.path.join(qtdir,'bin')
- paths.append(qtbin)
- cand=None
- prev_ver=['4','0','0']
- for qmk in['qmake-qt4','qmake4','qmake']:
- try:
- qmake=self.find_program(qmk,path_list=paths)
- except self.errors.ConfigurationError:
- pass
- else:
- try:
- version=self.cmd_and_log([qmake,'-query','QT_VERSION']).strip()
- except self.errors.WafError:
- pass
- else:
- if version:
- new_ver=version.split('.')
- if new_ver>prev_ver:
- cand=qmake
- prev_ver=new_ver
- if cand:
- self.env.QMAKE=cand
- else:
- self.fatal('Could not find qmake for qt4')
- qtbin=self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_BINS']).strip()+os.sep
- def find_bin(lst,var):
- if var in env:
- return
- for f in lst:
- try:
- ret=self.find_program(f,path_list=paths)
- except self.errors.ConfigurationError:
- pass
- else:
- env[var]=ret
- break
- find_bin(['uic-qt3','uic3'],'QT_UIC3')
- find_bin(['uic-qt4','uic'],'QT_UIC')
- if not env['QT_UIC']:
- self.fatal('cannot find the uic compiler for qt4')
- try:
- uicver=self.cmd_and_log(env['QT_UIC']+" -version 2>&1").strip()
- except self.errors.ConfigurationError:
- self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')
- uicver=uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt','')
- self.msg('Checking for uic version','%s'%uicver)
- if uicver.find(' 3.')!=-1:
- self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')
- find_bin(['moc-qt4','moc'],'QT_MOC')
- find_bin(['rcc'],'QT_RCC')
- find_bin(['lrelease-qt4','lrelease'],'QT_LRELEASE')
- find_bin(['lupdate-qt4','lupdate'],'QT_LUPDATE')
- env['UIC3_ST']='%s -o %s'
- env['UIC_ST']='%s -o %s'
- env['MOC_ST']='-o'
- env['ui_PATTERN']='ui_%s.h'
- env['QT_LRELEASE_FLAGS']=['-silent']
- env.MOCCPPPATH_ST='-I%s'
- env.MOCDEFINES_ST='-D%s'
-@conf
-def find_qt4_libraries(self):
- qtlibs=getattr(Options.options,'qtlibs',None)or os.environ.get("QT4_LIBDIR",None)
- if not qtlibs:
- try:
- qtlibs=self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_LIBS']).strip()
- except Errors.WafError:
- qtdir=self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_PREFIX']).strip()+os.sep
- qtlibs=os.path.join(qtdir,'lib')
- self.msg('Found the Qt4 libraries in',qtlibs)
- qtincludes=os.environ.get("QT4_INCLUDES",None)or self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_HEADERS']).strip()
- env=self.env
- if not'PKG_CONFIG_PATH'in os.environ:
- os.environ['PKG_CONFIG_PATH']='%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib'%(qtlibs,qtlibs)
- try:
- if os.environ.get("QT4_XCOMPILE",None):
- raise self.errors.ConfigurationError()
- self.check_cfg(atleast_pkgconfig_version='0.1')
- except self.errors.ConfigurationError:
- for i in self.qt4_vars:
- uselib=i.upper()
- if Utils.unversioned_sys_platform()=="darwin":
- frameworkName=i+".framework"
- qtDynamicLib=os.path.join(qtlibs,frameworkName,i)
- if os.path.exists(qtDynamicLib):
- env.append_unique('FRAMEWORK_'+uselib,i)
- self.msg('Checking for %s'%i,qtDynamicLib,'GREEN')
- else:
- self.msg('Checking for %s'%i,False,'YELLOW')
- env.append_unique('INCLUDES_'+uselib,os.path.join(qtlibs,frameworkName,'Headers'))
- elif env.DEST_OS!="win32":
- qtDynamicLib=os.path.join(qtlibs,"lib"+i+".so")
- qtStaticLib=os.path.join(qtlibs,"lib"+i+".a")
- if os.path.exists(qtDynamicLib):
- env.append_unique('LIB_'+uselib,i)
- self.msg('Checking for %s'%i,qtDynamicLib,'GREEN')
- elif os.path.exists(qtStaticLib):
- env.append_unique('LIB_'+uselib,i)
- self.msg('Checking for %s'%i,qtStaticLib,'GREEN')
- else:
- self.msg('Checking for %s'%i,False,'YELLOW')
- env.append_unique('LIBPATH_'+uselib,qtlibs)
- env.append_unique('INCLUDES_'+uselib,qtincludes)
- env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i))
- else:
- for k in("lib%s.a","lib%s4.a","%s.lib","%s4.lib"):
- lib=os.path.join(qtlibs,k%i)
- if os.path.exists(lib):
- env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')])
- self.msg('Checking for %s'%i,lib,'GREEN')
- break
- else:
- self.msg('Checking for %s'%i,False,'YELLOW')
- env.append_unique('LIBPATH_'+uselib,qtlibs)
- env.append_unique('INCLUDES_'+uselib,qtincludes)
- env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i))
- uselib=i.upper()+"_debug"
- for k in("lib%sd.a","lib%sd4.a","%sd.lib","%sd4.lib"):
- lib=os.path.join(qtlibs,k%i)
- if os.path.exists(lib):
- env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')])
- self.msg('Checking for %s'%i,lib,'GREEN')
- break
- else:
- self.msg('Checking for %s'%i,False,'YELLOW')
- env.append_unique('LIBPATH_'+uselib,qtlibs)
- env.append_unique('INCLUDES_'+uselib,qtincludes)
- env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i))
- else:
- for i in self.qt4_vars_debug+self.qt4_vars:
- self.check_cfg(package=i,args='--cflags --libs',mandatory=False)
-@conf
-def simplify_qt4_libs(self):
- env=self.env
- def process_lib(vars_,coreval):
- for d in vars_:
- var=d.upper()
- if var=='QTCORE':
- continue
- value=env['LIBPATH_'+var]
- if value:
- core=env[coreval]
- accu=[]
- for lib in value:
- if lib in core:
- continue
- accu.append(lib)
- env['LIBPATH_'+var]=accu
- process_lib(self.qt4_vars,'LIBPATH_QTCORE')
- process_lib(self.qt4_vars_debug,'LIBPATH_QTCORE_DEBUG')
-@conf
-def add_qt4_rpath(self):
- env=self.env
- if Options.options.want_rpath:
- def process_rpath(vars_,coreval):
- for d in vars_:
- var=d.upper()
- value=env['LIBPATH_'+var]
- if value:
- core=env[coreval]
- accu=[]
- for lib in value:
- if var!='QTCORE':
- if lib in core:
- continue
- accu.append('-Wl,--rpath='+lib)
- env['RPATH_'+var]=accu
- process_rpath(self.qt4_vars,'LIBPATH_QTCORE')
- process_rpath(self.qt4_vars_debug,'LIBPATH_QTCORE_DEBUG')
-@conf
-def set_qt4_libs_to_check(self):
- if not hasattr(self,'qt4_vars'):
- self.qt4_vars=QT4_LIBS
- self.qt4_vars=Utils.to_list(self.qt4_vars)
- if not hasattr(self,'qt4_vars_debug'):
- self.qt4_vars_debug=[a+'_debug'for a in self.qt4_vars]
- self.qt4_vars_debug=Utils.to_list(self.qt4_vars_debug)
-def options(opt):
- opt.add_option('--want-rpath',action='store_true',default=False,dest='want_rpath',help='enable the rpath for qt libraries')
- opt.add_option('--header-ext',type='string',default='',help='header extension for moc files',dest='qt_header_ext')
- for i in'qtdir qtbin qtlibs'.split():
- opt.add_option('--'+i,type='string',default='',dest=i)
- opt.add_option('--translate',action="store_true",help="collect translation strings",dest="trans_qt4",default=False)
diff --git a/waflib/Tools/ruby.py b/waflib/Tools/ruby.py
deleted file mode 100644
index 04cddfb..0000000
--- a/waflib/Tools/ruby.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os
-from waflib import Task,Options,Utils
-from waflib.TaskGen import before_method,feature,after_method,Task,extension
-from waflib.Configure import conf
-@feature('rubyext')
-@before_method('apply_incpaths','apply_lib_vars','apply_bundle','apply_link')
-def init_rubyext(self):
- self.install_path='${ARCHDIR_RUBY}'
- self.uselib=self.to_list(getattr(self,'uselib',''))
- if not'RUBY'in self.uselib:
- self.uselib.append('RUBY')
- if not'RUBYEXT'in self.uselib:
- self.uselib.append('RUBYEXT')
-@feature('rubyext')
-@before_method('apply_link','propagate_uselib')
-def apply_ruby_so_name(self):
- self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['rubyext_PATTERN']
-@conf
-def check_ruby_version(self,minver=()):
- if Options.options.rubybinary:
- self.env.RUBY=Options.options.rubybinary
- else:
- self.find_program('ruby',var='RUBY')
- ruby=self.env.RUBY
- try:
- version=self.cmd_and_log([ruby,'-e','puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
- except Exception:
- self.fatal('could not determine ruby version')
- self.env.RUBY_VERSION=version
- try:
- ver=tuple(map(int,version.split(".")))
- except Exception:
- self.fatal('unsupported ruby version %r'%version)
- cver=''
- if minver:
- if ver<minver:
- self.fatal('ruby is too old %r'%ver)
- cver='.'.join([str(x)for x in minver])
- else:
- cver=ver
- self.msg('Checking for ruby version %s'%str(minver or''),cver)
-@conf
-def check_ruby_ext_devel(self):
- if not self.env.RUBY:
- self.fatal('ruby detection is required first')
- if not self.env.CC_NAME and not self.env.CXX_NAME:
- self.fatal('load a c/c++ compiler first')
- version=tuple(map(int,self.env.RUBY_VERSION.split(".")))
- def read_out(cmd):
- return Utils.to_list(self.cmd_and_log([self.env.RUBY,'-rrbconfig','-e',cmd]))
- def read_config(key):
- return read_out('puts Config::CONFIG[%r]'%key)
- ruby=self.env['RUBY']
- archdir=read_config('archdir')
- cpppath=archdir
- if version>=(1,9,0):
- ruby_hdrdir=read_config('rubyhdrdir')
- cpppath+=ruby_hdrdir
- cpppath+=[os.path.join(ruby_hdrdir[0],read_config('arch')[0])]
- self.check(header_name='ruby.h',includes=cpppath,errmsg='could not find ruby header file')
- self.env.LIBPATH_RUBYEXT=read_config('libdir')
- self.env.LIBPATH_RUBYEXT+=archdir
- self.env.INCLUDES_RUBYEXT=cpppath
- self.env.CFLAGS_RUBYEXT=read_config('CCDLFLAGS')
- self.env.rubyext_PATTERN='%s.'+read_config('DLEXT')[0]
- flags=read_config('LDSHARED')
- while flags and flags[0][0]!='-':
- flags=flags[1:]
- if len(flags)>1 and flags[1]=="ppc":
- flags=flags[2:]
- self.env.LINKFLAGS_RUBYEXT=flags
- self.env.LINKFLAGS_RUBYEXT+=read_config('LIBS')
- self.env.LINKFLAGS_RUBYEXT+=read_config('LIBRUBYARG_SHARED')
- if Options.options.rubyarchdir:
- self.env.ARCHDIR_RUBY=Options.options.rubyarchdir
- else:
- self.env.ARCHDIR_RUBY=read_config('sitearchdir')[0]
- if Options.options.rubylibdir:
- self.env.LIBDIR_RUBY=Options.options.rubylibdir
- else:
- self.env.LIBDIR_RUBY=read_config('sitelibdir')[0]
-@conf
-def check_ruby_module(self,module_name):
- self.start_msg('Ruby module %s'%module_name)
- try:
- self.cmd_and_log([self.env['RUBY'],'-e','require \'%s\';puts 1'%module_name])
- except Exception:
- self.end_msg(False)
- self.fatal('Could not find the ruby module %r'%module_name)
- self.end_msg(True)
-@extension('.rb')
-def process(self,node):
- tsk=self.create_task('run_ruby',node)
-class run_ruby(Task.Task):
- run_str='${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}'
-def options(opt):
- opt.add_option('--with-ruby-archdir',type='string',dest='rubyarchdir',help='Specify directory where to install arch specific files')
- opt.add_option('--with-ruby-libdir',type='string',dest='rubylibdir',help='Specify alternate ruby library path')
- opt.add_option('--with-ruby-binary',type='string',dest='rubybinary',help='Specify alternate ruby binary')
diff --git a/waflib/Tools/suncc.py b/waflib/Tools/suncc.py
deleted file mode 100644
index edd24cd..0000000
--- a/waflib/Tools/suncc.py
+++ /dev/null
@@ -1,53 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os
-from waflib import Utils
-from waflib.Tools import ccroot,ar
-from waflib.Configure import conf
-@conf
-def find_scc(conf):
- v=conf.env
- cc=None
- if v['CC']:cc=v['CC']
- elif'CC'in conf.environ:cc=conf.environ['CC']
- if not cc:cc=conf.find_program('cc',var='CC')
- if not cc:conf.fatal('Could not find a Sun C compiler')
- cc=conf.cmd_to_list(cc)
- try:
- conf.cmd_and_log(cc+['-flags'])
- except Exception:
- conf.fatal('%r is not a Sun compiler'%cc)
- v['CC']=cc
- v['CC_NAME']='sun'
-@conf
-def scc_common_flags(conf):
- v=conf.env
- v['CC_SRC_F']=[]
- v['CC_TGT_F']=['-c','-o']
- if not v['LINK_CC']:v['LINK_CC']=v['CC']
- v['CCLNK_SRC_F']=''
- v['CCLNK_TGT_F']=['-o']
- v['CPPPATH_ST']='-I%s'
- v['DEFINES_ST']='-D%s'
- v['LIB_ST']='-l%s'
- v['LIBPATH_ST']='-L%s'
- v['STLIB_ST']='-l%s'
- v['STLIBPATH_ST']='-L%s'
- v['SONAME_ST']='-Wl,-h,%s'
- v['SHLIB_MARKER']='-Bdynamic'
- v['STLIB_MARKER']='-Bstatic'
- v['cprogram_PATTERN']='%s'
- v['CFLAGS_cshlib']=['-Kpic','-DPIC']
- v['LINKFLAGS_cshlib']=['-G']
- v['cshlib_PATTERN']='lib%s.so'
- v['LINKFLAGS_cstlib']=['-Bstatic']
- v['cstlib_PATTERN']='lib%s.a'
-def configure(conf):
- conf.find_scc()
- conf.find_ar()
- conf.scc_common_flags()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
diff --git a/waflib/Tools/suncxx.py b/waflib/Tools/suncxx.py
deleted file mode 100644
index 4b8b931..0000000
--- a/waflib/Tools/suncxx.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os
-from waflib import Utils
-from waflib.Tools import ccroot,ar
-from waflib.Configure import conf
-@conf
-def find_sxx(conf):
- v=conf.env
- cc=None
- if v['CXX']:cc=v['CXX']
- elif'CXX'in conf.environ:cc=conf.environ['CXX']
- if not cc:cc=conf.find_program('CC',var='CXX')
- if not cc:cc=conf.find_program('c++',var='CXX')
- if not cc:conf.fatal('Could not find a Sun C++ compiler')
- cc=conf.cmd_to_list(cc)
- try:
- conf.cmd_and_log(cc+['-flags'])
- except Exception:
- conf.fatal('%r is not a Sun compiler'%cc)
- v['CXX']=cc
- v['CXX_NAME']='sun'
-@conf
-def sxx_common_flags(conf):
- v=conf.env
- v['CXX_SRC_F']=[]
- v['CXX_TGT_F']=['-c','-o']
- if not v['LINK_CXX']:v['LINK_CXX']=v['CXX']
- v['CXXLNK_SRC_F']=[]
- v['CXXLNK_TGT_F']=['-o']
- v['CPPPATH_ST']='-I%s'
- v['DEFINES_ST']='-D%s'
- v['LIB_ST']='-l%s'
- v['LIBPATH_ST']='-L%s'
- v['STLIB_ST']='-l%s'
- v['STLIBPATH_ST']='-L%s'
- v['SONAME_ST']='-Wl,-h,%s'
- v['SHLIB_MARKER']='-Bdynamic'
- v['STLIB_MARKER']='-Bstatic'
- v['cxxprogram_PATTERN']='%s'
- v['CXXFLAGS_cxxshlib']=['-Kpic','-DPIC']
- v['LINKFLAGS_cxxshlib']=['-G']
- v['cxxshlib_PATTERN']='lib%s.so'
- v['LINKFLAGS_cxxstlib']=['-Bstatic']
- v['cxxstlib_PATTERN']='lib%s.a'
-def configure(conf):
- conf.find_sxx()
- conf.find_ar()
- conf.sxx_common_flags()
- conf.cxx_load_tools()
- conf.cxx_add_flags()
- conf.link_add_flags()
diff --git a/waflib/Tools/tex.py b/waflib/Tools/tex.py
deleted file mode 100644
index 4448381..0000000
--- a/waflib/Tools/tex.py
+++ /dev/null
@@ -1,250 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,re
-from waflib import Utils,Task,Errors,Logs
-from waflib.TaskGen import feature,before_method
-re_bibunit=re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M)
-def bibunitscan(self):
- node=self.inputs[0]
- nodes=[]
- if not node:return nodes
- code=node.read()
- for match in re_bibunit.finditer(code):
- path=match.group('file')
- if path:
- for k in['','.bib']:
- Logs.debug('tex: trying %s%s'%(path,k))
- fi=node.parent.find_resource(path+k)
- if fi:
- nodes.append(fi)
- else:
- Logs.debug('tex: could not find %s'%path)
- Logs.debug("tex: found the following bibunit files: %s"%nodes)
- return nodes
-exts_deps_tex=['','.ltx','.tex','.bib','.pdf','.png','.eps','.ps']
-exts_tex=['.ltx','.tex']
-re_tex=re.compile(r'\\(?P<type>include|bibliography|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
-g_bibtex_re=re.compile('bibdata',re.M)
-class tex(Task.Task):
- bibtex_fun,_=Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}',shell=False)
- bibtex_fun.__doc__="""
- Execute the program **bibtex**
- """
- makeindex_fun,_=Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}',shell=False)
- makeindex_fun.__doc__="""
- Execute the program **makeindex**
- """
- def exec_command(self,cmd,**kw):
- bld=self.generator.bld
- try:
- if not kw.get('cwd',None):
- kw['cwd']=bld.cwd
- except AttributeError:
- bld.cwd=kw['cwd']=bld.variant_dir
- return Utils.subprocess.Popen(cmd,**kw).wait()
- def scan_aux(self,node):
- nodes=[node]
- re_aux=re.compile(r'\\@input{(?P<file>[^{}]*)}',re.M)
- def parse_node(node):
- code=node.read()
- for match in re_aux.finditer(code):
- path=match.group('file')
- found=node.parent.find_or_declare(path)
- if found and found not in nodes:
- Logs.debug('tex: found aux node '+found.abspath())
- nodes.append(found)
- parse_node(found)
- parse_node(node)
- return nodes
- def scan(self):
- node=self.inputs[0]
- nodes=[]
- names=[]
- seen=[]
- if not node:return(nodes,names)
- def parse_node(node):
- if node in seen:
- return
- seen.append(node)
- code=node.read()
- global re_tex
- for match in re_tex.finditer(code):
- for path in match.group('file').split(','):
- if path:
- add_name=True
- found=None
- for k in exts_deps_tex:
- Logs.debug('tex: trying %s%s'%(path,k))
- found=node.parent.find_resource(path+k)
- if found and not found in self.outputs:
- nodes.append(found)
- add_name=False
- for ext in exts_tex:
- if found.name.endswith(ext):
- parse_node(found)
- break
- if add_name:
- names.append(path)
- parse_node(node)
- for x in nodes:
- x.parent.get_bld().mkdir()
- Logs.debug("tex: found the following : %s and names %s"%(nodes,names))
- return(nodes,names)
- def check_status(self,msg,retcode):
- if retcode!=0:
- raise Errors.WafError("%r command exit status %r"%(msg,retcode))
- def bibfile(self):
- need_bibtex=False
- try:
- for aux_node in self.aux_nodes:
- ct=aux_node.read()
- if g_bibtex_re.findall(ct):
- need_bibtex=True
- break
- except(OSError,IOError):
- Logs.error('error bibtex scan')
- else:
- if need_bibtex:
- Logs.warn('calling bibtex')
- self.env.env={}
- self.env.env.update(os.environ)
- self.env.env.update({'BIBINPUTS':self.TEXINPUTS,'BSTINPUTS':self.TEXINPUTS})
- self.env.SRCFILE=self.aux_nodes[0].name[:-4]
- self.check_status('error when calling bibtex',self.bibtex_fun())
- def bibunits(self):
- try:
- bibunits=bibunitscan(self)
- except OSError:
- Logs.error('error bibunitscan')
- else:
- if bibunits:
- fn=['bu'+str(i)for i in xrange(1,len(bibunits)+1)]
- if fn:
- Logs.warn('calling bibtex on bibunits')
- for f in fn:
- self.env.env={'BIBINPUTS':self.TEXINPUTS,'BSTINPUTS':self.TEXINPUTS}
- self.env.SRCFILE=f
- self.check_status('error when calling bibtex',self.bibtex_fun())
- def makeindex(self):
- try:
- idx_path=self.idx_node.abspath()
- os.stat(idx_path)
- except OSError:
- Logs.warn('index file %s absent, not calling makeindex'%idx_path)
- else:
- Logs.warn('calling makeindex')
- self.env.SRCFILE=self.idx_node.name
- self.env.env={}
- self.check_status('error when calling makeindex %s'%idx_path,self.makeindex_fun())
- def run(self):
- env=self.env
- if not env['PROMPT_LATEX']:
- env.append_value('LATEXFLAGS','-interaction=batchmode')
- env.append_value('PDFLATEXFLAGS','-interaction=batchmode')
- env.append_value('XELATEXFLAGS','-interaction=batchmode')
- fun=self.texfun
- node=self.inputs[0]
- srcfile=node.abspath()
- texinputs=self.env.TEXINPUTS or''
- self.TEXINPUTS=node.parent.get_bld().abspath()+os.pathsep+node.parent.get_src().abspath()+os.pathsep+texinputs+os.pathsep
- self.cwd=self.inputs[0].parent.get_bld().abspath()
- Logs.warn('first pass on %s'%self.__class__.__name__)
- self.env.env={}
- self.env.env.update(os.environ)
- self.env.env.update({'TEXINPUTS':self.TEXINPUTS})
- self.env.SRCFILE=srcfile
- self.check_status('error when calling latex',fun())
- self.aux_nodes=self.scan_aux(node.change_ext('.aux'))
- self.idx_node=node.change_ext('.idx')
- self.bibfile()
- self.bibunits()
- self.makeindex()
- hash=''
- for i in range(10):
- prev_hash=hash
- try:
- hashes=[Utils.h_file(x.abspath())for x in self.aux_nodes]
- hash=Utils.h_list(hashes)
- except(OSError,IOError):
- Logs.error('could not read aux.h')
- pass
- if hash and hash==prev_hash:
- break
- Logs.warn('calling %s'%self.__class__.__name__)
- self.env.env={}
- self.env.env.update(os.environ)
- self.env.env.update({'TEXINPUTS':self.TEXINPUTS})
- self.env.SRCFILE=srcfile
- self.check_status('error when calling %s'%self.__class__.__name__,fun())
-class latex(tex):
- texfun,vars=Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}',shell=False)
-class pdflatex(tex):
- texfun,vars=Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}',shell=False)
-class xelatex(tex):
- texfun,vars=Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}',shell=False)
-class dvips(Task.Task):
- run_str='${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}'
- color='BLUE'
- after=['latex','pdflatex','xelatex']
-class dvipdf(Task.Task):
- run_str='${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}'
- color='BLUE'
- after=['latex','pdflatex','xelatex']
-class pdf2ps(Task.Task):
- run_str='${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}'
- color='BLUE'
- after=['latex','pdflatex','xelatex']
-@feature('tex')
-@before_method('process_source')
-def apply_tex(self):
- if not getattr(self,'type',None)in['latex','pdflatex','xelatex']:
- self.type='pdflatex'
- tree=self.bld
- outs=Utils.to_list(getattr(self,'outs',[]))
- self.env['PROMPT_LATEX']=getattr(self,'prompt',1)
- deps_lst=[]
- if getattr(self,'deps',None):
- deps=self.to_list(self.deps)
- for filename in deps:
- n=self.path.find_resource(filename)
- if not n:
- self.bld.fatal('Could not find %r for %r'%(filename,self))
- if not n in deps_lst:
- deps_lst.append(n)
- for node in self.to_nodes(self.source):
- if self.type=='latex':
- task=self.create_task('latex',node,node.change_ext('.dvi'))
- elif self.type=='pdflatex':
- task=self.create_task('pdflatex',node,node.change_ext('.pdf'))
- elif self.type=='xelatex':
- task=self.create_task('xelatex',node,node.change_ext('.pdf'))
- task.env=self.env
- if deps_lst:
- try:
- lst=tree.node_deps[task.uid()]
- for n in deps_lst:
- if not n in lst:
- lst.append(n)
- except KeyError:
- tree.node_deps[task.uid()]=deps_lst
- if self.type=='latex':
- if'ps'in outs:
- tsk=self.create_task('dvips',task.outputs,node.change_ext('.ps'))
- tsk.env.env={'TEXINPUTS':node.parent.abspath()+os.pathsep+self.path.abspath()+os.pathsep+self.path.get_bld().abspath()}
- if'pdf'in outs:
- tsk=self.create_task('dvipdf',task.outputs,node.change_ext('.pdf'))
- tsk.env.env={'TEXINPUTS':node.parent.abspath()+os.pathsep+self.path.abspath()+os.pathsep+self.path.get_bld().abspath()}
- elif self.type=='pdflatex':
- if'ps'in outs:
- self.create_task('pdf2ps',task.outputs,node.change_ext('.ps'))
- self.source=[]
-def configure(self):
- v=self.env
- for p in'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
- try:
- self.find_program(p,var=p.upper())
- except self.errors.ConfigurationError:
- pass
- v['DVIPSFLAGS']='-Ppdf'
diff --git a/waflib/Tools/vala.py b/waflib/Tools/vala.py
deleted file mode 100644
index 96248c1..0000000
--- a/waflib/Tools/vala.py
+++ /dev/null
@@ -1,201 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os.path,shutil,re
-from waflib import Context,Task,Utils,Logs,Options,Errors
-from waflib.TaskGen import extension,taskgen_method
-from waflib.Configure import conf
-class valac(Task.Task):
- vars=["VALAC","VALAC_VERSION","VALAFLAGS"]
- ext_out=['.h']
- def run(self):
- cmd=[self.env['VALAC']]+self.env['VALAFLAGS']
- cmd.extend([a.abspath()for a in self.inputs])
- ret=self.exec_command(cmd,cwd=self.outputs[0].parent.abspath())
- if ret:
- return ret
- for x in self.outputs:
- if id(x.parent)!=id(self.outputs[0].parent):
- shutil.move(self.outputs[0].parent.abspath()+os.sep+x.name,x.abspath())
- if self.generator.dump_deps_node:
- self.generator.dump_deps_node.write('\n'.join(self.generator.packages))
- return ret
-valac=Task.update_outputs(valac)
-@taskgen_method
-def init_vala_task(self):
- self.profile=getattr(self,'profile','gobject')
- if self.profile=='gobject':
- self.uselib=Utils.to_list(getattr(self,'uselib',[]))
- if not'GOBJECT'in self.uselib:
- self.uselib.append('GOBJECT')
- def addflags(flags):
- self.env.append_value('VALAFLAGS',flags)
- if self.profile:
- addflags('--profile=%s'%self.profile)
- if hasattr(self,'threading'):
- if self.profile=='gobject':
- if not'GTHREAD'in self.uselib:
- self.uselib.append('GTHREAD')
- else:
- Logs.warn("Profile %s means no threading support"%self.profile)
- self.threading=False
- if self.threading:
- addflags('--threading')
- valatask=self.valatask
- self.is_lib='cprogram'not in self.features
- if self.is_lib:
- addflags('--library=%s'%self.target)
- h_node=self.path.find_or_declare('%s.h'%self.target)
- valatask.outputs.append(h_node)
- addflags('--header=%s'%h_node.name)
- valatask.outputs.append(self.path.find_or_declare('%s.vapi'%self.target))
- if getattr(self,'gir',None):
- gir_node=self.path.find_or_declare('%s.gir'%self.gir)
- addflags('--gir=%s'%gir_node.name)
- valatask.outputs.append(gir_node)
- self.vala_target_glib=getattr(self,'vala_target_glib',getattr(Options.options,'vala_target_glib',None))
- if self.vala_target_glib:
- addflags('--target-glib=%s'%self.vala_target_glib)
- addflags(['--define=%s'%x for x in getattr(self,'vala_defines',[])])
- packages_private=Utils.to_list(getattr(self,'packages_private',[]))
- addflags(['--pkg=%s'%x for x in packages_private])
- def _get_api_version():
- api_version='1.0'
- if hasattr(Context.g_module,'API_VERSION'):
- version=Context.g_module.API_VERSION.split(".")
- if version[0]=="0":
- api_version="0."+version[1]
- else:
- api_version=version[0]+".0"
- return api_version
- self.includes=Utils.to_list(getattr(self,'includes',[]))
- self.uselib=self.to_list(getattr(self,'uselib',[]))
- valatask.install_path=getattr(self,'install_path','')
- valatask.vapi_path=getattr(self,'vapi_path','${DATAROOTDIR}/vala/vapi')
- valatask.pkg_name=getattr(self,'pkg_name',self.env['PACKAGE'])
- valatask.header_path=getattr(self,'header_path','${INCLUDEDIR}/%s-%s'%(valatask.pkg_name,_get_api_version()))
- valatask.install_binding=getattr(self,'install_binding',True)
- self.packages=packages=Utils.to_list(getattr(self,'packages',[]))
- self.vapi_dirs=vapi_dirs=Utils.to_list(getattr(self,'vapi_dirs',[]))
- includes=[]
- if hasattr(self,'use'):
- local_packages=Utils.to_list(self.use)[:]
- seen=[]
- while len(local_packages)>0:
- package=local_packages.pop()
- if package in seen:
- continue
- seen.append(package)
- try:
- package_obj=self.bld.get_tgen_by_name(package)
- except Errors.WafError:
- continue
- package_name=package_obj.target
- package_node=package_obj.path
- package_dir=package_node.path_from(self.path)
- for task in package_obj.tasks:
- for output in task.outputs:
- if output.name==package_name+".vapi":
- valatask.set_run_after(task)
- if package_name not in packages:
- packages.append(package_name)
- if package_dir not in vapi_dirs:
- vapi_dirs.append(package_dir)
- if package_dir not in includes:
- includes.append(package_dir)
- if hasattr(package_obj,'use'):
- lst=self.to_list(package_obj.use)
- lst.reverse()
- local_packages=[pkg for pkg in lst if pkg not in seen]+local_packages
- addflags(['--pkg=%s'%p for p in packages])
- for vapi_dir in vapi_dirs:
- v_node=self.path.find_dir(vapi_dir)
- if not v_node:
- Logs.warn('Unable to locate Vala API directory: %r'%vapi_dir)
- else:
- addflags('--vapidir=%s'%v_node.abspath())
- addflags('--vapidir=%s'%v_node.get_bld().abspath())
- self.dump_deps_node=None
- if self.is_lib and self.packages:
- self.dump_deps_node=self.path.find_or_declare('%s.deps'%self.target)
- valatask.outputs.append(self.dump_deps_node)
- self.includes.append(self.bld.srcnode.abspath())
- self.includes.append(self.bld.bldnode.abspath())
- for include in includes:
- try:
- self.includes.append(self.path.find_dir(include).abspath())
- self.includes.append(self.path.find_dir(include).get_bld().abspath())
- except AttributeError:
- Logs.warn("Unable to locate include directory: '%s'"%include)
- if self.is_lib and valatask.install_binding:
- headers_list=[o for o in valatask.outputs if o.suffix()==".h"]
- try:
- self.install_vheader.source=headers_list
- except AttributeError:
- self.install_vheader=self.bld.install_files(valatask.header_path,headers_list,self.env)
- vapi_list=[o for o in valatask.outputs if(o.suffix()in(".vapi",".deps"))]
- try:
- self.install_vapi.source=vapi_list
- except AttributeError:
- self.install_vapi=self.bld.install_files(valatask.vapi_path,vapi_list,self.env)
- gir_list=[o for o in valatask.outputs if o.suffix()=='.gir']
- try:
- self.install_gir.source=gir_list
- except AttributeError:
- self.install_gir=self.bld.install_files(getattr(self,'gir_path','${DATAROOTDIR}/gir-1.0'),gir_list,self.env)
-@extension('.vala','.gs')
-def vala_file(self,node):
- try:
- valatask=self.valatask
- except AttributeError:
- valatask=self.valatask=self.create_task('valac')
- self.init_vala_task()
- valatask.inputs.append(node)
- c_node=node.change_ext('.c')
- valatask.outputs.append(c_node)
- self.source.append(c_node)
-@conf
-def find_valac(self,valac_name,min_version):
- valac=self.find_program(valac_name,var='VALAC')
- try:
- output=self.cmd_and_log(valac+' --version')
- except Exception:
- valac_version=None
- else:
- ver=re.search(r'\d+.\d+.\d+',output).group(0).split('.')
- valac_version=tuple([int(x)for x in ver])
- self.msg('Checking for %s version >= %r'%(valac_name,min_version),valac_version,valac_version and valac_version>=min_version)
- if valac and valac_version<min_version:
- self.fatal("%s version %r is too old, need >= %r"%(valac_name,valac_version,min_version))
- self.env['VALAC_VERSION']=valac_version
- return valac
-@conf
-def check_vala(self,min_version=(0,8,0),branch=None):
- if not branch:
- branch=min_version[:2]
- try:
- find_valac(self,'valac-%d.%d'%(branch[0],branch[1]),min_version)
- except self.errors.ConfigurationError:
- find_valac(self,'valac',min_version)
-@conf
-def check_vala_deps(self):
- if not self.env['HAVE_GOBJECT']:
- pkg_args={'package':'gobject-2.0','uselib_store':'GOBJECT','args':'--cflags --libs'}
- if getattr(Options.options,'vala_target_glib',None):
- pkg_args['atleast_version']=Options.options.vala_target_glib
- self.check_cfg(**pkg_args)
- if not self.env['HAVE_GTHREAD']:
- pkg_args={'package':'gthread-2.0','uselib_store':'GTHREAD','args':'--cflags --libs'}
- if getattr(Options.options,'vala_target_glib',None):
- pkg_args['atleast_version']=Options.options.vala_target_glib
- self.check_cfg(**pkg_args)
-def configure(self):
- self.load('gnu_dirs')
- self.check_vala_deps()
- self.check_vala()
- self.env.VALAFLAGS=['-C','--quiet']
-def options(opt):
- opt.load('gnu_dirs')
- valaopts=opt.add_option_group('Vala Compiler Options')
- valaopts.add_option('--vala-target-glib',default=None,dest='vala_target_glib',metavar='MAJOR.MINOR',help='Target version of glib for Vala GObject code generation')
diff --git a/waflib/Tools/waf_unit_test.py b/waflib/Tools/waf_unit_test.py
deleted file mode 100644
index f461be1..0000000
--- a/waflib/Tools/waf_unit_test.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys
-from waflib.TaskGen import feature,after_method
-from waflib import Utils,Task,Logs,Options
-testlock=Utils.threading.Lock()
-@feature('test')
-@after_method('apply_link')
-def make_test(self):
- if getattr(self,'link_task',None):
- self.create_task('utest',self.link_task.outputs)
-class utest(Task.Task):
- color='PINK'
- after=['vnum','inst']
- vars=[]
- def runnable_status(self):
- if getattr(Options.options,'no_tests',False):
- return Task.SKIP_ME
- ret=super(utest,self).runnable_status()
- if ret==Task.SKIP_ME:
- if getattr(Options.options,'all_tests',False):
- return Task.RUN_ME
- return ret
- def run(self):
- filename=self.inputs[0].abspath()
- self.ut_exec=getattr(self.generator,'ut_exec',[filename])
- if getattr(self.generator,'ut_fun',None):
- self.generator.ut_fun(self)
- try:
- fu=getattr(self.generator.bld,'all_test_paths')
- except AttributeError:
- fu=os.environ.copy()
- lst=[]
- for g in self.generator.bld.groups:
- for tg in g:
- if getattr(tg,'link_task',None):
- lst.append(tg.link_task.outputs[0].parent.abspath())
- def add_path(dct,path,var):
- dct[var]=os.pathsep.join(Utils.to_list(path)+[os.environ.get(var,'')])
- if Utils.is_win32:
- add_path(fu,lst,'PATH')
- elif Utils.unversioned_sys_platform()=='darwin':
- add_path(fu,lst,'DYLD_LIBRARY_PATH')
- add_path(fu,lst,'LD_LIBRARY_PATH')
- else:
- add_path(fu,lst,'LD_LIBRARY_PATH')
- self.generator.bld.all_test_paths=fu
- cwd=getattr(self.generator,'ut_cwd','')or self.inputs[0].parent.abspath()
- testcmd=getattr(Options.options,'testcmd',False)
- if testcmd:
- self.ut_exec=(testcmd%self.ut_exec[0]).split(' ')
- proc=Utils.subprocess.Popen(self.ut_exec,cwd=cwd,env=fu,stderr=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE)
- (stdout,stderr)=proc.communicate()
- tup=(filename,proc.returncode,stdout,stderr)
- self.generator.utest_result=tup
- testlock.acquire()
- try:
- bld=self.generator.bld
- Logs.debug("ut: %r",tup)
- try:
- bld.utest_results.append(tup)
- except AttributeError:
- bld.utest_results=[tup]
- finally:
- testlock.release()
-def summary(bld):
- lst=getattr(bld,'utest_results',[])
- if lst:
- Logs.pprint('CYAN','execution summary')
- total=len(lst)
- tfail=len([x for x in lst if x[1]])
- Logs.pprint('CYAN',' tests that pass %d/%d'%(total-tfail,total))
- for(f,code,out,err)in lst:
- if not code:
- Logs.pprint('CYAN',' %s'%f)
- Logs.pprint('CYAN',' tests that fail %d/%d'%(tfail,total))
- for(f,code,out,err)in lst:
- if code:
- Logs.pprint('CYAN',' %s'%f)
-def set_exit_code(bld):
- lst=getattr(bld,'utest_results',[])
- for(f,code,out,err)in lst:
- if code:
- msg=[]
- if out:
- msg.append('stdout:%s%s'%(os.linesep,out.decode('utf-8')))
- if err:
- msg.append('stderr:%s%s'%(os.linesep,err.decode('utf-8')))
- bld.fatal(os.linesep.join(msg))
-def options(opt):
- opt.add_option('--notests',action='store_true',default=False,help='Exec no unit tests',dest='no_tests')
- opt.add_option('--alltests',action='store_true',default=False,help='Exec all unit tests',dest='all_tests')
- opt.add_option('--testcmd',action='store',default=False,help='Run the unit tests using the test-cmd string'' example "--test-cmd="valgrind --error-exitcode=1'' %s" to run under valgrind',dest='testcmd')
diff --git a/waflib/Tools/winres.py b/waflib/Tools/winres.py
deleted file mode 100644
index 88904af..0000000
--- a/waflib/Tools/winres.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import re,traceback
-from waflib import Task,Logs,Utils
-from waflib.TaskGen import extension
-from waflib.Tools import c_preproc
-@extension('.rc')
-def rc_file(self,node):
- obj_ext='.rc.o'
- if self.env['WINRC_TGT_F']=='/fo':
- obj_ext='.res'
- rctask=self.create_task('winrc',node,node.change_ext(obj_ext))
- try:
- self.compiled_tasks.append(rctask)
- except AttributeError:
- self.compiled_tasks=[rctask]
-re_lines=re.compile('(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|''(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',re.IGNORECASE|re.MULTILINE)
-class rc_parser(c_preproc.c_parser):
- def filter_comments(self,filepath):
- code=Utils.readf(filepath)
- if c_preproc.use_trigraphs:
- for(a,b)in c_preproc.trig_def:code=code.split(a).join(b)
- code=c_preproc.re_nl.sub('',code)
- code=c_preproc.re_cpp.sub(c_preproc.repl,code)
- ret=[]
- for m in re.finditer(re_lines,code):
- if m.group(2):
- ret.append((m.group(2),m.group(3)))
- else:
- ret.append(('include',m.group(5)))
- return ret
- def addlines(self,node):
- self.currentnode_stack.append(node.parent)
- filepath=node.abspath()
- self.count_files+=1
- if self.count_files>c_preproc.recursion_limit:
- raise c_preproc.PreprocError("recursion limit exceeded")
- pc=self.parse_cache
- Logs.debug('preproc: reading file %r',filepath)
- try:
- lns=pc[filepath]
- except KeyError:
- pass
- else:
- self.lines.extend(lns)
- return
- try:
- lines=self.filter_comments(filepath)
- lines.append((c_preproc.POPFILE,''))
- lines.reverse()
- pc[filepath]=lines
- self.lines.extend(lines)
- except IOError:
- raise c_preproc.PreprocError("could not read the file %s"%filepath)
- except Exception:
- if Logs.verbose>0:
- Logs.error("parsing %s failed"%filepath)
- traceback.print_exc()
-class winrc(Task.Task):
- run_str='${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
- color='BLUE'
- def scan(self):
- tmp=rc_parser(self.generator.includes_nodes)
- tmp.start(self.inputs[0],self.env)
- nodes=tmp.nodes
- names=tmp.names
- if Logs.verbose:
- Logs.debug('deps: deps for %s: %r; unresolved %r'%(str(self),nodes,names))
- return(nodes,names)
-def configure(conf):
- v=conf.env
- v['WINRC_TGT_F']='-o'
- v['WINRC_SRC_F']='-i'
- if not conf.env.WINRC:
- if v.CC_NAME=='msvc':
- conf.find_program('RC',var='WINRC',path_list=v['PATH'])
- v['WINRC_TGT_F']='/fo'
- v['WINRC_SRC_F']=''
- else:
- conf.find_program('windres',var='WINRC',path_list=v['PATH'])
- if not conf.env.WINRC:
- conf.fatal('winrc was not found!')
- v['WINRCFLAGS']=[]
diff --git a/waflib/Tools/xlc.py b/waflib/Tools/xlc.py
deleted file mode 100644
index fbf0fcf..0000000
--- a/waflib/Tools/xlc.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-from waflib.Tools import ccroot,ar
-from waflib.Configure import conf
-@conf
-def find_xlc(conf):
- cc=conf.find_program(['xlc_r','xlc'],var='CC')
- cc=conf.cmd_to_list(cc)
- conf.get_xlc_version(cc)
- conf.env.CC_NAME='xlc'
- conf.env.CC=cc
-@conf
-def xlc_common_flags(conf):
- v=conf.env
- v['CC_SRC_F']=[]
- v['CC_TGT_F']=['-c','-o']
- if not v['LINK_CC']:v['LINK_CC']=v['CC']
- v['CCLNK_SRC_F']=[]
- v['CCLNK_TGT_F']=['-o']
- v['CPPPATH_ST']='-I%s'
- v['DEFINES_ST']='-D%s'
- v['LIB_ST']='-l%s'
- v['LIBPATH_ST']='-L%s'
- v['STLIB_ST']='-l%s'
- v['STLIBPATH_ST']='-L%s'
- v['RPATH_ST']='-Wl,-rpath,%s'
- v['SONAME_ST']=[]
- v['SHLIB_MARKER']=[]
- v['STLIB_MARKER']=[]
- v['LINKFLAGS_cprogram']=['-Wl,-brtl']
- v['cprogram_PATTERN']='%s'
- v['CFLAGS_cshlib']=['-fPIC']
- v['LINKFLAGS_cshlib']=['-G','-Wl,-brtl,-bexpfull']
- v['cshlib_PATTERN']='lib%s.so'
- v['LINKFLAGS_cstlib']=[]
- v['cstlib_PATTERN']='lib%s.a'
-def configure(conf):
- conf.find_xlc()
- conf.find_ar()
- conf.xlc_common_flags()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
diff --git a/waflib/Tools/xlcxx.py b/waflib/Tools/xlcxx.py
deleted file mode 100644
index b7efb23..0000000
--- a/waflib/Tools/xlcxx.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-from waflib.Tools import ccroot,ar
-from waflib.Configure import conf
-@conf
-def find_xlcxx(conf):
- cxx=conf.find_program(['xlc++_r','xlc++'],var='CXX')
- cxx=conf.cmd_to_list(cxx)
- conf.get_xlc_version(cxx)
- conf.env.CXX_NAME='xlc++'
- conf.env.CXX=cxx
-@conf
-def xlcxx_common_flags(conf):
- v=conf.env
- v['CXX_SRC_F']=[]
- v['CXX_TGT_F']=['-c','-o']
- if not v['LINK_CXX']:v['LINK_CXX']=v['CXX']
- v['CXXLNK_SRC_F']=[]
- v['CXXLNK_TGT_F']=['-o']
- v['CPPPATH_ST']='-I%s'
- v['DEFINES_ST']='-D%s'
- v['LIB_ST']='-l%s'
- v['LIBPATH_ST']='-L%s'
- v['STLIB_ST']='-l%s'
- v['STLIBPATH_ST']='-L%s'
- v['RPATH_ST']='-Wl,-rpath,%s'
- v['SONAME_ST']=[]
- v['SHLIB_MARKER']=[]
- v['STLIB_MARKER']=[]
- v['LINKFLAGS_cxxprogram']=['-Wl,-brtl']
- v['cxxprogram_PATTERN']='%s'
- v['CXXFLAGS_cxxshlib']=['-fPIC']
- v['LINKFLAGS_cxxshlib']=['-G','-Wl,-brtl,-bexpfull']
- v['cxxshlib_PATTERN']='lib%s.so'
- v['LINKFLAGS_cxxstlib']=[]
- v['cxxstlib_PATTERN']='lib%s.a'
-def configure(conf):
- conf.find_xlcxx()
- conf.find_ar()
- conf.xlcxx_common_flags()
- conf.cxx_load_tools()
- conf.cxx_add_flags()
- conf.link_add_flags()
diff --git a/waflib/Utils.py b/waflib/Utils.py
deleted file mode 100644
index 9532a3b..0000000
--- a/waflib/Utils.py
+++ /dev/null
@@ -1,412 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os,sys,errno,traceback,inspect,re,shutil,datetime,gc
-import subprocess
-try:
- from collections import deque
-except ImportError:
- class deque(list):
- def popleft(self):
- return self.pop(0)
-try:
- import _winreg as winreg
-except ImportError:
- try:
- import winreg
- except ImportError:
- winreg=None
-from waflib import Errors
-try:
- from collections import UserDict
-except ImportError:
- from UserDict import UserDict
-try:
- from hashlib import md5
-except ImportError:
- try:
- from md5 import md5
- except ImportError:
- pass
-try:
- import threading
-except ImportError:
- class threading(object):
- pass
- class Lock(object):
- def acquire(self):
- pass
- def release(self):
- pass
- threading.Lock=threading.Thread=Lock
-else:
- run_old=threading.Thread.run
- def run(*args,**kwargs):
- try:
- run_old(*args,**kwargs)
- except(KeyboardInterrupt,SystemExit):
- raise
- except Exception:
- sys.excepthook(*sys.exc_info())
- threading.Thread.run=run
-SIG_NIL='iluvcuteoverload'
-O644=420
-O755=493
-rot_chr=['\\','|','/','-']
-rot_idx=0
-try:
- from collections import defaultdict
-except ImportError:
- class defaultdict(dict):
- def __init__(self,default_factory):
- super(defaultdict,self).__init__()
- self.default_factory=default_factory
- def __getitem__(self,key):
- try:
- return super(defaultdict,self).__getitem__(key)
- except KeyError:
- value=self.default_factory()
- self[key]=value
- return value
-is_win32=sys.platform in('win32','cli')
-indicator='\x1b[K%s%s%s\r'
-if is_win32 and'NOCOLOR'in os.environ:
- indicator='%s%s%s\r'
-def readf(fname,m='r',encoding='ISO8859-1'):
- if sys.hexversion>0x3000000 and not'b'in m:
- m+='b'
- f=open(fname,m)
- try:
- txt=f.read()
- finally:
- f.close()
- txt=txt.decode(encoding)
- else:
- f=open(fname,m)
- try:
- txt=f.read()
- finally:
- f.close()
- return txt
-def writef(fname,data,m='w',encoding='ISO8859-1'):
- if sys.hexversion>0x3000000 and not'b'in m:
- data=data.encode(encoding)
- m+='b'
- f=open(fname,m)
- try:
- f.write(data)
- finally:
- f.close()
-def h_file(fname):
- f=open(fname,'rb')
- m=md5()
- try:
- while fname:
- fname=f.read(200000)
- m.update(fname)
- finally:
- f.close()
- return m.digest()
-if hasattr(os,'O_NOINHERIT'):
- def readf_win32(f,m='r',encoding='ISO8859-1'):
- flags=os.O_NOINHERIT|os.O_RDONLY
- if'b'in m:
- flags|=os.O_BINARY
- if'+'in m:
- flags|=os.O_RDWR
- try:
- fd=os.open(f,flags)
- except OSError:
- raise IOError('Cannot read from %r'%f)
- if sys.hexversion>0x3000000 and not'b'in m:
- m+='b'
- f=os.fdopen(fd,m)
- try:
- txt=f.read()
- finally:
- f.close()
- txt=txt.decode(encoding)
- else:
- f=os.fdopen(fd,m)
- try:
- txt=f.read()
- finally:
- f.close()
- return txt
- def writef_win32(f,data,m='w',encoding='ISO8859-1'):
- if sys.hexversion>0x3000000 and not'b'in m:
- data=data.encode(encoding)
- m+='b'
- flags=os.O_CREAT|os.O_TRUNC|os.O_WRONLY|os.O_NOINHERIT
- if'b'in m:
- flags|=os.O_BINARY
- if'+'in m:
- flags|=os.O_RDWR
- try:
- fd=os.open(f,flags)
- except OSError:
- raise IOError('Cannot write to %r'%f)
- f=os.fdopen(fd,m)
- try:
- f.write(data)
- finally:
- f.close()
- def h_file_win32(fname):
- try:
- fd=os.open(fname,os.O_BINARY|os.O_RDONLY|os.O_NOINHERIT)
- except OSError:
- raise IOError('Cannot read from %r'%fname)
- f=os.fdopen(fd,'rb')
- m=md5()
- try:
- while fname:
- fname=f.read(200000)
- m.update(fname)
- finally:
- f.close()
- return m.digest()
- readf_old=readf
- writef_old=writef
- h_file_old=h_file
- readf=readf_win32
- writef=writef_win32
- h_file=h_file_win32
-try:
- x=''.encode('hex')
-except LookupError:
- import binascii
- def to_hex(s):
- ret=binascii.hexlify(s)
- if not isinstance(ret,str):
- ret=ret.decode('utf-8')
- return ret
-else:
- def to_hex(s):
- return s.encode('hex')
-to_hex.__doc__="""
-Return the hexadecimal representation of a string
-
-:param s: string to convert
-:type s: string
-"""
-listdir=os.listdir
-if is_win32:
- def listdir_win32(s):
- if not s:
- try:
- import ctypes
- except ImportError:
- return[x+':\\'for x in list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')]
- else:
- dlen=4
- maxdrives=26
- buf=ctypes.create_string_buffer(maxdrives*dlen)
- ndrives=ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen,ctypes.byref(buf))
- return[str(buf.raw[4*i:4*i+2].decode('ascii'))for i in range(int(ndrives/dlen))]
- if len(s)==2 and s[1]==":":
- s+=os.sep
- if not os.path.isdir(s):
- e=OSError('%s is not a directory'%s)
- e.errno=errno.ENOENT
- raise e
- return os.listdir(s)
- listdir=listdir_win32
-def num2ver(ver):
- if isinstance(ver,str):
- ver=tuple(ver.split('.'))
- if isinstance(ver,tuple):
- ret=0
- for i in range(4):
- if i<len(ver):
- ret+=256**(3-i)*int(ver[i])
- return ret
- return ver
-def ex_stack():
- exc_type,exc_value,tb=sys.exc_info()
- exc_lines=traceback.format_exception(exc_type,exc_value,tb)
- return''.join(exc_lines)
-def to_list(sth):
- if isinstance(sth,str):
- return sth.split()
- else:
- return sth
-re_nl=re.compile('\r*\n',re.M)
-def str_to_dict(txt):
- tbl={}
- lines=re_nl.split(txt)
- for x in lines:
- x=x.strip()
- if not x or x.startswith('#')or x.find('=')<0:
- continue
- tmp=x.split('=')
- tbl[tmp[0].strip()]='='.join(tmp[1:]).strip()
- return tbl
-def split_path(path):
- return path.split('/')
-def split_path_cygwin(path):
- if path.startswith('//'):
- ret=path.split('/')[2:]
- ret[0]='/'+ret[0]
- return ret
- return path.split('/')
-re_sp=re.compile('[/\\\\]')
-def split_path_win32(path):
- if path.startswith('\\\\'):
- ret=re.split(re_sp,path)[2:]
- ret[0]='\\'+ret[0]
- return ret
- return re.split(re_sp,path)
-if sys.platform=='cygwin':
- split_path=split_path_cygwin
-elif is_win32:
- split_path=split_path_win32
-split_path.__doc__="""
-Split a path by / or \\. This function is not like os.path.split
-
-:type path: string
-:param path: path to split
-:return: list of strings
-"""
-def check_dir(path):
- if not os.path.isdir(path):
- try:
- os.makedirs(path)
- except OSError ,e:
- if not os.path.isdir(path):
- raise Errors.WafError('Cannot create the folder %r'%path,ex=e)
-def def_attrs(cls,**kw):
- for k,v in kw.items():
- if not hasattr(cls,k):
- setattr(cls,k,v)
-def quote_define_name(s):
- fu=re.compile("[^a-zA-Z0-9]").sub("_",s)
- fu=fu.upper()
- return fu
-def h_list(lst):
- m=md5()
- m.update(str(lst))
- return m.digest()
-def h_fun(fun):
- try:
- return fun.code
- except AttributeError:
- try:
- h=inspect.getsource(fun)
- except IOError:
- h="nocode"
- try:
- fun.code=h
- except AttributeError:
- pass
- return h
-reg_subst=re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
-def subst_vars(expr,params):
- def repl_var(m):
- if m.group(1):
- return'\\'
- if m.group(2):
- return'$'
- try:
- return params.get_flat(m.group(3))
- except AttributeError:
- return params[m.group(3)]
- return reg_subst.sub(repl_var,expr)
-def destos_to_binfmt(key):
- if key=='darwin':
- return'mac-o'
- elif key in('win32','cygwin','uwin','msys'):
- return'pe'
- return'elf'
-def unversioned_sys_platform():
- s=sys.platform
- if s=='java':
- from java.lang import System
- s=System.getProperty('os.name')
- if s=='Mac OS X':
- return'darwin'
- elif s.startswith('Windows '):
- return'win32'
- elif s=='OS/2':
- return'os2'
- elif s=='HP-UX':
- return'hpux'
- elif s in('SunOS','Solaris'):
- return'sunos'
- else:s=s.lower()
- if s=='powerpc':
- return'darwin'
- if s=='win32'or s.endswith('os2')and s!='sunos2':return s
- return re.split('\d+$',s)[0]
-def nada(*k,**kw):
- pass
-class Timer(object):
- def __init__(self):
- self.start_time=datetime.datetime.utcnow()
- def __str__(self):
- delta=datetime.datetime.utcnow()-self.start_time
- days=int(delta.days)
- hours=delta.seconds//3600
- minutes=(delta.seconds-hours*3600)//60
- seconds=delta.seconds-hours*3600-minutes*60+float(delta.microseconds)/1000/1000
- result=''
- if days:
- result+='%dd'%days
- if days or hours:
- result+='%dh'%hours
- if days or hours or minutes:
- result+='%dm'%minutes
- return'%s%.3fs'%(result,seconds)
-if is_win32:
- old=shutil.copy2
- def copy2(src,dst):
- old(src,dst)
- shutil.copystat(src,dst)
- setattr(shutil,'copy2',copy2)
-if os.name=='java':
- try:
- gc.disable()
- gc.enable()
- except NotImplementedError:
- gc.disable=gc.enable
-def read_la_file(path):
- sp=re.compile(r'^([^=]+)=\'(.*)\'$')
- dc={}
- for line in readf(path).splitlines():
- try:
- _,left,right,_=sp.split(line.strip())
- dc[left]=right
- except ValueError:
- pass
- return dc
-def nogc(fun):
- def f(*k,**kw):
- try:
- gc.disable()
- ret=fun(*k,**kw)
- finally:
- gc.enable()
- return ret
- f.__doc__=fun.__doc__
- return f
-def run_once(fun):
- cache={}
- def wrap(k):
- try:
- return cache[k]
- except KeyError:
- ret=fun(k)
- cache[k]=ret
- return ret
- wrap.__cache__=cache
- return wrap
-def get_registry_app_path(key,filename):
- if not winreg:
- return None
- try:
- result=winreg.QueryValue(key,"Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe"%filename[0])
- except WindowsError:
- pass
- else:
- if os.path.isfile(result):
- return result
diff --git a/waflib/__init__.py b/waflib/__init__.py
deleted file mode 100644
index efeed79..0000000
--- a/waflib/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
diff --git a/waflib/ansiterm.py b/waflib/ansiterm.py
deleted file mode 100644
index 0de6171..0000000
--- a/waflib/ansiterm.py
+++ /dev/null
@@ -1,177 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import sys,os
-try:
- if not(sys.stderr.isatty()and sys.stdout.isatty()):
- raise ValueError('not a tty')
- from ctypes import*
- class COORD(Structure):
- _fields_=[("X",c_short),("Y",c_short)]
- class SMALL_RECT(Structure):
- _fields_=[("Left",c_short),("Top",c_short),("Right",c_short),("Bottom",c_short)]
- class CONSOLE_SCREEN_BUFFER_INFO(Structure):
- _fields_=[("Size",COORD),("CursorPosition",COORD),("Attributes",c_short),("Window",SMALL_RECT),("MaximumWindowSize",COORD)]
- class CONSOLE_CURSOR_INFO(Structure):
- _fields_=[('dwSize',c_ulong),('bVisible',c_int)]
- sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
- csinfo=CONSOLE_CURSOR_INFO()
- hconsole=windll.kernel32.GetStdHandle(-11)
- windll.kernel32.GetConsoleScreenBufferInfo(hconsole,byref(sbinfo))
- if sbinfo.Size.X<9 or sbinfo.Size.Y<9:raise ValueError('small console')
- windll.kernel32.GetConsoleCursorInfo(hconsole,byref(csinfo))
-except Exception:
- pass
-else:
- import re,threading
- is_vista=getattr(sys,"getwindowsversion",None)and sys.getwindowsversion()[0]>=6
- try:
- _type=unicode
- except NameError:
- _type=str
- to_int=lambda number,default:number and int(number)or default
- wlock=threading.Lock()
- STD_OUTPUT_HANDLE=-11
- STD_ERROR_HANDLE=-12
- class AnsiTerm(object):
- def __init__(self):
- self.encoding=sys.stdout.encoding
- self.hconsole=windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
- self.cursor_history=[]
- self.orig_sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
- self.orig_csinfo=CONSOLE_CURSOR_INFO()
- windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self.orig_sbinfo))
- windll.kernel32.GetConsoleCursorInfo(hconsole,byref(self.orig_csinfo))
- def screen_buffer_info(self):
- sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
- windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(sbinfo))
- return sbinfo
- def clear_line(self,param):
- mode=param and int(param)or 0
- sbinfo=self.screen_buffer_info()
- if mode==1:
- line_start=COORD(0,sbinfo.CursorPosition.Y)
- line_length=sbinfo.Size.X
- elif mode==2:
- line_start=COORD(sbinfo.CursorPosition.X,sbinfo.CursorPosition.Y)
- line_length=sbinfo.Size.X-sbinfo.CursorPosition.X
- else:
- line_start=sbinfo.CursorPosition
- line_length=sbinfo.Size.X-sbinfo.CursorPosition.X
- chars_written=c_int()
- windll.kernel32.FillConsoleOutputCharacterA(self.hconsole,c_wchar(' '),line_length,line_start,byref(chars_written))
- windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,line_length,line_start,byref(chars_written))
- def clear_screen(self,param):
- mode=to_int(param,0)
- sbinfo=self.screen_buffer_info()
- if mode==1:
- clear_start=COORD(0,0)
- clear_length=sbinfo.CursorPosition.X*sbinfo.CursorPosition.Y
- elif mode==2:
- clear_start=COORD(0,0)
- clear_length=sbinfo.Size.X*sbinfo.Size.Y
- windll.kernel32.SetConsoleCursorPosition(self.hconsole,clear_start)
- else:
- clear_start=sbinfo.CursorPosition
- clear_length=((sbinfo.Size.X-sbinfo.CursorPosition.X)+sbinfo.Size.X*(sbinfo.Size.Y-sbinfo.CursorPosition.Y))
- chars_written=c_int()
- windll.kernel32.FillConsoleOutputCharacterA(self.hconsole,c_wchar(' '),clear_length,clear_start,byref(chars_written))
- windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,clear_length,clear_start,byref(chars_written))
- def push_cursor(self,param):
- sbinfo=self.screen_buffer_info()
- self.cursor_history.append(sbinfo.CursorPosition)
- def pop_cursor(self,param):
- if self.cursor_history:
- old_pos=self.cursor_history.pop()
- windll.kernel32.SetConsoleCursorPosition(self.hconsole,old_pos)
- def set_cursor(self,param):
- y,sep,x=param.partition(';')
- x=to_int(x,1)-1
- y=to_int(y,1)-1
- sbinfo=self.screen_buffer_info()
- new_pos=COORD(min(max(0,x),sbinfo.Size.X),min(max(0,y),sbinfo.Size.Y))
- windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos)
- def set_column(self,param):
- x=to_int(param,1)-1
- sbinfo=self.screen_buffer_info()
- new_pos=COORD(min(max(0,x),sbinfo.Size.X),sbinfo.CursorPosition.Y)
- windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos)
- def move_cursor(self,x_offset=0,y_offset=0):
- sbinfo=self.screen_buffer_info()
- new_pos=COORD(min(max(0,sbinfo.CursorPosition.X+x_offset),sbinfo.Size.X),min(max(0,sbinfo.CursorPosition.Y+y_offset),sbinfo.Size.Y))
- windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos)
- def move_up(self,param):
- self.move_cursor(y_offset=-to_int(param,1))
- def move_down(self,param):
- self.move_cursor(y_offset=to_int(param,1))
- def move_left(self,param):
- self.move_cursor(x_offset=-to_int(param,1))
- def move_right(self,param):
- self.move_cursor(x_offset=to_int(param,1))
- def next_line(self,param):
- sbinfo=self.screen_buffer_info()
- self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=to_int(param,1))
- def prev_line(self,param):
- sbinfo=self.screen_buffer_info()
- self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=-to_int(param,1))
- def rgb2bgr(self,c):
- return((c&1)<<2)|(c&2)|((c&4)>>2)
- def set_color(self,param):
- cols=param.split(';')
- sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
- windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(sbinfo))
- attr=sbinfo.Attributes
- for c in cols:
- if is_vista:
- c=int(c)
- else:
- c=to_int(c,0)
- if c in range(30,38):
- attr=(attr&0xfff0)|self.rgb2bgr(c-30)
- elif c in range(40,48):
- attr=(attr&0xff0f)|(self.rgb2bgr(c-40)<<4)
- elif c==0:
- attr=self.orig_sbinfo.Attributes
- elif c==1:
- attr|=0x08
- elif c==4:
- attr|=0x80
- elif c==7:
- attr=(attr&0xff88)|((attr&0x70)>>4)|((attr&0x07)<<4)
- windll.kernel32.SetConsoleTextAttribute(self.hconsole,attr)
- def show_cursor(self,param):
- csinfo.bVisible=1
- windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo))
- def hide_cursor(self,param):
- csinfo.bVisible=0
- windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo))
- ansi_command_table={'A':move_up,'B':move_down,'C':move_right,'D':move_left,'E':next_line,'F':prev_line,'G':set_column,'H':set_cursor,'f':set_cursor,'J':clear_screen,'K':clear_line,'h':show_cursor,'l':hide_cursor,'m':set_color,'s':push_cursor,'u':pop_cursor,}
- ansi_tokens=re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
- def write(self,text):
- try:
- wlock.acquire()
- for param,cmd,txt in self.ansi_tokens.findall(text):
- if cmd:
- cmd_func=self.ansi_command_table.get(cmd)
- if cmd_func:
- cmd_func(self,param)
- else:
- self.writeconsole(txt)
- finally:
- wlock.release()
- def writeconsole(self,txt):
- chars_written=c_int()
- writeconsole=windll.kernel32.WriteConsoleA
- if isinstance(txt,_type):
- writeconsole=windll.kernel32.WriteConsoleW
- TINY_STEP=3000
- for x in range(0,len(txt),TINY_STEP):
- tiny=txt[x:x+TINY_STEP]
- writeconsole(self.hconsole,tiny,len(tiny),byref(chars_written),None)
- def flush(self):
- pass
- def isatty(self):
- return True
- sys.stderr=sys.stdout=AnsiTerm()
- os.environ['TERM']='vt100'
diff --git a/waflib/extras/__init__.py b/waflib/extras/__init__.py
deleted file mode 100644
index efeed79..0000000
--- a/waflib/extras/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
diff --git a/waflib/extras/compat15.py b/waflib/extras/compat15.py
deleted file mode 100644
index 3343afc..0000000
--- a/waflib/extras/compat15.py
+++ /dev/null
@@ -1,220 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import sys
-from waflib import ConfigSet,Logs,Options,Scripting,Task,Build,Configure,Node,Runner,TaskGen,Utils,Errors,Context
-sys.modules['Environment']=ConfigSet
-ConfigSet.Environment=ConfigSet.ConfigSet
-sys.modules['Logs']=Logs
-sys.modules['Options']=Options
-sys.modules['Scripting']=Scripting
-sys.modules['Task']=Task
-sys.modules['Build']=Build
-sys.modules['Configure']=Configure
-sys.modules['Node']=Node
-sys.modules['Runner']=Runner
-sys.modules['TaskGen']=TaskGen
-sys.modules['Utils']=Utils
-from waflib.Tools import c_preproc
-sys.modules['preproc']=c_preproc
-from waflib.Tools import c_config
-sys.modules['config_c']=c_config
-ConfigSet.ConfigSet.copy=ConfigSet.ConfigSet.derive
-ConfigSet.ConfigSet.set_variant=Utils.nada
-Build.BuildContext.add_subdirs=Build.BuildContext.recurse
-Build.BuildContext.new_task_gen=Build.BuildContext.__call__
-Build.BuildContext.is_install=0
-Node.Node.relpath_gen=Node.Node.path_from
-def name_to_obj(self,s,env=None):
- Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"')
- return self.get_tgen_by_name(s)
-Build.BuildContext.name_to_obj=name_to_obj
-def env_of_name(self,name):
- try:
- return self.all_envs[name]
- except KeyError:
- Logs.error('no such environment: '+name)
- return None
-Build.BuildContext.env_of_name=env_of_name
-def set_env_name(self,name,env):
- self.all_envs[name]=env
- return env
-Configure.ConfigurationContext.set_env_name=set_env_name
-def retrieve(self,name,fromenv=None):
- try:
- env=self.all_envs[name]
- except KeyError:
- env=ConfigSet.ConfigSet()
- self.prepare_env(env)
- self.all_envs[name]=env
- else:
- if fromenv:Logs.warn("The environment %s may have been configured already"%name)
- return env
-Configure.ConfigurationContext.retrieve=retrieve
-Configure.ConfigurationContext.sub_config=Configure.ConfigurationContext.recurse
-Configure.ConfigurationContext.check_tool=Configure.ConfigurationContext.load
-Configure.conftest=Configure.conf
-Configure.ConfigurationError=Errors.ConfigurationError
-Options.OptionsContext.sub_options=Options.OptionsContext.recurse
-Options.OptionsContext.tool_options=Context.Context.load
-Options.Handler=Options.OptionsContext
-Task.simple_task_type=Task.task_type_from_func=Task.task_factory
-Task.TaskBase.classes=Task.classes
-def setitem(self,key,value):
- if key.startswith('CCFLAGS'):
- key=key[1:]
- self.table[key]=value
-ConfigSet.ConfigSet.__setitem__=setitem
-@TaskGen.feature('d')
-@TaskGen.before('apply_incpaths')
-def old_importpaths(self):
- if getattr(self,'importpaths',[]):
- self.includes=self.importpaths
-from waflib import Context
-eld=Context.load_tool
-def load_tool(*k,**kw):
- ret=eld(*k,**kw)
- if'set_options'in ret.__dict__:
- Logs.warn('compat: rename "set_options" to options')
- ret.options=ret.set_options
- if'detect'in ret.__dict__:
- Logs.warn('compat: rename "detect" to "configure"')
- ret.configure=ret.detect
- return ret
-Context.load_tool=load_tool
-rev=Context.load_module
-def load_module(path):
- ret=rev(path)
- if'set_options'in ret.__dict__:
- Logs.warn('compat: rename "set_options" to "options" (%r)'%path)
- ret.options=ret.set_options
- if'srcdir'in ret.__dict__:
- Logs.warn('compat: rename "srcdir" to "top" (%r)'%path)
- ret.top=ret.srcdir
- if'blddir'in ret.__dict__:
- Logs.warn('compat: rename "blddir" to "out" (%r)'%path)
- ret.out=ret.blddir
- return ret
-Context.load_module=load_module
-old_post=TaskGen.task_gen.post
-def post(self):
- self.features=self.to_list(self.features)
- if'cc'in self.features:
- Logs.warn('compat: the feature cc does not exist anymore (use "c")')
- self.features.remove('cc')
- self.features.append('c')
- if'cstaticlib'in self.features:
- Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
- self.features.remove('cstaticlib')
- self.features.append(('cxx'in self.features)and'cxxstlib'or'cstlib')
- if getattr(self,'ccflags',None):
- Logs.warn('compat: "ccflags" was renamed to "cflags"')
- self.cflags=self.ccflags
- return old_post(self)
-TaskGen.task_gen.post=post
-def waf_version(*k,**kw):
- Logs.warn('wrong version (waf_version was removed in waf 1.6)')
-Utils.waf_version=waf_version
-import os
-@TaskGen.feature('c','cxx','d')
-@TaskGen.before('apply_incpaths','propagate_uselib_vars')
-@TaskGen.after('apply_link','process_source')
-def apply_uselib_local(self):
- env=self.env
- from waflib.Tools.ccroot import stlink_task
- self.uselib=self.to_list(getattr(self,'uselib',[]))
- self.includes=self.to_list(getattr(self,'includes',[]))
- names=self.to_list(getattr(self,'uselib_local',[]))
- get=self.bld.get_tgen_by_name
- seen=set([])
- tmp=Utils.deque(names)
- if tmp:
- Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
- while tmp:
- lib_name=tmp.popleft()
- if lib_name in seen:
- continue
- y=get(lib_name)
- y.post()
- seen.add(lib_name)
- if getattr(y,'uselib_local',None):
- for x in self.to_list(getattr(y,'uselib_local',[])):
- obj=get(x)
- obj.post()
- if getattr(obj,'link_task',None):
- if not isinstance(obj.link_task,stlink_task):
- tmp.append(x)
- if getattr(y,'link_task',None):
- link_name=y.target[y.target.rfind(os.sep)+1:]
- if isinstance(y.link_task,stlink_task):
- env.append_value('STLIB',[link_name])
- else:
- env.append_value('LIB',[link_name])
- self.link_task.set_run_after(y.link_task)
- self.link_task.dep_nodes+=y.link_task.outputs
- tmp_path=y.link_task.outputs[0].parent.bldpath()
- if not tmp_path in env['LIBPATH']:
- env.prepend_value('LIBPATH',[tmp_path])
- for v in self.to_list(getattr(y,'uselib',[])):
- if not env['STLIB_'+v]:
- if not v in self.uselib:
- self.uselib.insert(0,v)
- if getattr(y,'export_includes',None):
- self.includes.extend(y.to_incnodes(y.export_includes))
-@TaskGen.feature('cprogram','cxxprogram','cstlib','cxxstlib','cshlib','cxxshlib','dprogram','dstlib','dshlib')
-@TaskGen.after('apply_link')
-def apply_objdeps(self):
- names=getattr(self,'add_objects',[])
- if not names:
- return
- names=self.to_list(names)
- get=self.bld.get_tgen_by_name
- seen=[]
- while names:
- x=names[0]
- if x in seen:
- names=names[1:]
- continue
- y=get(x)
- if getattr(y,'add_objects',None):
- added=0
- lst=y.to_list(y.add_objects)
- lst.reverse()
- for u in lst:
- if u in seen:continue
- added=1
- names=[u]+names
- if added:continue
- y.post()
- seen.append(x)
- for t in getattr(y,'compiled_tasks',[]):
- self.link_task.inputs.extend(t.outputs)
-@TaskGen.after('apply_link')
-def process_obj_files(self):
- if not hasattr(self,'obj_files'):
- return
- for x in self.obj_files:
- node=self.path.find_resource(x)
- self.link_task.inputs.append(node)
-@TaskGen.taskgen_method
-def add_obj_file(self,file):
- if not hasattr(self,'obj_files'):self.obj_files=[]
- if not'process_obj_files'in self.meths:self.meths.append('process_obj_files')
- self.obj_files.append(file)
-old_define=Configure.ConfigurationContext.__dict__['define']
-@Configure.conf
-def define(self,key,val,quote=True):
- old_define(self,key,val,quote)
- if key.startswith('HAVE_'):
- self.env[key]=1
-old_undefine=Configure.ConfigurationContext.__dict__['undefine']
-@Configure.conf
-def undefine(self,key):
- old_undefine(self,key)
- if key.startswith('HAVE_'):
- self.env[key]=0
-def set_incdirs(self,val):
- Logs.warn('compat: change "export_incdirs" by "export_includes"')
- self.export_includes=val
-TaskGen.task_gen.export_incdirs=property(None,set_incdirs)
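The compat15 module deleted above is waf's bridge from the 1.5 API to 1.6: it maps srcdir/blddir onto top/out, set_options onto options, detect onto configure, the cc/cstaticlib features onto c/cstlib, export_incdirs onto export_includes, and the deprecated uselib_local onto use. A minimal sketch of a wscript written directly against the newer names, for a hypothetical C project with util.c and main.c (illustrative only, not sprai's build script):

APPNAME = 'demo'
VERSION = '1.0'

top = '.'        # waf 1.5 called this srcdir
out = 'build'    # waf 1.5 called this blddir

def options(opt):            # waf 1.5: set_options()
    opt.load('compiler_c')

def configure(conf):         # tool modules: waf 1.5 used detect()
    conf.load('compiler_c')

def build(bld):
    # waf 1.5: features='cc cstaticlib' plus uselib_local='util'
    bld(features='c cstlib', source='util.c', target='util',
        export_includes='.')
    bld(features='c cprogram', source='main.c', target='app', use='util')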
diff --git a/waflib/fixpy2.py b/waflib/fixpy2.py
deleted file mode 100644
index 98f7036..0000000
--- a/waflib/fixpy2.py
+++ /dev/null
@@ -1,53 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-
-import os
-all_modifs={}
-def fixdir(dir):
- global all_modifs
- for k in all_modifs:
- for v in all_modifs[k]:
- modif(os.path.join(dir,'waflib'),k,v)
-def modif(dir,name,fun):
- if name=='*':
- lst=[]
- for y in'. Tools extras'.split():
- for x in os.listdir(os.path.join(dir,y)):
- if x.endswith('.py'):
- lst.append(y+os.sep+x)
- for x in lst:
- modif(dir,x,fun)
- return
- filename=os.path.join(dir,name)
- f=open(filename,'r')
- try:
- txt=f.read()
- finally:
- f.close()
- txt=fun(txt)
- f=open(filename,'w')
- try:
- f.write(txt)
- finally:
- f.close()
-def subst(*k):
- def do_subst(fun):
- global all_modifs
- for x in k:
- try:
- all_modifs[x].append(fun)
- except KeyError:
- all_modifs[x]=[fun]
- return fun
- return do_subst
-@subst('*')
-def r1(code):
- code=code.replace(',e:',',e:')
- code=code.replace("",'')
- code=code.replace('','')
- return code
-@subst('Runner.py')
-def r4(code):
- code=code.replace('next(self.biter)','self.biter.next()')
- return code
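fixpy2.py, also removed here, is the hook waf uses to rewrite its own unpacked sources for older Python 2 interpreters: @subst('*') or @subst('Runner.py') registers a plain text filter in all_modifs, and fixdir() later runs every registered filter over the matching .py files under waflib/. A standalone sketch of that decorator-registry pattern, with an illustrative filter that is not part of waf:

all_modifs = {}  # filename pattern -> list of rewrite functions

def subst(*patterns):
    # Register the decorated function under each filename pattern.
    def do_subst(fun):
        for p in patterns:
            all_modifs.setdefault(p, []).append(fun)
        return fun
    return do_subst

@subst('*')
def drop_trailing_whitespace(code):
    # Illustrative rewrite only; waf's real filters patch syntax differences.
    return '\n'.join(line.rstrip() for line in code.splitlines()) + '\n'

def apply_all(path):
    # Run every '*' filter over one file in place, mirroring modif() above.
    with open(path) as f:
        txt = f.read()
    for fun in all_modifs.get('*', []):
        txt = fun(txt)
    with open(path, 'w') as f:
        f.write(txt)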
diff --git a/wscript b/wscript
index bdf8f74..69d8d4e 100644
--- a/wscript
+++ b/wscript
@@ -1,5 +1,5 @@
APPNAME = 'sprai'
-VERSION = '0.9.9.13'
+VERSION = '0.9.9.14'
srcdir = '.'
blddir = 'build'
@@ -131,6 +131,7 @@ def dist(ctx):
'bfmt72s.c',
'col2fqcell.h',
'LICENSE.txt',
+ 'ChangeLog.txt',
'myrealigner.c',
'nss2v_v3.c',
'waf',
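The only hand-written source change in this import is the wscript hunk above: the version string moves to 0.9.9.14 and ChangeLog.txt joins the list of files packed by the dist command. In waf, dist(ctx) can pin the archive contents by assigning ctx.files; a minimal sketch of that idea, reusing a few of the file names visible in the hunk (not sprai's actual dist() body):

def dist(ctx):
    # Explicitly list what goes into the release tarball; the names here are
    # taken from the fragment above and are illustrative, not exhaustive.
    ctx.files = ctx.path.ant_glob(
        ['bfmt72s.c', 'col2fqcell.h', 'LICENSE.txt', 'ChangeLog.txt',
         'myrealigner.c', 'nss2v_v3.c', 'waf', 'wscript'])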
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/sprai.git